CombinedText stringlengths 4 3.42M |
|---|
require_relative 'boot'

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Glowfic
  # HTML elements users may include in sanitized written content.
  ALLOWED_TAGS = %w(b i u sub sup del ins hr p br div span pre code h1 h2 h3 h4 h5 h6 ul ol li dl dt dd a img blockquote q table td th tr strike s strong em big small font cite abbr var samp kbd mark ruby rp rt bdo wbr details summary)

  # Attribute whitelist keyed by element name; the :all entry applies to
  # every allowed element.
  ALLOWED_ATTRIBUTES = {
    :all => %w(xml:lang class style title lang dir),
    "hr" => %w(width),
    "li" => %w(value),
    "ol" => %w(reversed start type),
    "a" => %w(href hreflang rel target type),
    "del" => %w(cite datetime),
    "table" => %w(width),
    "td" => %w(abbr width),
    "th" => %w(abbr width),
    "blockquote" => %w(cite),
    "cite" => %w(href)
  }

  # Community invite links surfaced elsewhere in the app.
  DISCORD_LINK_GLOWFIC = 'https://discord.gg/Mytf2ruKpv'
  DISCORD_LINK_CONSTELLATION = 'https://discord.gg/RWUPXQD'

  # Helpers that clean user-supplied HTML via the Sanitize gem.
  module Sanitizers
    # Config for user-written posts/replies: Sanitize's RELAXED config
    # narrowed to the whitelists defined above.
    WRITTEN_CONF = Sanitize::Config.merge(Sanitize::Config::RELAXED,
      elements: ALLOWED_TAGS,
      attributes: ALLOWED_ATTRIBUTES,
    )

    # Sanitizes post/reply HTML with WRITTEN_CONF and marks it html_safe.
    def self.written(text)
      Sanitize.fragment(text, WRITTEN_CONF).html_safe
    end

    # Config for descriptions: only <a> elements with an href survive.
    DESCRIPTION_CONF = Sanitize::Config.merge(Sanitize::Config::RELAXED,
      elements: ['a'],
      attributes: {'a' => ['href']},
    )

    # Sanitizes description HTML, keeping only links.
    def self.description(text)
      Sanitize.fragment(text, DESCRIPTION_CONF).html_safe
    end

    # Sanitizes with the Sanitize gem's default (most restrictive) config,
    # stripping markup entirely.
    def self.full(text)
      Sanitize.fragment(text).html_safe
    end
  end

  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.2

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration can go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded after loading
    # the framework and any gems in your application.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Eastern Time (US & Canada)'

    # Whitelists used by ActionView's built-in sanitize helpers (separate
    # from the Sanitize-gem configs above).
    config.action_view.sanitized_allowed_tags = Glowfic::ALLOWED_TAGS
    config.action_view.sanitized_allowed_attributes = %w(href src width height alt cite datetime title class name xml:lang abbr style target)

    config.middleware.use Rack::Pratchett

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
Disable Active Record cache versioning, since redis-rails does not support it
require_relative 'boot'

require 'rails/all'

# Pull in every gem from the Gemfile, including those scoped to
# :test, :development, or :production.
Bundler.require(*Rails.groups)

module Glowfic
  # Elements a user may write in sanitized HTML content.
  ALLOWED_TAGS = %w[b i u sub sup del ins hr p br div span pre code h1 h2 h3 h4 h5 h6 ul ol li dl dt dd a img blockquote q table td th tr strike s strong em big small font cite abbr var samp kbd mark ruby rp rt bdo wbr details summary]

  # Attribute whitelist, keyed by element name (:all covers every element).
  ALLOWED_ATTRIBUTES = {
    all: %w[xml:lang class style title lang dir],
    'hr' => %w[width],
    'li' => %w[value],
    'ol' => %w[reversed start type],
    'a' => %w[href hreflang rel target type],
    'del' => %w[cite datetime],
    'table' => %w[width],
    'td' => %w[abbr width],
    'th' => %w[abbr width],
    'blockquote' => %w[cite],
    'cite' => %w[href]
  }

  # Community invite links.
  DISCORD_LINK_GLOWFIC = 'https://discord.gg/Mytf2ruKpv'
  DISCORD_LINK_CONSTELLATION = 'https://discord.gg/RWUPXQD'

  # Wrappers around the Sanitize gem for cleaning user-supplied HTML.
  module Sanitizers
    # Posts and replies: RELAXED config narrowed to the whitelists above.
    WRITTEN_CONF = Sanitize::Config.merge(
      Sanitize::Config::RELAXED,
      elements: ALLOWED_TAGS,
      attributes: ALLOWED_ATTRIBUTES
    )

    # Descriptions: links only.
    DESCRIPTION_CONF = Sanitize::Config.merge(
      Sanitize::Config::RELAXED,
      elements: ['a'],
      attributes: { 'a' => ['href'] }
    )

    class << self
      # Clean HTML written by users (posts/replies).
      def written(text)
        Sanitize.fragment(text, WRITTEN_CONF).html_safe
      end

      # Clean description HTML, keeping only <a href>.
      def description(text)
        Sanitize.fragment(text, DESCRIPTION_CONF).html_safe
      end

      # Strip markup entirely using Sanitize's default config.
      def full(text)
        Sanitize.fragment(text).html_safe
      end
    end
  end

  class Application < Rails::Application
    # Configuration defaults for the Rails version this app was generated with.
    config.load_defaults 5.2

    # Eastern time; Active Record converts to/from this zone automatically.
    config.time_zone = 'Eastern Time (US & Canada)'

    # Whitelists for ActionView's built-in sanitize helpers.
    config.action_view.sanitized_allowed_tags = Glowfic::ALLOWED_TAGS
    config.action_view.sanitized_allowed_attributes = %w[href src width height alt cite datetime title class name xml:lang abbr style target]

    config.middleware.use Rack::Pratchett

    # redis-rails does not support cache versioning
    config.active_record.cache_versioning = false
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module DiscoverbpsV2
  class Application < Rails::Application
    # Autoload everything under lib/ and eagerly require lib/core_ext/*.rb.
    config.autoload_paths += Dir["#{config.root}/lib/**/"]
    Dir[File.join(Rails.root, "lib", "core_ext", "*.rb")].each {|l| require l }

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Eastern Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Enforce whitelist mode for mass assignment: every model must declare
    # accessible parameters via attr_accessible or attr_protected.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline.
    # FIX: `config.assets.enabled = true` was previously assigned twice in
    # this file; the redundant duplicate has been removed.
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Don't boot the full application when precompiling assets.
    config.assets.initialize_on_precompile = false
    config.assets.precompile += [
      'admin.css',
      'admin.js',
      'application.css',
      'application.js'
    ]
  end
end
Use Rack::Deflater
require File.expand_path('../boot', __FILE__)

require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module DiscoverbpsV2
  class Application < Rails::Application
    # Autoload everything under lib/ and eagerly require lib/core_ext/*.rb.
    config.autoload_paths += Dir["#{config.root}/lib/**/"]
    Dir[File.join(Rails.root, "lib", "core_ext", "*.rb")].each {|l| require l }

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Eastern Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Enforce whitelist mode for mass assignment: every model must declare
    # accessible parameters via attr_accessible or attr_protected.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline.
    # FIX: `config.assets.enabled = true` was previously assigned twice in
    # this file; the redundant duplicate has been removed.
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Don't boot the full application when precompiling assets.
    config.assets.initialize_on_precompile = false
    config.assets.precompile += [
      'admin.css',
      'admin.js',
      'application.css',
      'application.js'
    ]

    # Gzip-compress HTTP responses.
    config.middleware.use Rack::Deflater
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails/all'

if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end

module Prelaunchr
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Disable expanded per-file asset debug output.
    config.assets.debug = false

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # App-specific flag: whether the prelaunch campaign has ended.
    # NOTE(review): not a standard Rails option — presumably read elsewhere
    # in the app via Rails.configuration.ended; verify against callers.
    config.ended = false
  end
end
Disable application initialization during asset precompilation, for Heroku compatibility
require File.expand_path('../boot', __FILE__)

require 'rails/all'

if defined?(Bundler)
  # Assets are precompiled before deploy, so keep the :assets group
  # out of the production bundle.
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # To compile assets lazily in production instead, use:
  # Bundler.require(:default, :assets, Rails.env)
end

module Prelaunchr
  class Application < Rails::Application
    # Anything set here can be overridden per-environment in
    # config/environments/*; initializers load from config/initializers.

    # Template encoding for Ruby 1.9.
    config.encoding = "utf-8"

    # Keep passwords out of the log files.
    config.filter_parameters += [:password]

    # Escape HTML entities when rendering JSON.
    config.active_support.escape_html_entities_in_json = true

    # Serve concatenated assets rather than per-file debug output.
    config.assets.debug = false

    # Mass assignment runs in whitelist mode: every model must declare its
    # accessible parameters via attr_accessible or attr_protected.
    config.active_record.whitelist_attributes = true

    # Asset pipeline: don't boot the app during precompile, enable the
    # pipeline, and bump this version string to expire all cached assets.
    config.assets.initialize_on_precompile = false
    config.assets.enabled = true
    config.assets.version = '1.0'

    # App-specific flag marking whether the prelaunch campaign has ended.
    config.ended = false
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Malstrom
  class Application < Rails::Application
    # FIX: this assignment previously sat at the top level of the file,
    # where `config` is undefined and raises NameError during boot. It
    # belongs inside the Rails::Application subclass.
    config.active_job.queue_adapter = :delayed_job

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
Remove the stray top-level queue adapter assignment from application.rb (it referenced `config` outside the Application class)
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Malstrom
  # Stock generated Rails application class; all active configuration
  # currently lives in config/environments/* and config/initializers.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
|
require File.expand_path('../boot', __FILE__)

require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
#require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"
require 'mongoid'
require 'adroit-age'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

# Load the Mongoid settings for the production environment.
Mongoid.load!("config/mongoid.yml", :production)

module Dezande
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr

    # Configure fallbacks for mongoid errors:
    require "i18n/backend/fallbacks"
    I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
    config.i18n.fallbacks = {'fr' => 'en'}

    config.generators do |g|
      g.orm :mongoid
    end

    # Configure redis as the cache store.
    # FIX: the URL previously pointed at 0.0.0.0, which is a wildcard *bind*
    # address, not a reliable connect target on all platforms; use the
    # loopback address to reach the local Redis server.
    config.cache_store = :redis_store, "redis://127.0.0.1:6379/0/cache", { expires_in: 90.minutes }
  end
end
Change the Redis IP address from 0.0.0.0 to 127.0.0.1
require File.expand_path('../boot', __FILE__)

require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
#require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
require "rails/test_unit/railtie"
require 'mongoid'
require 'adroit-age'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

# Load the Mongoid settings for the production environment.
Mongoid.load!("config/mongoid.yml", :production)

module Dezande
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Paris'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :fr

    # Configure fallbacks for mongoid errors:
    require "i18n/backend/fallbacks"
    I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
    config.i18n.fallbacks = {'fr' => 'en'}

    # Generate Mongoid (not Active Record) models from `rails generate`.
    config.generators do |g|
      g.orm :mongoid
    end

    # Configure redis as the cache store, reached over loopback.
    config.cache_store = :redis_store, "redis://127.0.0.1:6379/0/cache", { expires_in: 90.minutes }
  end
end
|
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)

require_relative '../lib/canvas_yaml'

# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"

Bundler.require(*Rails.groups)

# Rails 4.0 has no dump_schema_after_migration setting; backport the
# accessor (defaulting to true) so code can rely on it under either
# supported Rails version.
if CANVAS_RAILS4_0
  ActiveRecord::Base.class_eval do
    mattr_accessor :dump_schema_after_migration, instance_writer: false
    self.dump_schema_after_migration = true
  end
end
module CanvasRails
  class Application < Rails::Application
    # Make lib/ autoloadable and put the app root on the load path.
    config.autoload_paths += [config.root.join('lib').to_s]
    $LOAD_PATH << config.root.to_s
    config.encoding = 'utf-8'

    # Filter sensitive request parameters out of the logs.
    require_dependency 'logging_filter'
    config.filter_parameters.concat LoggingFilter.filtered_parameters

    # Map auth failures to 401 responses and adjust default headers.
    config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
    config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
    config.action_dispatch.default_headers['X-UA-Compatible'] = "IE=Edge,chrome=1"
    config.action_dispatch.default_headers.delete('X-Frame-Options')

    config.app_generators do |c|
      c.test_framework :rspec
      c.integration_tool :rspec
      c.performance_tool :rspec
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # See Rails::Configuration for more options.

    # Make Time.zone default to the specified zone, and make Active Record store time values
    # in the database in UTC, and return them converted to the specified local zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
    config.time_zone = 'UTC'

    # Build the logger from config/logging.yml for the current Rails.env,
    # falling back to the plain 'rails' logger at debug level.
    log_config = File.exist?(Rails.root+"config/logging.yml") && YAML.load_file(Rails.root+"config/logging.yml")[Rails.env]
    log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
    opts = {}
    require 'canvas_logger'
    config.log_level = log_config['log_level']
    log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
    opts[:skip_thread_context] = true if log_config['log_context'] == false
    case log_config["logger"]
    when "syslog"
      require 'syslog_wrapper'
      log_config["app_ident"] ||= "canvas-lms"
      log_config["daemon_ident"] ||= "canvas-lms-daemon"
      facilities = 0
      (log_config["facilities"] || []).each do |facility|
        facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
      end
      ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
      opts[:include_pid] = true if log_config["include_pid"] == true
      config.logger = SyslogWrapper.new(ident, facilities, opts)
      config.logger.level = log_level
    else
      log_path = config.paths['log'].first
      if ENV['RUNNING_AS_DAEMON'] == 'true'
        log_path = Rails.root+'log/delayed_job.log'
      end
      config.logger = CanvasLogger.new(log_path, log_level, opts)
    end

    # Activate observers that should always be running
    config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
    config.active_record.whitelist_attributes = false
    unless CANVAS_RAILS4_0
      config.active_record.raise_in_transactional_callbacks = true # may as well opt into the new behavior
    end
    config.active_support.encode_big_decimal_as_string = false

    # FIX: app/presenters was previously listed twice in this list; the
    # duplicate entry has been removed.
    config.autoload_paths += %W(#{Rails.root}/app/middleware
                                #{Rails.root}/app/observers
                                #{Rails.root}/app/presenters
                                #{Rails.root}/app/services
                                #{Rails.root}/app/serializers)
    config.autoload_once_paths << Rails.root.join("app/middleware")

    # prevent directory->module inference in these directories from wreaking
    # havoc on the app (e.g. stylesheets/base -> ::Base)
    config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
                                  #{Rails.root}/app/stylesheets)

    # we don't know what middleware to make SessionsTimeout follow until after
    # we've loaded config/initializers/session_store.rb
    initializer("extend_middleware_stack", after: "load_config_initializers") do |app|
      app.config.middleware.insert_before(config.session_store, 'LoadAccount')
      app.config.middleware.insert_before(config.session_store, 'SessionsTimeout')
      app.config.middleware.swap('ActionDispatch::RequestId', 'RequestContextGenerator')
      app.config.middleware.insert_after(config.session_store, 'RequestContextSession')
      app.config.middleware.insert_before('ActionDispatch::ParamsParser', 'RequestThrottle')
      app.config.middleware.insert_before('Rack::MethodOverride', 'PreventNonMultipartParse')
    end

    config.to_prepare do
      require_dependency 'canvas/plugins/default_plugins'
      ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
    end

    # Prepended into the PostgreSQL adapter: defaults prepared_statements to
    # false and, on connect, tries each configured host in turn.
    module PostgreSQLEarlyExtensions
      def initialize(connection, logger, connection_parameters, config)
        unless config.key?(:prepared_statements)
          config = config.dup
          config[:prepared_statements] = false
        end
        super(connection, logger, connection_parameters, config)
      end

      def connect
        hosts = Array(@connection_parameters[:host]).presence || [nil]
        hosts.each_with_index do |host, index|
          begin
            connection_parameters = @connection_parameters.dup
            connection_parameters[:host] = host
            @connection = PGconn.connect(connection_parameters)
            if CANVAS_RAILS4_0
              ActiveRecord::ConnectionAdapters::PostgreSQLColumn.money_precision = (postgresql_version >= 80300) ? 19 : 10
            else
              ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::OID::Money.precision = (postgresql_version >= 80300) ? 19 : 10
            end
            configure_connection
            break
          rescue ::PG::Error => error
            if !CANVAS_RAILS4_0 && error.message.include?("does not exist")
              raise ActiveRecord::NoDatabaseError.new(error.message, error)
            elsif index == hosts.length - 1
              raise
            end
            # else try next host
          end
        end
      end
    end

    Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
                    PostgreSQLEarlyExtensions,
                    method: :prepend)

    # Track whether YAML is currently being parsed through SafeYAML, so the
    # class-loading hooks below can refuse unexpected classes only then.
    SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
    module SafeYAMLWithFlag
      def load(*args)
        previous, self.safe_parsing = safe_parsing, true
        super
      ensure
        self.safe_parsing = previous
      end
    end
    SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)

    # safe_yaml can't whitelist specific instances of scalar values, so just override the loading
    # here, and do a weird check
    YAML.add_ruby_type("object:Class") do |_type, val|
      if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
        raise "Cannot load class #{val} from YAML"
      end
      val.constantize
    end

    # TODO: Use this instead of the above block when we switch to Psych
    Psych.add_domain_type("ruby/object", "Class") do |_type, val|
      if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
        raise "Cannot load class #{val} from YAML"
      end
      val.constantize
    end

    # Extend any base classes, even gem classes
    Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }

    # tell Rails to use the native XML parser instead of REXML
    ActiveSupport::XmlMini.backend = 'Nokogiri'

    class NotImplemented < StandardError; end

    if defined?(PhusionPassenger)
      PhusionPassenger.on_event(:starting_worker_process) do |forked|
        if forked
          # We're in smart spawning mode, and need to make unique connections for this fork.
          Canvas.reconnect_redis
        end
      end
    end

    if defined?(PhusionPassenger)
      PhusionPassenger.on_event(:after_installing_signal_handlers) do
        Canvas::Reloader.trap_signal
      end
    else
      config.to_prepare do
        Canvas::Reloader.trap_signal
      end
    end

    if defined?(Spring)
      Spring.after_fork do
        Canvas.reconnect_redis
      end
    end

    # don't wrap fields with errors with a <div class="fieldWithErrors" />,
    # since that could leak information (e.g. valid vs invalid username on
    # login page)
    config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }

    # Rack app that routes unhandled exceptions to
    # ApplicationController#rescue_action_dispatch_exception.
    class ExceptionsApp
      def call(env)
        @app_controller ||= ActionDispatch::Routing::RouteSet::Dispatcher.new({}).controller(:controller => 'application')
        @app_controller.action('rescue_action_dispatch_exception').call(env)
      end
    end
    config.exceptions_app = ExceptionsApp.new

    config.before_initialize do
      config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
    end

    # Keep Canvas request-context headers (and cookies) out of rack-cache keys.
    if config.action_dispatch.rack_cache != false
      config.action_dispatch.rack_cache[:ignore_headers] =
        %w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
    end
  end
end
Require PostgreSQL 9.3 or newer
Change-Id: Ia79b94b3d0a6f6407461ca0e5712b306ca0906e8
Reviewed-on: https://gerrit.instructure.com/85384
Tested-by: Jenkins
Reviewed-by: Rob Orton <7e09c9d3e96378bf549fc283fd6e1e5b7014cc33@instructure.com>
Product-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
QA-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)
require_relative '../lib/canvas_yaml'
# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
if CANVAS_RAILS4_0
ActiveRecord::Base.class_eval do
mattr_accessor :dump_schema_after_migration, instance_writer: false
self.dump_schema_after_migration = true
end
end
# Canvas LMS application class (legacy Rails 4.0/4.2-era codebase; behavior
# branches on the CANVAS_RAILS4_0 flag defined during boot).
module CanvasRails
class Application < Rails::Application
config.autoload_paths += [config.root.join('lib').to_s]
$LOAD_PATH << config.root.to_s
config.encoding = 'utf-8'
require_dependency 'logging_filter'
# Scrub the request parameters LoggingFilter lists from the logs.
config.filter_parameters.concat LoggingFilter.filtered_parameters
# Respond 401 (not 500) when these auth errors reach the dispatcher.
config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
config.action_dispatch.default_headers['X-UA-Compatible'] = "IE=Edge,chrome=1"
# Dropped so pages can be framed (e.g. by LTI consumers) — NOTE(review):
# confirm clickjacking protection is handled elsewhere.
config.action_dispatch.default_headers.delete('X-Frame-Options')
config.app_generators do |c|
c.test_framework :rspec
c.integration_tool :rspec
c.performance_tool :rspec
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Make Time.zone default to the specified zone, and make Active Record store time values
# in the database in UTC, and return them converted to the specified local zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
config.time_zone = 'UTC'
# Logger selection: config/logging.yml (keyed by Rails.env) may pick syslog;
# otherwise a CanvasLogger writing to the usual Rails log path (or
# log/delayed_job.log when running as a daemon). Defaults to debug level.
log_config = File.exist?(Rails.root+"config/logging.yml") && YAML.load_file(Rails.root+"config/logging.yml")[Rails.env]
log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
opts = {}
require 'canvas_logger'
config.log_level = log_config['log_level']
log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
opts[:skip_thread_context] = true if log_config['log_context'] == false
case log_config["logger"]
when "syslog"
require 'syslog_wrapper'
log_config["app_ident"] ||= "canvas-lms"
log_config["daemon_ident"] ||= "canvas-lms-daemon"
facilities = 0
(log_config["facilities"] || []).each do |facility|
facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
end
ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
opts[:include_pid] = true if log_config["include_pid"] == true
config.logger = SyslogWrapper.new(ident, facilities, opts)
config.logger.level = log_level
else
log_path = config.paths['log'].first
if ENV['RUNNING_AS_DAEMON'] == 'true'
log_path = Rails.root+'log/delayed_job.log'
end
config.logger = CanvasLogger.new(log_path, log_level, opts)
end
# Activate observers that should always be running
config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
config.active_record.whitelist_attributes = false
unless CANVAS_RAILS4_0
config.active_record.raise_in_transactional_callbacks = true # may as well opt into the new behavior
end
config.active_support.encode_big_decimal_as_string = false
config.autoload_paths += %W(#{Rails.root}/app/middleware
#{Rails.root}/app/observers
#{Rails.root}/app/presenters
#{Rails.root}/app/services
#{Rails.root}/app/serializers
#{Rails.root}/app/presenters)
config.autoload_once_paths << Rails.root.join("app/middleware")
# prevent directory->module inference in these directories from wreaking
# havoc on the app (e.g. stylesheets/base -> ::Base)
config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
#{Rails.root}/app/stylesheets)
# we don't know what middleware to make SessionsTimeout follow until after
# we've loaded config/initializers/session_store.rb
initializer("extend_middleware_stack", after: "load_config_initializers") do |app|
app.config.middleware.insert_before(config.session_store, 'LoadAccount')
app.config.middleware.insert_before(config.session_store, 'SessionsTimeout')
app.config.middleware.swap('ActionDispatch::RequestId', 'RequestContextGenerator')
app.config.middleware.insert_after(config.session_store, 'RequestContextSession')
app.config.middleware.insert_before('ActionDispatch::ParamsParser', 'RequestThrottle')
app.config.middleware.insert_before('Rack::MethodOverride', 'PreventNonMultipartParse')
end
config.to_prepare do
require_dependency 'canvas/plugins/default_plugins'
ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
end
# Prepended into the PostgreSQL adapter (via Autoextend below) to:
#  * default prepared_statements off unless explicitly configured, and
#  * try each configured host in order until one accepts the connection.
module PostgreSQLEarlyExtensions
def initialize(connection, logger, connection_parameters, config)
unless config.key?(:prepared_statements)
config = config.dup
config[:prepared_statements] = false
end
super(connection, logger, connection_parameters, config)
end
def connect
hosts = Array(@connection_parameters[:host]).presence || [nil]
hosts.each_with_index do |host, index|
begin
connection_parameters = @connection_parameters.dup
connection_parameters[:host] = host
@connection = PGconn.connect(connection_parameters)
# Enforce the minimum supported server version at connect time.
raise "Canvas requires PostgreSQL 9.3 or newer" unless postgresql_version >= 90300
if CANVAS_RAILS4_0
ActiveRecord::ConnectionAdapters::PostgreSQLColumn.money_precision = (postgresql_version >= 80300) ? 19 : 10
else
ActiveRecord::ConnectionAdapters::PostgreSQLAdapter::OID::Money.precision = (postgresql_version >= 80300) ? 19 : 10
end
configure_connection
break
rescue ::PG::Error => error
if !CANVAS_RAILS4_0 && error.message.include?("does not exist")
raise ActiveRecord::NoDatabaseError.new(error.message, error)
elsif index == hosts.length - 1
raise
end
# else try next host
end
end
end
end
Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
PostgreSQLEarlyExtensions,
method: :prepend)
# Track (via a flag on SafeYAML) whether a safe parse is in progress, so the
# YAML type handlers below can restrict which classes may be deserialized.
SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
module SafeYAMLWithFlag
def load(*args)
previous, self.safe_parsing = safe_parsing, true
super
ensure
# Restore the prior flag even if parsing raised (supports nesting).
self.safe_parsing = previous
end
end
SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)
# safe_yaml can't whitelist specific instances of scalar values, so just override the loading
# here, and do a weird check
YAML.add_ruby_type("object:Class") do |_type, val|
if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
raise "Cannot load class #{val} from YAML"
end
val.constantize
end
# TODO: Use this instead of the above block when we switch to Psych
Psych.add_domain_type("ruby/object", "Class") do |_type, val|
if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
raise "Cannot load class #{val} from YAML"
end
val.constantize
end
# Extend any base classes, even gem classes
Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }
# tell Rails to use the native XML parser instead of REXML
ActiveSupport::XmlMini.backend = 'Nokogiri'
class NotImplemented < StandardError; end
if defined?(PhusionPassenger)
PhusionPassenger.on_event(:starting_worker_process) do |forked|
if forked
# We're in smart spawning mode, and need to make unique connections for this fork.
Canvas.reconnect_redis
end
end
end
# Re-arm the reloader's signal trap after Passenger installs its own
# handlers; outside Passenger, re-arm it on every code reload instead.
if defined?(PhusionPassenger)
PhusionPassenger.on_event(:after_installing_signal_handlers) do
Canvas::Reloader.trap_signal
end
else
config.to_prepare do
Canvas::Reloader.trap_signal
end
end
if defined?(Spring)
Spring.after_fork do
Canvas.reconnect_redis
end
end
# don't wrap fields with errors with a <div class="fieldWithErrors" />,
# since that could leak information (e.g. valid vs invalid username on
# login page)
config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }
# Rack app that renders error pages through ApplicationController instead
# of Rails' static public/ error pages.
class ExceptionsApp
def call(env)
@app_controller ||= ActionDispatch::Routing::RouteSet::Dispatcher.new({}).controller(:controller => 'application')
@app_controller.action('rescue_action_dispatch_exception').call(env)
end
end
config.exceptions_app = ExceptionsApp.new
config.before_initialize do
config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
end
# Don't persist these sensitive response headers into rack-cache entries.
if config.action_dispatch.rack_cache != false
config.action_dispatch.rack_cache[:ignore_headers] =
%w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
end
end
end
|
# frozen_string_literal: true
require_relative "boot"
%w(
rails
active_model/railtie
active_job/railtie
active_record/railtie
action_controller/railtie
action_mailer/railtie
action_view/railtie
view_component/engine
).each do |railtie|
require railtie
end
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Annict application configuration: Rails 6.0 defaults, Japanese default
# locale, delayed_job queueing, SQL schema format, and host/path redirects
# handled in middleware before the Rails router.
module Annict
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Heroku will set `RAILS_LOG_TO_STDOUT` when you deploy a Ruby app via
# the Heroku Ruby Buildpack for Rails 4.2+ apps.
# https://blog.heroku.com/container_ready_rails_5#stdout-logging
if ENV["RAILS_LOG_TO_STDOUT"].present?
config.logger = ActiveSupport::Logger.new(STDOUT)
end
# Don't generate system test files.
config.generators.system_tests = nil
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Set Time.zone default to the specified zone and
# make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'UTC'
# config.active_record.default_timezone = :local
config.i18n.enforce_available_locales = false
# The default locale is :en and all translations from
# config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = :ja
config.i18n.available_locales = %i(ja en)
config.generators do |g|
g.test_framework :rspec, controller_specs: false, helper_specs: false,
routing_specs: false, view_specs: false
g.factory_bot false
end
config.active_job.queue_adapter = :delayed_job
config.active_record.schema_format = :sql
# Host canonicalization and legacy-path redirects via rack-rewrite; in the
# replacement strings "$&" is the whole matched path and "$1"/"$2" are
# capture groups.
config.middleware.insert_before(Rack::Runtime, Rack::Rewrite) do
# Redirect: annict.herokuapp.com -> annict.com
r301 /.*/, "https://#{ENV.fetch('ANNICT_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].include?("annict.herokuapp.com")
}
# Redirect: www.annict.com -> annict.com
r301 /.*/, "https://#{ENV.fetch('ANNICT_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].in?(["www.#{ENV.fetch('ANNICT_HOST')}"])
}
# Redirect: www.annict.jp -> annict.jp
r301 /.*/, "https://#{ENV.fetch('ANNICT_JP_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].in?(["www.#{ENV.fetch('ANNICT_JP_HOST')}"])
}
r301 %r{\A/activities}, "/"
r301 %r{\A/users/([A-Za-z0-9_]+)\z}, "/@$1"
r301 %r{\A/users/([A-Za-z0-9_]+)/(following|followers|wanna_watch|watching|watched|on_hold|stop_watching)\z}, "/@$1/$2"
r301 %r{\A/@([A-Za-z0-9_]+)/reviews\z}, "/@$1/records"
r301 %r{\A/episodes/[0-9]+/items}, "/"
r301 %r{\A/works/[0-9]+/items}, "/"
# While maintenance mode is on, serve the static maintenance page for all
# paths (except the page's own assets) unless the request comes from the
# admin IP. The IP is taken from the last X-Forwarded-For entry, i.e. the
# one appended by the closest proxy.
maintenance_file = File.join(Rails.root, "public", "maintenance.html")
send_file /(.*)$(?<!maintenance|favicons)/, maintenance_file, if: proc { |rack_env|
ip_address = rack_env["HTTP_X_FORWARDED_FOR"]&.split(",")&.last&.strip
File.exist?(maintenance_file) &&
ENV["ANNICT_MAINTENANCE_MODE"] == "on" &&
ip_address != ENV["ANNICT_ADMIN_IP"]
}
end
# Wide-open CORS for the public API; only the ETag header is exposed.
config.middleware.insert_before(0, Rack::Cors) do
ALLOWED_METHODS = %i(get post patch delete options).freeze
EXPOSED_HEADERS = %w(ETag).freeze
allow do
origins "*"
resource "*", headers: :any, methods: ALLOWED_METHODS, expose: EXPOSED_HEADERS
end
end
# Gzip all the things
# https://schneems.com/2017/11/08/80-smaller-rails-footprint-with-rack-deflate/
config.middleware.insert_after ActionDispatch::Static, Rack::Deflater
# Sentry setup. NOTE: the block parameter shadows the class-level `config`
# inside this block.
Raven.configure do |config|
config.dsn = ENV.fetch("SENTRY_DSN")
config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s)
end
ActiveRecord::SessionStore::Session.serializer = :null
end
end
Tweak: use the first X-Forwarded-For entry as the client IP for the maintenance-mode admin check
# frozen_string_literal: true
require_relative "boot"
%w(
rails
active_model/railtie
active_job/railtie
active_record/railtie
action_controller/railtie
action_mailer/railtie
action_view/railtie
view_component/engine
).each do |railtie|
require railtie
end
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Annict application configuration: Rails 6.0 defaults, Japanese default
# locale, delayed_job queueing, SQL schema format, and host/path redirects
# handled in middleware before the Rails router.
module Annict
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Heroku will set `RAILS_LOG_TO_STDOUT` when you deploy a Ruby app via
# the Heroku Ruby Buildpack for Rails 4.2+ apps.
# https://blog.heroku.com/container_ready_rails_5#stdout-logging
if ENV["RAILS_LOG_TO_STDOUT"].present?
config.logger = ActiveSupport::Logger.new(STDOUT)
end
# Don't generate system test files.
config.generators.system_tests = nil
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
# Set Time.zone default to the specified zone and
# make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'UTC'
# config.active_record.default_timezone = :local
config.i18n.enforce_available_locales = false
# The default locale is :en and all translations from
# config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = :ja
config.i18n.available_locales = %i(ja en)
config.generators do |g|
g.test_framework :rspec, controller_specs: false, helper_specs: false,
routing_specs: false, view_specs: false
g.factory_bot false
end
config.active_job.queue_adapter = :delayed_job
config.active_record.schema_format = :sql
# Host canonicalization and legacy-path redirects via rack-rewrite; in the
# replacement strings "$&" is the whole matched path and "$1"/"$2" are
# capture groups.
config.middleware.insert_before(Rack::Runtime, Rack::Rewrite) do
# Redirect: annict.herokuapp.com -> annict.com
r301 /.*/, "https://#{ENV.fetch('ANNICT_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].include?("annict.herokuapp.com")
}
# Redirect: www.annict.com -> annict.com
r301 /.*/, "https://#{ENV.fetch('ANNICT_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].in?(["www.#{ENV.fetch('ANNICT_HOST')}"])
}
# Redirect: www.annict.jp -> annict.jp
r301 /.*/, "https://#{ENV.fetch('ANNICT_JP_HOST')}$&", if: proc { |rack_env|
rack_env["SERVER_NAME"].in?(["www.#{ENV.fetch('ANNICT_JP_HOST')}"])
}
r301 %r{\A/activities}, "/"
r301 %r{\A/users/([A-Za-z0-9_]+)\z}, "/@$1"
r301 %r{\A/users/([A-Za-z0-9_]+)/(following|followers|wanna_watch|watching|watched|on_hold|stop_watching)\z}, "/@$1/$2"
r301 %r{\A/@([A-Za-z0-9_]+)/reviews\z}, "/@$1/records"
r301 %r{\A/episodes/[0-9]+/items}, "/"
r301 %r{\A/works/[0-9]+/items}, "/"
# While maintenance mode is on, serve the static maintenance page for all
# paths (except the page's own assets) unless the request comes from the
# admin IP. The IP is taken from the *first* X-Forwarded-For entry.
# NOTE(review): the first entry is supplied by the client and can be
# spoofed; confirm this is acceptable for the admin-IP bypass.
maintenance_file = File.join(Rails.root, "public", "maintenance.html")
send_file /(.*)$(?<!maintenance|favicons)/, maintenance_file, if: proc { |rack_env|
ip_address = rack_env["HTTP_X_FORWARDED_FOR"]&.split(",")&.first&.strip
File.exist?(maintenance_file) &&
ENV["ANNICT_MAINTENANCE_MODE"] == "on" &&
ip_address != ENV["ANNICT_ADMIN_IP"]
}
end
# Wide-open CORS for the public API; only the ETag header is exposed.
config.middleware.insert_before(0, Rack::Cors) do
ALLOWED_METHODS = %i(get post patch delete options).freeze
EXPOSED_HEADERS = %w(ETag).freeze
allow do
origins "*"
resource "*", headers: :any, methods: ALLOWED_METHODS, expose: EXPOSED_HEADERS
end
end
# Gzip all the things
# https://schneems.com/2017/11/08/80-smaller-rails-footprint-with-rack-deflate/
config.middleware.insert_after ActionDispatch::Static, Rack::Deflater
# Sentry setup. NOTE: the block parameter shadows the class-level `config`
# inside this block.
Raven.configure do |config|
config.dsn = ENV.fetch("SENTRY_DSN")
config.sanitize_fields = Rails.application.config.filter_parameters.map(&:to_s)
end
ActiveRecord::SessionStore::Session.serializer = :null
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Collector application: an all-defaults Rails configuration; every setting
# below is left commented out (framework defaults apply).
module Collector
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
config.secret_key_base in application.rb
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Collector application: near-default Rails configuration with an inline
# secret_key_base.
module Collector
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# SECURITY NOTE(review): secret_key_base is hard-coded and committed to
# source control. It should come from an environment variable or the
# credentials/secrets mechanism instead, and this value should be rotated.
config.secret_key_base = "wqieuiohfjkwehtuiqencrioqvehq2co84uqo3icthqc2o"
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Assets should be precompiled for production (so we don't need the gems loaded then)
Bundler.require(*Rails.groups(assets: %w(development test)))
# WriteSome application configuration: asset pipeline enabled, STDOUT
# logging for Heroku, and no app boot during asset precompilation.
module WriteSome
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types.
# config.active_record.schema_format = :sql
# Enable the asset pipeline.
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# heroku stuff.
# NOTE: assigned at class-definition time, so this replaces the logger for
# every environment, not just production.
Rails.logger = Logger.new(STDOUT)
config.assets.initialize_on_precompile = false
end
end
expire all assets!
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Assets should be precompiled for production (so we don't need the gems loaded then)
Bundler.require(*Rails.groups(assets: %w(development test)))
# WriteSome application configuration: asset pipeline enabled, STDOUT
# logging for Heroku, and no app boot during asset precompilation.
module WriteSome
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types.
# config.active_record.schema_format = :sql
# Enable the asset pipeline.
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0.1'
# heroku stuff.
# NOTE: assigned at class-definition time, so this replaces the logger for
# every environment, not just production.
Rails.logger = Logger.new(STDOUT)
config.assets.initialize_on_precompile = false
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Lobsters-derived application configuration for Service Design News.
module Lobsters
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/extras)
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.enforce_available_locales = true
# Future Rails version will disable implicit joins, so we'll be prepared.
config.active_record.disable_implicit_join_references = true
# Raise an exception when using mass assignment with unpermitted attributes
config.action_controller.action_on_unpermitted_parameters = :raise
config.cache_store = :file_store, "#{config.root}/tmp/cache/"
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
# Heroku requires this to be false
config.assets.initialize_on_precompile=false
config.assets.paths << Rails.root.join("app", "assets", "fonts")
end
end
# disable yaml/xml/whatever input parsing
# (silence_warnings suppresses the "already initialized constant" warning
# from reassigning DEFAULT_PARSERS)
silence_warnings do
ActionDispatch::ParamsParser::DEFAULT_PARSERS = {}
end
# define site name and domain to be used globally, can be overridden in
# config/initializers/production.rb
class << Rails.application
def domain
"servicedesignnews.herokuapp.com"
end
def name
"Service Design News"
end
# used as mailing list prefix and continual prefix, cannot have spaces
# (derived from `name`: lowercased with all non-letters stripped)
def shortname
name.downcase.gsub(/[^a-z]/, "")
end
end
# Generated URLs (e.g. in mailers) use the site domain defined above.
Rails.application.routes.default_url_options[:host] = Rails.application.domain
require "#{Rails.root}/lib/monkey"
Amended site domain
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Lobsters-derived application configuration for Service Design News.
module Lobsters
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/extras)
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.enforce_available_locales = true
# Future Rails version will disable implicit joins, so we'll be prepared.
config.active_record.disable_implicit_join_references = true
# Raise an exception when using mass assignment with unpermitted attributes
config.action_controller.action_on_unpermitted_parameters = :raise
config.cache_store = :file_store, "#{config.root}/tmp/cache/"
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
# Heroku requires this to be false
config.assets.initialize_on_precompile=false
config.assets.paths << Rails.root.join("app", "assets", "fonts")
end
end
# disable yaml/xml/whatever input parsing
# (silence_warnings suppresses the "already initialized constant" warning
# from reassigning DEFAULT_PARSERS)
silence_warnings do
ActionDispatch::ParamsParser::DEFAULT_PARSERS = {}
end
# define site name and domain to be used globally, can be overridden in
# config/initializers/production.rb
class << Rails.application
def domain
"servicedesignnews.com"
end
def name
"Service Design News"
end
# used as mailing list prefix and continual prefix, cannot have spaces
# (derived from `name`: lowercased with all non-letters stripped)
def shortname
name.downcase.gsub(/[^a-z]/, "")
end
end
# Generated URLs (e.g. in mailers) use the site domain defined above.
Rails.application.routes.default_url_options[:host] = Rails.application.domain
require "#{Rails.root}/lib/monkey"
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Open Food Web application configuration (Spree-based storefront).
module Openfoodweb
class Application < Rails::Application
# Re-apply Spree decorators and view overrides on every code reload:
# `require` once when classes are cached, `load` each time otherwise.
config.to_prepare do
# Load application's model / class decorators
Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
Rails.configuration.cache_classes ? require(c) : load(c)
end
# Load application's view overrides
Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
Rails.configuration.cache_classes ? require(c) : load(c)
end
end
# Register Spree calculators
initializer "spree.register.calculators" do |app|
app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Itemwise
app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Weight
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = 'en'
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
allow asset precompile to work on new heroku instance
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Open Food Web application configuration (Spree-based storefront).
module Openfoodweb
class Application < Rails::Application
# Re-apply Spree decorators and view overrides on every code reload:
# `require` once when classes are cached, `load` each time otherwise.
config.to_prepare do
# Load application's model / class decorators
Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
Rails.configuration.cache_classes ? require(c) : load(c)
end
# Load application's view overrides
Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
Rails.configuration.cache_classes ? require(c) : load(c)
end
end
# Register Spree calculators
initializer "spree.register.calculators" do |app|
app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Itemwise
app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Weight
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = 'en'
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
# Heroku: don't boot the full app during assets:precompile
config.assets.initialize_on_precompile = false
end
end
|
require_relative 'boot'
require 'rails/all'
require_relative "../lib/open_food_network/i18n_config"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Openfoodnetwork
  class Application < Rails::Application
    # Runs before every request in development (once at boot when
    # cache_classes is on): pulls in Spree decorators and view overrides that
    # plain autoloading would not pick up.
    config.to_prepare do
      # Load application's model / class decorators
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        # `require` caches the file when classes are cached; `load` re-reads
        # it on each reload in development.
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Load application's view overrides
      Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end
    end

    # Activate the Skylight agent in staging. You need to provision the
    # SKYLIGHT_AUTHENTICATION env var in your OFN instance for this to work.
    #
    # Check https://github.com/openfoodfoundation/openfoodnetwork/pull/2070 for
    # details
    config.skylight.environments += ["staging"]

    # Settings dependent on locale
    #
    # We need to set this config before the promo environment gets loaded and
    # after the spree environment gets loaded...
    # This is because Spree uses `Spree::Config` while evaluating classes :scream:
    #
    # https://github.com/spree/spree/blob/2-0-stable/core/app/models/spree/calculator/per_item.rb#L6
    #
    # TODO: move back to spree initializer once we upgrade to a more recent version
    # of Spree
    initializer 'ofn.spree_locale_settings', before: 'spree.promo.environment' do |app|
      Spree::Config['checkout_zone'] = ENV['CHECKOUT_ZONE']
      Spree::Config['currency'] = ENV['CURRENCY']
      # Guard: the countries table may not exist yet (e.g. during the very
      # first rake db:setup), in which case fall back to a hard-coded default.
      if Spree::Country.table_exists?
        country = Spree::Country.find_by_iso(ENV['DEFAULT_COUNTRY_CODE'])
        Spree::Config['default_country_id'] = country.id if country.present?
      else
        Spree::Config['default_country_id'] = 12 # Australia
      end
    end

    # Register Spree calculators
    initializer 'spree.register.calculators' do |app|
      # NOTE(review): `app.config` and `config` refer to the same
      # configuration object inside this block; the mixed usage below is
      # equivalent but inconsistent.
      app.config.spree.calculators.shipping_methods << OpenFoodNetwork::Calculator::Weight
      app.config.spree.calculators.add_class('enterprise_fees')
      config.spree.calculators.enterprise_fees = [
        Calculator::FlatPercentPerItem,
        Spree::Calculator::FlatRate,
        Spree::Calculator::FlexiRate,
        Spree::Calculator::PerItem,
        Spree::Calculator::PriceSack,
        OpenFoodNetwork::Calculator::Weight
      ]
      app.config.spree.calculators.add_class('payment_methods')
      config.spree.calculators.payment_methods = [
        Spree::Calculator::FlatPercentItemTotal,
        Spree::Calculator::FlatRate,
        Spree::Calculator::FlexiRate,
        Spree::Calculator::PerItem,
        Spree::Calculator::PriceSack
      ]
    end

    # Register Spree payment methods
    initializer "spree.gateway.payment_methods", :after => "spree.register.payment_methods" do |app|
      app.config.spree.payment_methods << Spree::Gateway::Migs
      app.config.spree.payment_methods << Spree::Gateway::Pin
      app.config.spree.payment_methods << Spree::Gateway::StripeConnect
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(
      #{config.root}/app/presenters
      #{config.root}/app/jobs
    )

    # Routes are split into several files under config/routes/.
    config.paths["config/routes"] = %w(
      config/routes.rb
      config/routes/admin.rb
      config/routes/spree.rb
    ).map { |relative_path| Rails.root.join(relative_path) }

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = ENV["TIMEZONE"]

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = OpenFoodNetwork::I18nConfig.default_locale
    config.i18n.available_locales = OpenFoodNetwork::I18nConfig.available_locales
    I18n.locale = config.i18n.locale = config.i18n.default_locale

    # Setting this to true causes a performance regression in Rails 3.2.17
    # When we're on a version with the fix below, we can set it to true
    # https://github.com/svenfuchs/i18n/issues/230
    I18n.config.enforce_available_locales = false

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.2'

    # Make Foundation's stylesheets importable from the app's Sass files.
    config.sass.load_paths += [
      "#{Gem.loaded_specs['foundation-rails'].full_gem_path}/vendor/assets/stylesheets/foundation/components",
      "#{Gem.loaded_specs['foundation-rails'].full_gem_path}/vendor/assets/stylesheets/foundation/"
    ]

    # css and js files other than application.* are not precompiled by default
    # Instead, they must be explicitly included below
    # http://stackoverflow.com/questions/8012434/what-is-the-purpose-of-config-assets-precompile
    config.assets.initialize_on_precompile = true
    config.assets.precompile += ['store/all.css', 'store/all.js', 'store/shop_front.js', 'iehack.js']
    config.assets.precompile += ['admin/all.css', 'admin/*.js', 'admin/**/*.js']
    config.assets.precompile += ['darkswarm/all.css', 'darkswarm/all_split2.css', 'darkswarm/all.js']
    config.assets.precompile += ['mail/all.css']
    config.assets.precompile += ['search/all.css', 'search/*.js']
    config.assets.precompile += ['shared/*']
    config.assets.precompile += ['qz/*']

    config.active_support.escape_html_entities_in_json = true
  end
end
Delete _split2.css from application.rb assets precompile array.
require_relative 'boot'
require 'rails/all'
require_relative "../lib/open_food_network/i18n_config"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Openfoodnetwork
  class Application < Rails::Application
    # Runs before every request in development (once at boot when
    # cache_classes is on) so Spree decorators and view overrides are
    # reloaded along with the rest of the code.
    config.to_prepare do
      # Load application's model / class decorators
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        # `require` when classes are cached (load once); `load` otherwise
        # (re-read on each reload).
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Load application's view overrides
      Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end
    end

    # Activate the Skylight agent in staging. You need to provision the
    # SKYLIGHT_AUTHENTICATION env var in your OFN instance for this to work.
    #
    # Check https://github.com/openfoodfoundation/openfoodnetwork/pull/2070 for
    # details
    config.skylight.environments += ["staging"]

    # Settings dependent on locale
    #
    # We need to set this config before the promo environment gets loaded and
    # after the spree environment gets loaded...
    # This is because Spree uses `Spree::Config` while evaluating classes :scream:
    #
    # https://github.com/spree/spree/blob/2-0-stable/core/app/models/spree/calculator/per_item.rb#L6
    #
    # TODO: move back to spree initializer once we upgrade to a more recent version
    # of Spree
    initializer 'ofn.spree_locale_settings', before: 'spree.promo.environment' do |app|
      Spree::Config['checkout_zone'] = ENV['CHECKOUT_ZONE']
      Spree::Config['currency'] = ENV['CURRENCY']
      # The countries table may not exist yet (first-time setup); fall back
      # to a hard-coded default country in that case.
      if Spree::Country.table_exists?
        country = Spree::Country.find_by_iso(ENV['DEFAULT_COUNTRY_CODE'])
        Spree::Config['default_country_id'] = country.id if country.present?
      else
        Spree::Config['default_country_id'] = 12 # Australia
      end
    end

    # Register Spree calculators
    initializer 'spree.register.calculators' do |app|
      # NOTE(review): `app.config` and `config` are the same object here;
      # the mixed usage is equivalent but inconsistent.
      app.config.spree.calculators.shipping_methods << OpenFoodNetwork::Calculator::Weight
      app.config.spree.calculators.add_class('enterprise_fees')
      config.spree.calculators.enterprise_fees = [
        Calculator::FlatPercentPerItem,
        Spree::Calculator::FlatRate,
        Spree::Calculator::FlexiRate,
        Spree::Calculator::PerItem,
        Spree::Calculator::PriceSack,
        OpenFoodNetwork::Calculator::Weight
      ]
      app.config.spree.calculators.add_class('payment_methods')
      config.spree.calculators.payment_methods = [
        Spree::Calculator::FlatPercentItemTotal,
        Spree::Calculator::FlatRate,
        Spree::Calculator::FlexiRate,
        Spree::Calculator::PerItem,
        Spree::Calculator::PriceSack
      ]
    end

    # Register Spree payment methods
    initializer "spree.gateway.payment_methods", :after => "spree.register.payment_methods" do |app|
      app.config.spree.payment_methods << Spree::Gateway::Migs
      app.config.spree.payment_methods << Spree::Gateway::Pin
      app.config.spree.payment_methods << Spree::Gateway::StripeConnect
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(
      #{config.root}/app/presenters
      #{config.root}/app/jobs
    )

    # Routes are split into several files under config/routes/.
    config.paths["config/routes"] = %w(
      config/routes.rb
      config/routes/admin.rb
      config/routes/spree.rb
    ).map { |relative_path| Rails.root.join(relative_path) }

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = ENV["TIMEZONE"]

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = OpenFoodNetwork::I18nConfig.default_locale
    config.i18n.available_locales = OpenFoodNetwork::I18nConfig.available_locales
    I18n.locale = config.i18n.locale = config.i18n.default_locale

    # Setting this to true causes a performance regression in Rails 3.2.17
    # When we're on a version with the fix below, we can set it to true
    # https://github.com/svenfuchs/i18n/issues/230
    I18n.config.enforce_available_locales = false

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.2'

    # Make Foundation's stylesheets importable from the app's Sass files.
    config.sass.load_paths += [
      "#{Gem.loaded_specs['foundation-rails'].full_gem_path}/vendor/assets/stylesheets/foundation/components",
      "#{Gem.loaded_specs['foundation-rails'].full_gem_path}/vendor/assets/stylesheets/foundation/"
    ]

    # css and js files other than application.* are not precompiled by default
    # Instead, they must be explicitly included below
    # http://stackoverflow.com/questions/8012434/what-is-the-purpose-of-config-assets-precompile
    config.assets.initialize_on_precompile = true
    config.assets.precompile += ['store/all.css', 'store/all.js', 'store/shop_front.js', 'iehack.js']
    config.assets.precompile += ['admin/all.css', 'admin/*.js', 'admin/**/*.js']
    config.assets.precompile += ['darkswarm/all.css', 'darkswarm/all.js']
    config.assets.precompile += ['mail/all.css']
    config.assets.precompile += ['search/all.css', 'search/*.js']
    config.assets.precompile += ['shared/*']
    config.assets.precompile += ['qz/*']

    config.active_support.escape_html_entities_in_json = true
  end
end
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RoboconDb
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Japanese is the default UI locale.
    config.i18n.default_locale = :ja

    # NOTE: Do not clear ActiveRecord::Base.protected_environments here.
    # Emptying it disables Rails' safeguard against destructive database
    # tasks (db:drop / db:reset / db:schema:load) running against the
    # production database. If a production reset is ever genuinely needed,
    # run the task once with DISABLE_DATABASE_ENVIRONMENT_CHECK=1 instead of
    # permanently removing the protection.
  end
end
Revert "本番環境でdb:resetができるように設定変更"
This reverts commit 84e1ce31cfa7290ffb02406cbc1d73cd3b547ab5.
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RoboconDb
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Japanese is the default UI locale.
    config.i18n.default_locale = :ja
  end
end
|
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Publisher
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib #{config.root}/app/presenters)

    # Generator defaults: Mongoid for persistence (ActiveRecord's railtie is
    # not required at the top of this file), ERB templates, Test::Unit
    # without fixtures.
    config.generators do |g|
      g.orm :mongoid
      g.template_engine :erb # this could be :haml or whatever
      g.test_framework :test_unit, :fixture => false # this could be :rspec or whatever
    end

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
  end
end
require 'open-uri'
require 'builder'

# This configuration is suitable for development, it should be managed by puppet
# in production.
# TODO: Check if this is thread/forked process safe under passenger. Possible risk
# that client connections get copied when passenger forks a process but the mutexes
# protecting those connections do not.
require 'messenger'
if File.basename($0) != "rake" && !Rails.env.test?
  # The STOMP message broker lives on support.cluster, not on this host —
  # pointing at localhost leaves Messenger with no reachable broker.
  Messenger.transport = Stomp::Client.new "stomp://support.cluster:61613"
end

# NOTE(review): this monkey-patches every object in the process with
# Pethau::InitializeWith; consider including it only where needed.
Object.send :include, Pethau::InitializeWith
The broker lives on support.cluster
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Publisher
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib #{config.root}/app/presenters)

    # Generator defaults: Mongoid for persistence, ERB templates, Test::Unit
    # without fixtures.
    config.generators do |g|
      g.orm :mongoid
      g.template_engine :erb # this could be :haml or whatever
      g.test_framework :test_unit, :fixture => false # this could be :rspec or whatever
    end

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
  end
end
require 'open-uri'
require 'builder'

# This configuration is suitable for development, it should be managed by puppet
# in production.
# TODO: Check if this is thread/forked process safe under passenger. Possible risk
# that client connections get copied when passenger forks a process but the mutexes
# protecting those connections do not.
require 'messenger'
# Connect to the STOMP broker on support.cluster, except when running rake
# tasks or the test suite (no broker available there).
if File.basename($0) != "rake" && !Rails.env.test?
  Messenger.transport = Stomp::Client.new "stomp://support.cluster:61613"
end

# NOTE(review): this monkey-patches every object in the process with
# Pethau::InitializeWith; consider including it only where needed.
Object.send :include, Pethau::InitializeWith
|
require File.expand_path('../boot', __FILE__)
require "action_controller/railtie"
require "action_mailer/railtie"
require "rails/test_unit/railtie"
require "sprockets/railtie"
Bundler.require(:default, Rails.env)
module Catchlater
  class Application < Rails::Application
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib/authentication #{config.root}/lib)

    # Serve custom fonts through the asset pipeline.
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')

    # Acknowledge the I18n deprecation notice explicitly: keep the permissive
    # pre-enforcement behaviour (unavailable locales fall back silently)
    # instead of letting the setting flip on a future upgrade.
    config.i18n.enforce_available_locales = false
  end
end
Acknowledge deprecation notice
require File.expand_path('../boot', __FILE__)
require "action_controller/railtie"
require "action_mailer/railtie"
require "rails/test_unit/railtie"
require "sprockets/railtie"
Bundler.require(:default, Rails.env)
module Catchlater
  class Application < Rails::Application
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib/authentication #{config.root}/lib)

    # Serve custom fonts through the asset pipeline.
    config.assets.paths << Rails.root.join('app', 'assets', 'fonts')

    # Set explicitly to silence the I18n deprecation notice; false keeps the
    # permissive behaviour (unavailable locales fall back silently).
    config.i18n.enforce_available_locales = false
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Openfoodweb
  class Application < Rails::Application
    # Runs before every request in development (once at boot when
    # cache_classes is on) so Spree decorators and view overrides reload with
    # the rest of the code.
    config.to_prepare do
      # Load application's model / class decorators
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        # `require` caches when classes are cached; `load` re-reads on reload.
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Load application's view overrides
      Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end
    end

    # Register Spree calculators
    initializer "spree.register.calculators" do |app|
      app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Itemwise
      app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Weight
      app.config.spree.calculators.enterprise_fees = [Spree::Calculator::FlatPercentItemTotal,
                                                      Spree::Calculator::FlatRate,
                                                      Spree::Calculator::FlexiRate,
                                                      Spree::Calculator::PerItem,
                                                      Spree::Calculator::PriceSack,
                                                      OpenFoodWeb::Calculator::Weight]
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/app/presenters)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = 'en'

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Do not boot the app during `rake assets:precompile` (Heroku-friendly).
    config.assets.initialize_on_precompile = false

    # css and js files other than application.* must be listed explicitly to
    # be precompiled.
    config.assets.precompile += ['store/all.css', 'store/all.js', 'admin/all.css', 'admin/*.js', 'admin/**/*.js', 'comfortable_mexican_sofa/*']
  end
end
Turn on asset init on precompile - now precompile can find assets stored in engines (lib/chili/...), but this breaks Heroku deployment
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Openfoodweb
  class Application < Rails::Application
    # Runs before every request in development (once at boot when
    # cache_classes is on) so Spree decorators and view overrides reload with
    # the rest of the code.
    config.to_prepare do
      # Load application's model / class decorators
      Dir.glob(File.join(File.dirname(__FILE__), "../app/**/*_decorator*.rb")) do |c|
        # `require` caches when classes are cached; `load` re-reads on reload.
        Rails.configuration.cache_classes ? require(c) : load(c)
      end

      # Load application's view overrides
      Dir.glob(File.join(File.dirname(__FILE__), "../app/overrides/*.rb")) do |c|
        Rails.configuration.cache_classes ? require(c) : load(c)
      end
    end

    # Register Spree calculators
    initializer "spree.register.calculators" do |app|
      app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Itemwise
      app.config.spree.calculators.shipping_methods << OpenFoodWeb::Calculator::Weight
      app.config.spree.calculators.enterprise_fees = [Spree::Calculator::FlatPercentItemTotal,
                                                      Spree::Calculator::FlatRate,
                                                      Spree::Calculator::FlexiRate,
                                                      Spree::Calculator::PerItem,
                                                      Spree::Calculator::PriceSack,
                                                      OpenFoodWeb::Calculator::Weight]
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/app/presenters)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = 'en'

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Boot the app during `rake assets:precompile` so assets stored in
    # engines (lib/chili/...) are found.
    # NOTE(review): this is reported to break Heroku deployment, which
    # precompiles without app config available — confirm before deploying.
    config.assets.initialize_on_precompile = true

    # css and js files other than application.* must be listed explicitly to
    # be precompiled.
    config.assets.precompile += ['store/all.css', 'store/all.js', 'admin/all.css', 'admin/*.js', 'admin/**/*.js', 'comfortable_mexican_sofa/*']
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails'
require 'action_controller/railtie'
unless Rails.env.maintenance?
require 'rails/test_unit/railtie'
require 'action_mailer/railtie'
require 'active_record/railtie'
end
Bundler.require(:default, Rails.env) if defined?(Bundler)
# Per-environment settings (host, delivery method, feature toggles) loaded
# from config/rubygems.yml into a global so the middleware/config below can
# read it.
# NOTE(review): the path is relative to the working directory — assumes the
# process is always started from the project root.
$rubygems_config = YAML.load_file("config/rubygems.yml")[Rails.env].symbolize_keys
HOST = $rubygems_config[:host]

# DO NOT EDIT THIS LINE DIRECTLY
# Instead, run: bundle exec rake gemcutter:rubygems:update VERSION=[version number] RAILS_ENV=[staging|production] S3_KEY=[key] S3_SECRET=[secret]
RUBYGEMS_VERSION = "1.8.25"
module Gemcutter
  class Application < Rails::Application
    config.time_zone = "UTC"
    config.encoding = "utf-8"

    # File-serving middleware (see the Hostess class).
    config.middleware.use "Hostess"
    # Legacy-URL redirector, enabled per environment via rubygems.yml and
    # skipped for local runs (LOCAL env var set).
    config.middleware.insert_after "Hostess", "Redirector" if $rubygems_config[:redirector] && ENV["LOCAL"].nil?

    # Mailer and ActiveRecord are not loaded in the maintenance environment
    # (their railties are conditionally required at the top of this file).
    unless Rails.env.maintenance?
      config.action_mailer.default_url_options = { :host => HOST }
      config.action_mailer.delivery_method = $rubygems_config[:delivery_method]
      config.active_record.include_root_in_json = false
    end

    config.after_initialize do
      # Toggle Hostess between local-disk and remote storage per environment.
      Hostess.local = $rubygems_config[:local_storage]
    end

    config.plugins = [:dynamic_form]
    config.plugins << :heroku_asset_cacher if $rubygems_config[:asset_cacher]
    config.autoload_paths << "./app/jobs"
  end
end
Bump to 2.0.0
require File.expand_path('../boot', __FILE__)
require 'rails'
require 'action_controller/railtie'
unless Rails.env.maintenance?
require 'rails/test_unit/railtie'
require 'action_mailer/railtie'
require 'active_record/railtie'
end
Bundler.require(:default, Rails.env) if defined?(Bundler)
# Per-environment settings (host, delivery method, feature toggles) loaded
# from config/rubygems.yml into a global so the middleware/config below can
# read it.
# NOTE(review): the path is relative to the working directory — assumes the
# process is always started from the project root.
$rubygems_config = YAML.load_file("config/rubygems.yml")[Rails.env].symbolize_keys
HOST = $rubygems_config[:host]

# DO NOT EDIT THIS LINE DIRECTLY
# Instead, run: bundle exec rake gemcutter:rubygems:update VERSION=[version number] RAILS_ENV=[staging|production] S3_KEY=[key] S3_SECRET=[secret]
RUBYGEMS_VERSION = "2.0.0"
module Gemcutter
  class Application < Rails::Application
    config.time_zone = "UTC"
    config.encoding = "utf-8"

    # File-serving middleware (see the Hostess class).
    config.middleware.use "Hostess"
    # Legacy-URL redirector, enabled per environment via rubygems.yml and
    # skipped for local runs (LOCAL env var set).
    config.middleware.insert_after "Hostess", "Redirector" if $rubygems_config[:redirector] && ENV["LOCAL"].nil?

    # Mailer and ActiveRecord are not loaded in the maintenance environment
    # (their railties are conditionally required at the top of this file).
    unless Rails.env.maintenance?
      config.action_mailer.default_url_options = { :host => HOST }
      config.action_mailer.delivery_method = $rubygems_config[:delivery_method]
      config.active_record.include_root_in_json = false
    end

    config.after_initialize do
      # Toggle Hostess between local-disk and remote storage per environment.
      Hostess.local = $rubygems_config[:local_storage]
    end

    config.plugins = [:dynamic_form]
    config.plugins << :heroku_asset_cacher if $rubygems_config[:asset_cacher]
    config.autoload_paths << "./app/jobs"
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Lobsters
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/extras)

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.enforce_available_locales = true

    # Future Rails version will disable implicit joins, so we'll be prepared.
    config.active_record.disable_implicit_join_references = true

    # Raise an exception when using mass assignment with unpermitted attributes
    config.action_controller.action_on_unpermitted_parameters = :raise

    # Cache on local disk under tmp/cache.
    config.cache_store = :file_store, "#{config.root}/tmp/cache/"

    config.after_initialize do
      # Rails.application.domain is defined in the `class << Rails.application`
      # section near the bottom of this file (overridable in
      # config/initializers/production.rb).
      Rails.application.routes.default_url_options[:host] =
        Rails.application.domain
    end
  end
end
# disable yaml/xml/whatever input parsing of request bodies
# (silence_warnings suppresses the "already initialized constant" warning
# that reassigning DEFAULT_PARSERS would otherwise emit)
silence_warnings do
  ActionDispatch::ParamsParser::DEFAULT_PARSERS = {}
end
# define site name and domain to be used globally, can be overridden in
# config/initializers/production.rb
class << Rails.application
  # Canonical domain for the site.
  def domain
    "example.com"
  end

  # Human-readable site name.
  def name
    "Example News"
  end

  # Whether the site accepts requests for invitations.
  def allow_invitation_requests?
    true
  end

  # Identifier derived from the site name, used as a mailing-list prefix and
  # similar; must contain no spaces, so every character outside a-z is
  # stripped from the downcased name.
  def shortname
    lowered = name.downcase
    lowered.gsub(/[^a-z]/, "")
  end
end
require "#{Rails.root}/lib/monkey"
asset precompile
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Lobsters
  class Application < Rails::Application
    # Skip app initialization during `rake assets:precompile` (needed for
    # Heroku, which precompiles without full app config available).
    config.assets.initialize_on_precompile = false

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/extras)

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.enforce_available_locales = true

    # Future Rails version will disable implicit joins, so we'll be prepared.
    config.active_record.disable_implicit_join_references = true

    # Raise an exception when using mass assignment with unpermitted attributes
    config.action_controller.action_on_unpermitted_parameters = :raise

    # Cache on local disk under tmp/cache.
    config.cache_store = :file_store, "#{config.root}/tmp/cache/"

    config.after_initialize do
      # Rails.application.domain is defined in the `class << Rails.application`
      # section near the bottom of this file (overridable in
      # config/initializers/production.rb).
      Rails.application.routes.default_url_options[:host] =
        Rails.application.domain
    end
  end
end
# disable yaml/xml/whatever input parsing of request bodies
# (silence_warnings suppresses the "already initialized constant" warning
# that reassigning DEFAULT_PARSERS would otherwise emit)
silence_warnings do
  ActionDispatch::ParamsParser::DEFAULT_PARSERS = {}
end
# define site name and domain to be used globally, can be overridden in
# config/initializers/production.rb
class << Rails.application
  # Whether the site accepts requests for invitations.
  def allow_invitation_requests?
    true
  end

  # Canonical domain, used for routes' default_url_options above.
  def domain
    "example.com"
  end

  # Human-readable site name.
  def name
    "Example News"
  end

  # used as mailing list prefix and similar identifiers, cannot have spaces
  # (strips every character outside a-z from the downcased name)
  def shortname
    name.downcase.gsub(/[^a-z]/, "")
  end
end
require "#{Rails.root}/lib/monkey"
|
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module NewApp
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Brasilia'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = 'pt-BR'

    # Prevent initializing your application and connecting to the database
    # while precompiling assets.
    config.assets.initialize_on_precompile = false

    # SMTP delivery configuration for ActionMailer; connection details are
    # taken from the environment.
    config.action_mailer.delivery_method = :smtp
    config.action_mailer.smtp_settings = {
      address: ENV['SMTP_ADDRESS'],
      port: ENV['SMTP_PORT'] || 587,
      domain: ENV['SMTP_DOMAIN'],
      user_name: ENV['SMTP_USERNAME'],
      password: ENV['SMTP_PASSWORD'],
      authentication: :plain,
      # NOTE(review): STARTTLS is disabled, so :plain credentials travel
      # unencrypted unless the server itself wraps the connection in TLS
      # -- confirm this is intended.
      enable_starttls_auto: false
    }

    # HTML generated for form fields with errors: return the tag unchanged,
    # suppressing Rails' default .field_with_errors wrapper element.
    config.action_view.field_error_proc = Proc.new do |html_tag|
      html_tag.html_safe
    end

    # Be sure to have the adapter's gem in your Gemfile
    # and follow the adapter's specific installation
    # and deployment instructions.
    config.active_job.queue_adapter = :sidekiq
  end
end
Alterando configuração de SMTP
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module NewApp
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Brasilia'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = 'pt-BR'

    # Prevent initializing your application and connecting to the database
    # while precompiling assets.
    config.assets.initialize_on_precompile = false

    # SMTP delivery configuration for ActionMailer; connection details are
    # taken from the environment. STARTTLS is enabled, so the connection is
    # upgraded to TLS before :plain credentials are sent.
    config.action_mailer.delivery_method = :smtp
    config.action_mailer.smtp_settings = {
      address: ENV['SMTP_ADDRESS'],
      port: ENV['SMTP_PORT'] || 587,
      domain: ENV['SMTP_DOMAIN'],
      user_name: ENV['SMTP_USERNAME'],
      password: ENV['SMTP_PASSWORD'],
      authentication: :plain,
      enable_starttls_auto: true
    }

    # HTML generated for form fields with errors: return the tag unchanged,
    # suppressing Rails' default .field_with_errors wrapper element.
    config.action_view.field_error_proc = Proc.new do |html_tag|
      html_tag.html_safe
    end

    # Be sure to have the adapter's gem in your Gemfile
    # and follow the adapter's specific installation
    # and deployment instructions.
    config.active_job.queue_adapter = :sidekiq
  end
end
|
require_relative 'boot'

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Francecul
  class Application < Rails::Application
    config.assets.enabled = true
    # Serve custom fonts through the asset pipeline.
    config.assets.paths << "#{Rails.root}/app/assets/fonts"
    config.eager_load_paths += %W(#{config.root}/lib)

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # CORS headers added to every response, restricted to the site's origin.
    # NOTE(review): 'Access-Control-Request-Method' is a *request* header;
    # the response header is 'Access-Control-Allow-Methods' -- confirm which
    # was intended.
    config.action_dispatch.default_headers = {
      'Access-Control-Allow-Origin' => 'http://www.francecul.party',
      'Access-Control-Request-Method' => %w{GET POST OPTIONS}.join(",")
    }
  end
end
fix API address
require_relative 'boot'

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Francecul
  class Application < Rails::Application
    config.assets.enabled = true
    # Serve custom fonts through the asset pipeline.
    config.assets.paths << "#{Rails.root}/app/assets/fonts"
    config.eager_load_paths += %W(#{config.root}/lib)

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # CORS headers added to every response.
    # NOTE(review): the wildcard origin allows any site to make cross-origin
    # requests to this API -- confirm this exposure is acceptable. Also,
    # 'Access-Control-Request-Method' is a *request* header; the response
    # header is 'Access-Control-Allow-Methods'.
    config.action_dispatch.default_headers = {
      'Access-Control-Allow-Origin' => '*',
      'Access-Control-Request-Method' => %w{GET POST OPTIONS}.join(",")
    }
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails'
# Pick the frameworks you want:
require 'active_model/railtie'
require 'active_job/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
require 'action_mailer/railtie'
require 'action_view/railtie'
require 'sprockets/railtie'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module Undp
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Models live in domain-grouped subdirectories; make each autoloadable.
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'users')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'actors')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'acts')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'localizations')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'categories')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'search')]

    # NOTE(review): evaluated while this class body loads, before eager
    # loading, so ActiveRecord::Base.descendants may not yet include every
    # model -- confirm this captures what consumers of included_models expect.
    config.included_models = ActiveRecord::Base.descendants.map!(&:name)

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.time_zone = 'Europe/Madrid'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.assets.initialize_on_precompile = true
    config.i18n.available_locales = [:en]
    config.i18n.default_locale = :en
    config.i18n.fallbacks = true

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # Generator defaults: RSpec with Slim templates, no per-resource
    # view/helper/asset scaffolding.
    config.generators do |g|
      g.test_framework :rspec
      g.view_specs false
      g.helper_specs false
      g.factory_girl false
      g.template_engine :slim
      g.stylesheets false
      g.javascripts false
      g.helper false
    end
  end
end
Enable GC::Profiler for New Relic
require File.expand_path('../boot', __FILE__)

require 'rails'
# Pick the frameworks you want:
require 'active_model/railtie'
require 'active_job/railtie'
require 'active_record/railtie'
require 'action_controller/railtie'
require 'action_mailer/railtie'
require 'action_view/railtie'
require 'sprockets/railtie'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

# Collect GC timing statistics so the monitoring agent can report them.
GC::Profiler.enable

module Undp
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Models live in domain-grouped subdirectories; make each autoloadable.
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'users')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'actors')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'acts')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'localizations')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'categories')]
    config.autoload_paths += Dir[Rails.root.join('app', 'models', 'search')]

    # NOTE(review): evaluated while this class body loads, before eager
    # loading, so ActiveRecord::Base.descendants may not yet include every
    # model -- confirm this captures what consumers of included_models expect.
    config.included_models = ActiveRecord::Base.descendants.map!(&:name)

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.time_zone = 'Europe/Madrid'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.assets.initialize_on_precompile = true
    config.i18n.available_locales = [:en]
    config.i18n.default_locale = :en
    config.i18n.fallbacks = true

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # Generator defaults: RSpec with Slim templates, no per-resource
    # view/helper/asset scaffolding.
    config.generators do |g|
      g.test_framework :rspec
      g.view_specs false
      g.helper_specs false
      g.factory_girl false
      g.template_engine :slim
      g.stylesheets false
      g.javascripts false
      g.helper false
    end
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module Twinenyc
  # Application configuration: everything is left at framework defaults.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
add video folder to asset pipeline
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module Twinenyc
  class Application < Rails::Application
    # Serve videos through the asset pipeline alongside the default asset
    # directories.
    config.assets.paths << "#{Rails.root}/app/assets/videos"

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
|
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module Poichecker
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Berlin'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :de

    # NOTE(review): this glob adds individual .rb FILES to autoload_paths,
    # which conventionally expects directories -- confirm intentional.
    config.autoload_paths += Dir["#{config.root}/lib/**/*.rb"]

    # Needed for the ActiveAdmin's manifest assets.
    config.assets.precompile += ['map.js']
  end
end
Precompile locate_me.js
require File.expand_path('../boot', __FILE__)

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

module Poichecker
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Berlin'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :de

    # NOTE(review): this glob adds individual .rb FILES to autoload_paths,
    # which conventionally expects directories -- confirm intentional.
    config.autoload_paths += Dir["#{config.root}/lib/**/*.rb"]

    # Needed for the ActiveAdmin's manifest assets.
    config.assets.precompile += ['map.js', 'locate_me.js']
  end
end
|
require 'rexml/document'
require 'rest_client'
require 'uri'
require 'time'
require 'json'
# A Ruby class to call the Heroku REST API. You might use this if you want to
# manage your Heroku apps from within a Ruby program, such as Capistrano.
#
# Example:
#
# require 'heroku'
# heroku = Heroku::Client.new('me@example.com', 'mypass')
# heroku.create('myapp')
#
class Heroku::Client
  # Gem version, reported to the API via the User-Agent header.
  def self.version
    '1.8.2'
  end

  def self.gem_version_string
    "heroku-gem/#{version}"
  end

  attr_reader :host, :user, :password

  # Credentials are used for HTTP basic auth on every request; +host+ is
  # the bare API domain (requests go to https://api.<host>).
  def initialize(user, password, host='heroku.com')
    @user = user
    @password = password
    @host = host
  end

  # Show a list of apps which you are a collaborator on.
  # Returns an array of [name, owner_email] pairs.
  def list
    doc = xml(get('/apps'))
    doc.elements.to_a("//apps/app").map do |a|
      name = a.elements.to_a("name").first
      owner = a.elements.to_a("owner").first
      [name.text, owner.text]
    end
  end

  # Show info such as mode, custom domain, and collaborators on an app.
  # Accepts an app name or its domain (any http://www. prefix is stripped);
  # returns a hash of symbolized attributes plus :collaborators and :addons.
  def info(name_or_domain)
    name_or_domain = name_or_domain.gsub(/^(http:\/\/)?(www\.)?/, '')
    doc = xml(get("/apps/#{name_or_domain}"))
    attrs = doc.elements.to_a('//app/*').inject({}) do |hash, element|
      hash[element.name.gsub(/-/, '_').to_sym] = element.text; hash
    end
    attrs.merge!(:collaborators => list_collaborators(attrs[:name]))
    attrs.merge!(:addons => installed_addons(attrs[:name]))
  end

  # Create a new app, with an optional name. Polls once per second until
  # the API reports creation complete, then returns the app name.
  def create(name=nil, options={})
    name = create_request(name, options)
    loop do
      break if create_complete?(name)
      sleep 1
    end
    name
  end

  # Fire the creation request and return the (possibly server-assigned) name.
  def create_request(name=nil, options={})
    options[:name] = name if name
    xml(post('/apps', :app => options)).elements["//app/name"].text
  end

  # True once the status endpoint answers 201 for the newly created app.
  def create_complete?(name)
    put("/apps/#{name}/status", {}).code == 201
  end

  # Update an app.  Available attributes:
  #   :name => rename the app (changes http and git urls)
  def update(name, attributes)
    put("/apps/#{name}", :app => attributes)
  end

  # Destroy the app permanently.
  def destroy(name)
    delete("/apps/#{name}")
  end

  # Get a list of collaborators on the app, returns an array of hashes each with :email
  def list_collaborators(app_name)
    doc = xml(get("/apps/#{app_name}/collaborators"))
    doc.elements.to_a("//collaborators/collaborator").map do |a|
      { :email => a.elements['email'].text }
    end
  end

  # Invite a person by email address to collaborate on the app.
  # A 422 (e.g. already invited) is tolerated and its response returned.
  def add_collaborator(app_name, email)
    xml(post("/apps/#{app_name}/collaborators", { 'collaborator[email]' => email }))
  rescue RestClient::RequestFailed => e
    raise e unless e.http_code == 422
    e.response
  end

  # Remove a collaborator.
  def remove_collaborator(app_name, email)
    delete("/apps/#{app_name}/collaborators/#{escape(email)}")
  end

  # List custom domains; each entry has :domain and, when an SSL cert is
  # attached, a :cert hash with :expires_at, :subject and :issuer.
  def list_domains(app_name)
    doc = xml(get("/apps/#{app_name}/domains"))
    doc.elements.to_a("//domain-names/*").map do |d|
      attrs = { :domain => d.elements['domain'].text }
      if cert = d.elements['cert']
        attrs[:cert] = {
          :expires_at => Time.parse(cert.elements['expires-at'].text),
          :subject => cert.elements['subject'].text,
          :issuer => cert.elements['issuer'].text,
        }
      end
      attrs
    end
  end

  # Attach a custom domain to the app.
  def add_domain(app_name, domain)
    post("/apps/#{app_name}/domains", domain)
  end

  # Remove one custom domain.
  def remove_domain(app_name, domain)
    delete("/apps/#{app_name}/domains/#{domain}")
  end

  # Remove all custom domains.
  def remove_domains(app_name)
    delete("/apps/#{app_name}/domains")
  end

  # Upload an SSL cert (PEM) and private key for the app.
  def add_ssl(app_name, pem, key)
    JSON.parse(post("/apps/#{app_name}/ssl", :pem => pem, :key => key))
  end

  # Remove SSL from one of the app's domains.
  def remove_ssl(app_name, domain)
    delete("/apps/#{app_name}/domains/#{domain}/ssl")
  end

  # Get the list of ssh public keys for the current user.
  def keys
    doc = xml get('/user/keys')
    doc.elements.to_a('//keys/key').map do |key|
      key.elements['contents'].text
    end
  end

  # Add an ssh public key to the current user.
  def add_key(key)
    post("/user/keys", key, { 'Content-Type' => 'text/ssh-authkey' })
  end

  # Remove an existing ssh public key from the current user.
  def remove_key(key)
    delete("/user/keys/#{escape(key)}")
  end

  # Clear all keys on the current user.
  def remove_all_keys
    delete("/user/keys")
  end

  # Get a list of stacks available to the app, with the current one marked.
  def list_stacks(app_name)
    JSON.parse resource("/apps/#{app_name}/stack").get(:accept => 'application/json')
  end

  # Request a stack migration.
  def migrate_to_stack(app_name, stack)
    resource("/apps/#{app_name}/stack").put(stack, :accept => 'text/plain')
  end

  # Raised when the remote app process returns a 502 (crashed).
  class AppCrashed < RuntimeError; end

  # Run a rake command on the Heroku app and return all output as
  # a string.
  def rake(app_name, cmd)
    # (attached=true is just a named positional argument for readability;
    # it also creates an unused local.)
    start(app_name, "rake #{cmd}", attached=true).to_s
  end

  # support for console sessions
  class ConsoleSession
    def initialize(id, app, client)
      @id = id; @app = app; @client = client
    end

    # Run a single command in this console session; output lines are
    # prefixed for display by run_console_command.
    def run(cmd)
      @client.run_console_command("/apps/#{@app}/consoles/#{@id}/command", cmd, "=> ")
    end
  end

  # Execute a one-off console command, or start a new console tty session if
  # cmd is nil.  With a block, opens a session, yields it, and closes it.
  def console(app_name, cmd=nil)
    if block_given?
      id = post("/apps/#{app_name}/consoles")
      yield ConsoleSession.new(id, app_name, self)
      delete("/apps/#{app_name}/consoles/#{id}")
    else
      run_console_command("/apps/#{app_name}/console", cmd)
    end
  rescue RestClient::RequestFailed => e
    raise(AppCrashed, e.response.to_s) if e.response.code.to_i == 502
    raise e
  end

  # internal method to run console commands formatting the output;
  # when +prefix+ is given it is prepended to the final output line.
  # A 422 response is tolerated and its body returned as the output.
  def run_console_command(url, command, prefix=nil)
    output = post(url, command)
    return output unless prefix
    if output.include?("\n")
      lines = output.split("\n")
      (lines[0..-2] << "#{prefix}#{lines.last}").join("\n")
    else
      prefix + output
    end
  rescue RestClient::RequestFailed => e
    raise e unless e.http_code == 422
    e.http_body
  end

  # Handle to a remote service process; streams its output chunk by chunk
  # by following the Location header returned with each read.
  class Service
    attr_accessor :attached, :upid

    def initialize(client, app, upid=nil)
      @client = client
      @app = app
      @upid = upid
    end

    # start the service
    def start(command, attached=false)
      @attached = attached
      @response = @client.post(
        "/apps/#{@app}/services",
        command,
        :content_type => 'text/plain'
      )
      @next_chunk = @response
      @interval = 0
      self
    rescue RestClient::RequestFailed => e
      raise AppCrashed, e.http_body if e.http_code == 502
      raise
    end

    # Transition the service to another state ('up', 'down', 'bounce').
    def transition(action)
      @response = @client.put(
        "/apps/#{@app}/services/#{@upid}",
        action,
        :content_type => 'text/plain'
      )
      self
    rescue RestClient::RequestFailed => e
      raise AppCrashed, e.http_body if e.http_code == 502
      raise
    end

    def down   ; transition('down')   ; end
    def up     ; transition('up')     ; end
    def bounce ; transition('bounce') ; end

    # Does the service have any remaining output?
    def end_of_stream?
      @next_chunk.nil?
    end

    # Read the next chunk of output.
    def read
      chunk = @client.get(@next_chunk)
      if chunk.nil? or chunk == ''
        # assume no content and back off
        @interval = 2
        ''
      elsif location = chunk.headers[:location]
        # some data read and next chunk available
        @next_chunk = location
        @interval = 0
        chunk
      else
        # no more chunks
        @next_chunk = nil
        chunk
      end
    end

    # Iterate over all output chunks until EOF is reached.
    def each
      until end_of_stream?
        sleep(@interval)
        output = read
        yield output unless output.empty?
      end
    end

    # All output as a string
    def to_s
      buf = []
      each { |part| buf << part }
      buf.join
    end
  end

  # Retrieve ps list for the given app name.
  def ps(app_name)
    JSON.parse resource("/apps/#{app_name}/ps").get(:accept => 'application/json')
  end

  # Run a service.  The returned Service responds to #each and yields
  # output as it's received.
  def start(app_name, command, attached=false)
    service = Service.new(self, app_name)
    service.start(command, attached)
  end

  # Get a Service instance to execute commands against.
  def service(app_name, upid)
    Service.new(self, app_name, upid)
  end

  # Bring a service up.
  def up(app_name, upid)
    service(app_name, upid).up
  end

  # Bring a service down.
  def down(app_name, upid)
    service(app_name, upid).down
  end

  # Bounce a service.
  def bounce(app_name, upid)
    service(app_name, upid).bounce
  end

  # Restart the app servers.
  def restart(app_name)
    delete("/apps/#{app_name}/server")
  end

  # Fetch recent logs from the app server.
  def logs(app_name)
    get("/apps/#{app_name}/logs")
  end

  # Fetch recent cron logs from the app server.
  def cron_logs(app_name)
    get("/apps/#{app_name}/cron_logs")
  end

  # Scales the web processes; returns the new dyno count.
  def set_dynos(app_name, qty)
    put("/apps/#{app_name}/dynos", :dynos => qty).to_i
  end

  # Scales the background processes; returns the new worker count.
  def set_workers(app_name, qty)
    put("/apps/#{app_name}/workers", :workers => qty).to_i
  end

  # Capture a bundle from the given app, as a backup or for download.
  # Returns the name assigned to the bundle.
  def bundle_capture(app_name, bundle_name=nil)
    xml(post("/apps/#{app_name}/bundles", :bundle => { :name => bundle_name })).elements["//bundle/name"].text
  end

  def bundle_destroy(app_name, bundle_name)
    delete("/apps/#{app_name}/bundles/#{bundle_name}")
  end

  # Get a temporary URL where the bundle can be downloaded.
  # If bundle_name is nil it will use the most recently captured bundle for the app
  def bundle_url(app_name, bundle_name=nil)
    bundle = JSON.parse(get("/apps/#{app_name}/bundles/#{bundle_name || 'latest'}", { :accept => 'application/json' }))
    bundle['temporary_url']
  end

  # Deprecated: download a bundle to a local file; use bundle_url instead.
  def bundle_download(app_name, fname, bundle_name=nil)
    warn "[DEPRECATION] `bundle_download` is deprecated. Please use `bundle_url` instead"
    data = RestClient.get(bundle_url(app_name, bundle_name))
    File.open(fname, "wb") { |f| f.write data }
  end

  # Get a list of bundles of the app.
  def bundles(app_name)
    doc = xml(get("/apps/#{app_name}/bundles"))
    doc.elements.to_a("//bundles/bundle").map do |a|
      {
        :name => a.elements['name'].text,
        :state => a.elements['state'].text,
        :created_at => Time.parse(a.elements['created-at'].text),
      }
    end
  end

  def config_vars(app_name)
    JSON.parse get("/apps/#{app_name}/config_vars")
  end

  def add_config_vars(app_name, new_vars)
    put("/apps/#{app_name}/config_vars", new_vars.to_json)
  end

  def remove_config_var(app_name, key)
    delete("/apps/#{app_name}/config_vars/#{key}")
  end

  def clear_config_vars(app_name)
    delete("/apps/#{app_name}/config_vars")
  end

  # All add-ons available on the platform.
  def addons
    JSON.parse get("/addons", :accept => 'application/json')
  end

  # Add-ons installed on one app.
  def installed_addons(app_name)
    JSON.parse get("/apps/#{app_name}/addons", :accept => 'application/json')
  end

  def install_addon(app_name, addon, config={})
    post("/apps/#{app_name}/addons/#{escape(addon)}", { :config => config }, :accept => 'application/json')
  end

  def uninstall_addon(app_name, addon)
    delete("/apps/#{app_name}/addons/#{escape(addon)}", :accept => 'application/json')
  end

  def confirm_billing
    post("/user/#{escape(@user)}/confirm_billing")
  end

  # Register a callback invoked (once per distinct message) when the API
  # sends an X-Heroku-Warning header; see extract_warning.
  def on_warning(&blk)
    @warning_callback = blk
  end

  ##################

  # Build an authenticated RestClient resource.  Absolute http(s) URIs are
  # used as-is; bare paths are joined onto https://api.<host>.
  def resource(uri)
    RestClient.proxy = ENV['HTTP_PROXY']
    if uri =~ /^https?/
      RestClient::Resource.new(uri, user, password)
    else
      RestClient::Resource.new("https://api.#{host}", user, password)[uri]
    end
  end

  def get(uri, extra_headers={})    # :nodoc:
    process(:get, uri, extra_headers)
  end

  def post(uri, payload="", extra_headers={})    # :nodoc:
    process(:post, uri, extra_headers, payload)
  end

  def put(uri, payload, extra_headers={})    # :nodoc:
    process(:put, uri, extra_headers, payload)
  end

  def delete(uri, extra_headers={})    # :nodoc:
    process(:delete, uri, extra_headers)
  end

  # Shared request pipeline: merge standard headers, dispatch, and surface
  # any server-sent warning before returning the response.
  def process(method, uri, extra_headers={}, payload=nil)
    headers  = heroku_headers.merge(extra_headers)
    args     = [method, payload, headers].compact
    response = resource(uri).send(*args)
    extract_warning(response)
    response
  end

  # Invoke the on_warning callback for an X-Heroku-Warning header,
  # de-duplicating so each distinct message is shown only once.
  def extract_warning(response)
    return unless response
    if response.headers[:x_heroku_warning] && @warning_callback
      warning = response.headers[:x_heroku_warning]
      @displayed_warnings ||= {}
      unless @displayed_warnings[warning]
        @warning_callback.call(warning)
        @displayed_warnings[warning] = true
      end
    end
  end

  def heroku_headers   # :nodoc:
    {
      'X-Heroku-API-Version' => '2',
      'User-Agent'           => self.class.gem_version_string,
    }
  end

  def xml(raw)   # :nodoc:
    REXML::Document.new(raw)
  end

  def escape(value)  # :nodoc:
    # NOTE(review): URI.escape is deprecated (removed in Ruby 3) -- would
    # need URI::DEFAULT_PARSER.escape or CGI.escape on a modern Ruby.
    escaped = URI.escape(value.to_s, Regexp.new("[^#{URI::PATTERN::UNRESERVED}]"))
    escaped.gsub('.', '%2E') # not covered by the previous URI.escape
  end

  def database_session(app_name)
    post("/apps/#{app_name}/database/session", '')
  end

  def database_reset(app_name)
    post("/apps/#{app_name}/database/reset", '')
  end

  # Toggle maintenance mode; +mode+ is :on or anything else for off.
  def maintenance(app_name, mode)
    mode = mode == :on ? '1' : '0'
    post("/apps/#{app_name}/server/maintenance", :maintenance_mode => mode)
  end
end
fixed rest-client, this change is useless
Signed-off-by: David Dollar <9dff823b3a3030b0213f758941c16b15e3288e92@gmail.com>
require 'rexml/document'
require 'rest_client'
require 'uri'
require 'time'
require 'json'
# A Ruby class to call the Heroku REST API. You might use this if you want to
# manage your Heroku apps from within a Ruby program, such as Capistrano.
#
# Example:
#
# require 'heroku'
# heroku = Heroku::Client.new('me@example.com', 'mypass')
# heroku.create('myapp')
#
class Heroku::Client
def self.version
'1.8.2'
end
def self.gem_version_string
"heroku-gem/#{version}"
end
attr_reader :host, :user, :password
def initialize(user, password, host='heroku.com')
@user = user
@password = password
@host = host
end
# Show a list of apps which you are a collaborator on.
def list
doc = xml(get('/apps'))
doc.elements.to_a("//apps/app").map do |a|
name = a.elements.to_a("name").first
owner = a.elements.to_a("owner").first
[name.text, owner.text]
end
end
# Show info such as mode, custom domain, and collaborators on an app.
def info(name_or_domain)
name_or_domain = name_or_domain.gsub(/^(http:\/\/)?(www\.)?/, '')
doc = xml(get("/apps/#{name_or_domain}"))
attrs = doc.elements.to_a('//app/*').inject({}) do |hash, element|
hash[element.name.gsub(/-/, '_').to_sym] = element.text; hash
end
attrs.merge!(:collaborators => list_collaborators(attrs[:name]))
attrs.merge!(:addons => installed_addons(attrs[:name]))
end
# Create a new app, with an optional name.
def create(name=nil, options={})
name = create_request(name, options)
loop do
break if create_complete?(name)
sleep 1
end
name
end
def create_request(name=nil, options={})
options[:name] = name if name
xml(post('/apps', :app => options)).elements["//app/name"].text
end
def create_complete?(name)
put("/apps/#{name}/status", {}).code == 201
end
# Update an app. Available attributes:
# :name => rename the app (changes http and git urls)
def update(name, attributes)
put("/apps/#{name}", :app => attributes)
end
# Destroy the app permanently.
def destroy(name)
delete("/apps/#{name}")
end
# Get a list of collaborators on the app, returns an array of hashes each with :email
def list_collaborators(app_name)
doc = xml(get("/apps/#{app_name}/collaborators"))
doc.elements.to_a("//collaborators/collaborator").map do |a|
{ :email => a.elements['email'].text }
end
end
# Invite a person by email address to collaborate on the app.
def add_collaborator(app_name, email)
xml(post("/apps/#{app_name}/collaborators", { 'collaborator[email]' => email }))
rescue RestClient::RequestFailed => e
raise e unless e.http_code == 422
e.response
end
# Remove a collaborator.
def remove_collaborator(app_name, email)
delete("/apps/#{app_name}/collaborators/#{escape(email)}")
end
def list_domains(app_name)
doc = xml(get("/apps/#{app_name}/domains"))
doc.elements.to_a("//domain-names/*").map do |d|
attrs = { :domain => d.elements['domain'].text }
if cert = d.elements['cert']
attrs[:cert] = {
:expires_at => Time.parse(cert.elements['expires-at'].text),
:subject => cert.elements['subject'].text,
:issuer => cert.elements['issuer'].text,
}
end
attrs
end
end
def add_domain(app_name, domain)
post("/apps/#{app_name}/domains", domain)
end
def remove_domain(app_name, domain)
delete("/apps/#{app_name}/domains/#{domain}")
end
def remove_domains(app_name)
delete("/apps/#{app_name}/domains")
end
def add_ssl(app_name, pem, key)
JSON.parse(post("/apps/#{app_name}/ssl", :pem => pem, :key => key))
end
def remove_ssl(app_name, domain)
delete("/apps/#{app_name}/domains/#{domain}/ssl")
end
# Get the list of ssh public keys for the current user.
def keys
doc = xml get('/user/keys')
doc.elements.to_a('//keys/key').map do |key|
key.elements['contents'].text
end
end
# Add an ssh public key to the current user.
def add_key(key)
post("/user/keys", key, { 'Content-Type' => 'text/ssh-authkey' })
end
# Remove an existing ssh public key from the current user.
def remove_key(key)
delete("/user/keys/#{escape(key)}")
end
# Clear all keys on the current user.
def remove_all_keys
delete("/user/keys")
end
# Get a list of stacks available to the app, with the current one marked.
def list_stacks(app_name)
JSON.parse resource("/apps/#{app_name}/stack").get(:accept => 'application/json')
end
# Request a stack migration.
def migrate_to_stack(app_name, stack)
resource("/apps/#{app_name}/stack").put(stack, :accept => 'text/plain')
end
class AppCrashed < RuntimeError; end
# Run a rake command on the Heroku app and return all output as
# a string.
def rake(app_name, cmd)
start(app_name, "rake #{cmd}", attached=true).to_s
end
# support for console sessions
class ConsoleSession
def initialize(id, app, client)
@id = id; @app = app; @client = client
end
def run(cmd)
@client.run_console_command("/apps/#{@app}/consoles/#{@id}/command", cmd, "=> ")
end
end
# Execute a one-off console command, or start a new console tty session if
# cmd is nil.
def console(app_name, cmd=nil)
if block_given?
id = post("/apps/#{app_name}/consoles")
yield ConsoleSession.new(id, app_name, self)
delete("/apps/#{app_name}/consoles/#{id}")
else
run_console_command("/apps/#{app_name}/console", cmd)
end
rescue RestClient::RequestFailed => e
raise(AppCrashed, e.response) if e.response.code.to_i == 502
raise e
end
# internal method to run console commands formatting the output
def run_console_command(url, command, prefix=nil)
output = post(url, command)
return output unless prefix
if output.include?("\n")
lines = output.split("\n")
(lines[0..-2] << "#{prefix}#{lines.last}").join("\n")
else
prefix + output
end
rescue RestClient::RequestFailed => e
raise e unless e.http_code == 422
e.http_body
end
# Wraps a long-running remote process ("service") and streams its output
# back in chunks via follow-up GET requests.
class Service
attr_accessor :attached, :upid
def initialize(client, app, upid=nil)
@client = client
@app = app
@upid = upid
end
# start the service; returns self so output can be streamed immediately.
def start(command, attached=false)
@attached = attached
@response = @client.post(
"/apps/#{@app}/services",
command,
:content_type => 'text/plain'
)
# The POST response doubles as the location of the first output chunk.
@next_chunk = @response
@interval = 0
self
rescue RestClient::RequestFailed => e
raise AppCrashed, e.http_body if e.http_code == 502
raise
end
# Transition the remote process to the given state ('up'/'down'/'bounce').
def transition(action)
@response = @client.put(
"/apps/#{@app}/services/#{@upid}",
action,
:content_type => 'text/plain'
)
self
rescue RestClient::RequestFailed => e
raise AppCrashed, e.http_body if e.http_code == 502
raise
end
def down ; transition('down') ; end
def up ; transition('up') ; end
def bounce ; transition('bounce') ; end
# Does the service have any remaining output?
def end_of_stream?
@next_chunk.nil?
end
# Read the next chunk of output.
def read
chunk = @client.get(@next_chunk)
if chunk.nil? or chunk == ''
# assume no content and back off (the next poll sleeps 2 seconds)
@interval = 2
''
elsif location = chunk.headers[:location]
# some data read and next chunk available at the Location header
@next_chunk = location
@interval = 0
chunk
else
# no more chunks
@next_chunk = nil
chunk
end
end
# Iterate over all output chunks until EOF is reached, sleeping
# @interval seconds between polls and skipping empty chunks.
def each
until end_of_stream?
sleep(@interval)
output = read
yield output unless output.empty?
end
end
# All output as a string
def to_s
buf = []
each { |part| buf << part }
buf.join
end
end
# Retrieve the ps list for the given app name.
def ps(app_name)
JSON.parse resource("/apps/#{app_name}/ps").get(:accept => 'application/json')
end
# Run a service. The returned Service responds to #each and yields output
# as it's received.
def start(app_name, command, attached=false)
service = Service.new(self, app_name)
service.start(command, attached)
end
# Get a Service instance to execute commands against an existing process.
def service(app_name, upid)
Service.new(self, app_name, upid)
end
# Bring a service up.
def up(app_name, upid)
service(app_name, upid).up
end
# Bring a service down.
def down(app_name, upid)
service(app_name, upid).down
end
# Bounce a service.
def bounce(app_name, upid)
service(app_name, upid).bounce
end
# Restart the app servers.
def restart(app_name)
delete("/apps/#{app_name}/server")
end
# Fetch recent logs from the app server.
def logs(app_name)
get("/apps/#{app_name}/logs")
end
# Fetch recent cron logs from the app server.
def cron_logs(app_name)
get("/apps/#{app_name}/cron_logs")
end
# Scales the web processes. Returns the new quantity as an Integer.
def set_dynos(app_name, qty)
put("/apps/#{app_name}/dynos", :dynos => qty).to_i
end
# Scales the background processes. Returns the new quantity as an Integer.
def set_workers(app_name, qty)
put("/apps/#{app_name}/workers", :workers => qty).to_i
end
# Capture a bundle from the given app, as a backup or for download.
# Returns the name of the captured bundle, parsed from the XML response.
def bundle_capture(app_name, bundle_name=nil)
xml(post("/apps/#{app_name}/bundles", :bundle => { :name => bundle_name })).elements["//bundle/name"].text
end
# Delete a previously captured bundle.
def bundle_destroy(app_name, bundle_name)
delete("/apps/#{app_name}/bundles/#{bundle_name}")
end
# Get a temporary URL where the bundle can be downloaded.
# If bundle_name is nil it will use the most recently captured bundle for the app
def bundle_url(app_name, bundle_name=nil)
bundle = JSON.parse(get("/apps/#{app_name}/bundles/#{bundle_name || 'latest'}", { :accept => 'application/json' }))
bundle['temporary_url']
end
# @deprecated Use #bundle_url and fetch the file yourself.
def bundle_download(app_name, fname, bundle_name=nil)
warn "[DEPRECATION] `bundle_download` is deprecated. Please use `bundle_url` instead"
data = RestClient.get(bundle_url(app_name, bundle_name))
File.open(fname, "wb") { |f| f.write data }
end
# Get a list of bundles of the app.
# Returns an array of hashes with :name, :state and :created_at keys.
# NOTE(review): Time.parse needs `require 'time'` — assumed loaded elsewhere.
def bundles(app_name)
doc = xml(get("/apps/#{app_name}/bundles"))
doc.elements.to_a("//bundles/bundle").map do |a|
{
:name => a.elements['name'].text,
:state => a.elements['state'].text,
:created_at => Time.parse(a.elements['created-at'].text),
}
end
end
# Fetch the app's config vars as a Hash.
def config_vars(app_name)
JSON.parse get("/apps/#{app_name}/config_vars")
end
# Merge new_vars (a Hash) into the app's config vars.
def add_config_vars(app_name, new_vars)
put("/apps/#{app_name}/config_vars", new_vars.to_json)
end
# Delete a single config var by key.
def remove_config_var(app_name, key)
delete("/apps/#{app_name}/config_vars/#{key}")
end
# Delete all config vars on the app.
def clear_config_vars(app_name)
delete("/apps/#{app_name}/config_vars")
end
# List all add-ons available on the platform.
def addons
JSON.parse get("/addons", :accept => 'application/json')
end
# List add-ons currently installed on the given app.
def installed_addons(app_name)
JSON.parse get("/apps/#{app_name}/addons", :accept => 'application/json')
end
# Install an add-on, optionally passing provider-specific config.
def install_addon(app_name, addon, config={})
post("/apps/#{app_name}/addons/#{escape(addon)}", { :config => config }, :accept => 'application/json')
end
# Remove an add-on from the app.
def uninstall_addon(app_name, addon)
delete("/apps/#{app_name}/addons/#{escape(addon)}", :accept => 'application/json')
end
# Confirm that paid resources may be billed to the current user.
def confirm_billing
post("/user/#{escape(@user)}/confirm_billing")
end
# Register a callback invoked (once per distinct message) whenever the
# API responds with an X-Heroku-Warning header.
def on_warning(&blk)
@warning_callback = blk
end
##################
# Build a RestClient resource for the given uri. Absolute http(s) uris
# are used as-is; relative paths are resolved against https://api.<host>.
# NOTE: also (re)sets the process-wide RestClient proxy from HTTP_PROXY
# on every call.
def resource(uri)
RestClient.proxy = ENV['HTTP_PROXY']
if uri =~ /^https?/
RestClient::Resource.new(uri, user, password)
else
RestClient::Resource.new("https://api.#{host}", user, password)[uri]
end
end
# Thin HTTP verb helpers; all requests funnel through #process.
def get(uri, extra_headers={}) # :nodoc:
process(:get, uri, extra_headers)
end
def post(uri, payload="", extra_headers={}) # :nodoc:
process(:post, uri, extra_headers, payload)
end
def put(uri, payload, extra_headers={}) # :nodoc:
process(:put, uri, extra_headers, payload)
end
def delete(uri, extra_headers={}) # :nodoc:
process(:delete, uri, extra_headers)
end
# Perform the HTTP request and surface any X-Heroku-Warning header.
# The .compact drops the nil payload for GET/DELETE so RestClient's
# verb methods receive the right number of arguments.
def process(method, uri, extra_headers={}, payload=nil)
headers = heroku_headers.merge(extra_headers)
args = [method, payload, headers].compact
response = resource(uri).send(*args)
extract_warning(response)
response
end
# Invoke the registered warning callback for an X-Heroku-Warning header,
# but only the first time each distinct warning message is seen.
def extract_warning(response)
  return unless response
  warning = response.headers[:x_heroku_warning]
  return unless warning && @warning_callback
  @displayed_warnings ||= {}
  return if @displayed_warnings[warning]
  @warning_callback.call(warning)
  @displayed_warnings[warning] = true
end
# Headers sent with every API request (API version pin + client UA).
def heroku_headers # :nodoc:
{
'X-Heroku-API-Version' => '2',
'User-Agent' => self.class.gem_version_string,
}
end
# Parse a raw XML string into a REXML document.
def xml(raw) # :nodoc:
REXML::Document.new(raw)
end
# URI-escape everything outside RFC 2396 "unreserved", then additionally
# percent-encode "." (it is unreserved, so the parser leaves it alone,
# but the API's route matching needs it encoded).
def escape(value) # :nodoc:
  # URI.escape was deprecated in Ruby 2.7 and removed in 3.0; its exact
  # behaviour is provided by URI::DEFAULT_PARSER#escape.
  escaped = URI::DEFAULT_PARSER.escape(value.to_s, Regexp.new("[^#{URI::PATTERN::UNRESERVED}]"))
  escaped.gsub('.', '%2E') # not covered by the escape above
end
# Open a database session for the app; returns the API's session details.
def database_session(app_name)
post("/apps/#{app_name}/database/session", '')
end
# Reset (wipe) the app's database.
def database_reset(app_name)
post("/apps/#{app_name}/database/reset", '')
end
# Toggle maintenance mode; mode is :on, anything else turns it off.
def maintenance(app_name, mode)
mode = mode == :on ? '1' : '0'
post("/apps/#{app_name}/server/maintenance", :maintenance_mode => mode)
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Hcking
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += %W(#{config.root}/lib)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# Observer reacting to user edits; must be resolvable by the autoloader.
config.active_record.observers = :user_edit_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.time_zone = "Berlin"
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# German is the app's default UI language.
config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password, :password_confirm]
# Enable the asset pipeline
config.assets.enabled = true
# We do not need rails when precompiling
config.assets.initialize_on_precompile = false
# Version of the assets; bump to expire all cached assets.
config.assets.version = '1.0'
end
end
Admin area has CSS now :smile:
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Hcking
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
config.autoload_paths += %W(#{config.root}/lib)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# Observer reacting to user edits; must be resolvable by the autoloader.
config.active_record.observers = :user_edit_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.time_zone = "Berlin"
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# German is the app's default UI language.
config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password, :password_confirm]
# Enable the asset pipeline
config.assets.enabled = true
# We do not need rails when precompiling
config.assets.initialize_on_precompile = false
# Precompile ActiveAdmin assets in addition to the default manifests.
config.assets.precompile += ['active_admin.css', 'active_admin.js', 'active_admin/print.css']
config.assets.version = '1.0'
end
end
|
# based on the Rails Plugin
module Heroku
  # Manages CLI plugins installed as git checkouts under ~/.heroku/plugins.
  class Plugin
    class << self
      include Heroku::Helpers
    end

    attr_reader :name, :uri

    # Directory that holds all installed plugins.
    def self.directory
      File.expand_path("#{home_directory}/.heroku/plugins")
    end

    # Names of all installed plugins, sorted.
    def self.list
      Dir["#{directory}/*"].sort.map do |folder|
        File.basename(folder)
      end
    end

    # Load every installed plugin, reporting (but not propagating) failures
    # so one broken plugin cannot take down the CLI.
    def self.load!
      list.each do |plugin|
        begin
          load_plugin(plugin)
        rescue Exception => e
          display "Unable to load plugin: #{plugin}: #{e.message}"
        end
      end
    end

    # Add a plugin's lib dir to the load path and run its init.rb if present.
    def self.load_plugin(plugin)
      folder = "#{self.directory}/#{plugin}"
      $: << "#{folder}/lib" if File.directory? "#{folder}/lib"
      # File.exists? was deprecated for years and removed in Ruby 3.2;
      # File.exist? is the supported spelling.
      load "#{folder}/init.rb" if File.exist? "#{folder}/init.rb"
    end

    def initialize(uri)
      @uri = uri
      guess_name(uri)
    end

    def to_s
      name
    end

    # Install path for this plugin.
    def path
      "#{self.class.directory}/#{name}"
    end

    # Clone the plugin into place. Returns false (and cleans up) on failure.
    # NOTE(review): uri is interpolated into a shell command unquoted — a
    # hostile uri could inject shell commands; confirm it is trusted input.
    def install
      FileUtils.mkdir_p(path)
      Dir.chdir(path) do
        system("git init -q")
        # Pull an explicit branch: without one, the pull fails for users
        # with no default branch configured in their .gitconfig.
        if !system("git pull #{uri} master -q")
          FileUtils.rm_rf path
          return false
        end
      end
      true
    end

    def uninstall
      FileUtils.rm_r path if File.directory?(path)
    end

    private

    # Derive the plugin name from its git URL (basename, sans ".git").
    def guess_name(url)
      @name = File.basename(url)
      @name = File.basename(File.dirname(url)) if @name.empty?
      @name.gsub!(/\.git$/, '') if @name =~ /\.git$/
    end
  end
end
Specify the branch to pull from when installing plugins.
Without this, the install fails for users without a default branch specified in their .gitconfig.
# based on the Rails Plugin
module Heroku
  # Manages CLI plugins installed as git checkouts under ~/.heroku/plugins.
  class Plugin
    class << self
      include Heroku::Helpers
    end

    attr_reader :name, :uri

    # Directory that holds all installed plugins.
    def self.directory
      File.expand_path("#{home_directory}/.heroku/plugins")
    end

    # Names of all installed plugins, sorted.
    def self.list
      Dir["#{directory}/*"].sort.map do |folder|
        File.basename(folder)
      end
    end

    # Load every installed plugin, reporting (but not propagating) failures
    # so one broken plugin cannot take down the CLI.
    def self.load!
      list.each do |plugin|
        begin
          load_plugin(plugin)
        rescue Exception => e
          display "Unable to load plugin: #{plugin}: #{e.message}"
        end
      end
    end

    # Add a plugin's lib dir to the load path and run its init.rb if present.
    def self.load_plugin(plugin)
      folder = "#{self.directory}/#{plugin}"
      $: << "#{folder}/lib" if File.directory? "#{folder}/lib"
      # File.exists? was deprecated for years and removed in Ruby 3.2;
      # File.exist? is the supported spelling.
      load "#{folder}/init.rb" if File.exist? "#{folder}/init.rb"
    end

    def initialize(uri)
      @uri = uri
      guess_name(uri)
    end

    def to_s
      name
    end

    # Install path for this plugin.
    def path
      "#{self.class.directory}/#{name}"
    end

    # Clone the plugin into place (pulling the explicit master branch so
    # users without a default branch in .gitconfig are not broken).
    # Returns false (and cleans up) on failure.
    # NOTE(review): uri is interpolated into a shell command unquoted — a
    # hostile uri could inject shell commands; confirm it is trusted input.
    def install
      FileUtils.mkdir_p(path)
      Dir.chdir(path) do
        system("git init -q")
        if !system("git pull #{uri} master -q")
          FileUtils.rm_rf path
          return false
        end
      end
      true
    end

    def uninstall
      FileUtils.rm_r path if File.directory?(path)
    end

    private

    # Derive the plugin name from its git URL (basename, sans ".git").
    def guess_name(url)
      @name = File.basename(url)
      @name = File.basename(File.dirname(url)) if @name.empty?
      @name.gsub!(/\.git$/, '') if @name =~ /\.git$/
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Ladder
# Application version string.
VERSION = "1.5.0"
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Auckland'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.enforce_available_locales = true
# Host used when generating absolute URLs in mailers.
config.action_mailer.default_url_options = { :host => 'ladders.pw' }
# Route exceptions through the app's own routes (custom error pages).
config.exceptions_app = self.routes
end
end
Release 1.5.1
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Ladder
# Application version string.
VERSION = "1.5.1"
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Auckland'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.enforce_available_locales = true
# Host used when generating absolute URLs in mailers.
config.action_mailer.default_url_options = { :host => 'ladders.pw' }
# Route exceptions through the app's own routes (custom error pages).
config.exceptions_app = self.routes
end
end
|
require 'socket'
require 'net/http'
module HGAPI
  # Thin client for shipping metrics to Hosted Graphite over UDP, TCP or
  # HTTP. When no API key is configured the client is disabled and every
  # metric call becomes a no-op.
  class Client
    SUPPORTED_TRANSPORTS = [:udp, :tcp, :http]
    HTTP_URI = "https://hostedgraphite.com/api/v1/sink"
    HOST = 'carbon.hostedgraphite.com'
    PORT = 2003

    attr_reader :disabled, :settings

    def initialize(options = {})
      @settings = build_settings(options)
      @disabled = @settings[:api_key].nil?
    end

    # Record +value+ under +key+, optionally overriding the transport
    # with options[:via]. No-op when disabled.
    def metric(key, value, options = {})
      return if @disabled
      transport = check_transport!(options[:via]) || settings[:default_transport]
      send_metric(key, value, transport)
    end

    # Time the given block (milliseconds), record it under +key+, and
    # return the block's result.
    def time(key, options = {})
      started_at = Time.now
      outcome = yield
      elapsed_ms = ((Time.now - started_at) * 1000).round
      metric(key, elapsed_ms, options)
      outcome
    end

    private

    # Resolve configuration from ENV, falling back to class constants.
    def build_settings(options)
      {
        :api_key => ENV["HOSTED_GRAPHITE_API_KEY"],
        :host => ENV["HOSTED_GRAPHITE_HOST"] || HOST,
        :port => ENV["HOSTED_GRAPHITE_PORT"] || PORT,
        :http_uri => ENV["HOSTED_GRAPHITE_HTTP_URI"] || HTTP_URI,
        :default_transport => check_transport!(options[:via]) || :udp,
        :prefix => options[:prefix]
      }
    end

    # Return the transport unchanged, raising for unknown ones.
    def check_transport!(transport)
      raise "#{transport} is unsupported transport" if transport && !SUPPORTED_TRANSPORTS.include?(transport.to_sym)
      transport
    end

    # Dispatch to the transport-specific sender.
    def send_metric(key, value, transport)
      send("send_metric_#{transport}", key, value)
    end

    def send_metric_udp(key, value)
      socket = UDPSocket.new
      socket.send("#{@settings[:api_key]}.#{prefix}#{key} #{value}\n", 0, @settings[:host], @settings[:port])
      socket.close
    end

    def send_metric_tcp(key, value)
      socket = TCPSocket.new(@settings[:host], @settings[:port])
      socket.puts "#{@settings[:api_key]}.#{prefix}#{key} #{value}\n"
      socket.close
    end

    def send_metric_http(key, value)
      uri = URI(@settings[:http_uri])
      request = Net::HTTP::Post.new(uri.request_uri)
      request.basic_auth(@settings[:api_key], nil)
      request.body = "#{prefix}#{key} #{value}"
      Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(request)
      end
    end

    # Memoized dotted metric prefix ("" when none configured).
    def prefix
      @prefix ||=
        if settings[:prefix] && !settings[:prefix].empty?
          Array(settings[:prefix]).join('.') << '.'
        else
          ""
        end
    end
  end
end
Remove attr_reader for settings and disabled
require 'socket'
require 'net/http'
module HGAPI
  # Thin client for shipping metrics to Hosted Graphite over UDP, TCP or
  # HTTP. When no API key is configured the client is disabled and every
  # metric call becomes a no-op.
  class Client
    SUPPORTED_TRANSPORTS = [:udp, :tcp, :http]
    HTTP_URI = "https://hostedgraphite.com/api/v1/sink"
    HOST = 'carbon.hostedgraphite.com'
    PORT = 2003

    def initialize(options = {})
      @settings = build_settings(options)
      @disabled = @settings[:api_key].nil?
    end

    # Record +value+ under +key+, optionally overriding the transport
    # with options[:via]. No-op when disabled.
    def metric(key, value, options = {})
      return if @disabled
      send_metric(key, value, check_transport!(options[:via]) || @settings[:default_transport])
    end

    # Time the given block (milliseconds), record it under +key+, and
    # return the block's result.
    def time(key, options = {})
      start = Time.now
      result = yield
      metric(key, ((Time.now - start) * 1000).round, options)
      result
    end

    private

    # Resolve configuration from ENV, falling back to class constants.
    def build_settings(options)
      {
        :api_key => ENV["HOSTED_GRAPHITE_API_KEY"],
        :host => ENV["HOSTED_GRAPHITE_HOST"] || HOST,
        :port => ENV["HOSTED_GRAPHITE_PORT"] || PORT,
        :http_uri => ENV["HOSTED_GRAPHITE_HTTP_URI"] || HTTP_URI,
        :default_transport => check_transport!(options[:via]) || :udp,
        :prefix => options[:prefix]
      }
    end

    # Return the transport unchanged, raising for unknown ones.
    def check_transport!(transport)
      if transport && !SUPPORTED_TRANSPORTS.include?(transport.to_sym)
        raise "#{transport} is unsupported transport"
      end
      transport
    end

    # Dispatch to the transport-specific sender.
    def send_metric(key, value, transport)
      self.send("send_metric_#{transport}", key, value)
    end

    def send_metric_udp(key, value)
      sock = UDPSocket.new
      sock.send "#{@settings[:api_key]}.#{prefix}#{key} #{value}\n", 0, @settings[:host], @settings[:port]
      sock.close
    end

    def send_metric_tcp(key, value)
      conn = TCPSocket.new @settings[:host], @settings[:port]
      conn.puts "#{@settings[:api_key]}.#{prefix}#{key} #{value}\n"
      conn.close
    end

    def send_metric_http(key, value)
      uri = URI(@settings[:http_uri])
      req = Net::HTTP::Post.new(uri.request_uri)
      req.basic_auth @settings[:api_key], nil
      req.body = "#{prefix}#{key} #{value}"
      # Response intentionally ignored (was previously assigned to a dead
      # local `res`).
      Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(req)
      end
    end

    # Memoized dotted metric prefix ("" when none configured).
    def prefix
      @prefix ||= if @settings[:prefix] && !@settings[:prefix].empty?
        Array(@settings[:prefix]).join('.') << '.'
      else
        ""
      end
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
Bundler.require(:default, Rails.env)
end
module DchousingApps
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enable the asset pipeline
    config.assets.enabled = true
    config.assets.initialize_on_precompile = false
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    # TODO Remove this in Rails 4.1
    # YAML.load_file opens, parses and closes the file; the previous
    # YAML.load(File.open(...)) leaked an open file descriptor.
    config.secret_key_base = YAML.load_file("#{Rails.root}/config/secrets.yml")[Rails.env]['secret_key_base']
  end
end
Set enforce_available_locales = true
This silences a Rails 4 warning.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
Bundler.require(:default, Rails.env)
end
module DchousingApps
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enable the asset pipeline
    config.assets.enabled = true
    config.assets.initialize_on_precompile = false
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    # TODO Remove this in Rails 4.1
    # YAML.load_file opens, parses and closes the file; the previous
    # YAML.load(File.open(...)) leaked an open file descriptor.
    config.secret_key_base = YAML.load_file("#{Rails.root}/config/secrets.yml")[Rails.env]['secret_key_base']
    # Restrict i18n to the declared locales (silences a Rails 4 warning).
    config.i18n.enforce_available_locales = true
  end
end
|
require 'forwardable'
require 'http/headers'
require 'http/content_type'
require 'http/mime_type'
require 'http/response/status'
module HTTP
# An HTTP response: status, headers, and a (lazily consumed) body.
class Response
extend Forwardable
include HTTP::Headers::Mixin
# @deprecated Will be removed in 1.0.0
# Use Status::REASONS
STATUS_CODES = Status::REASONS
# @deprecated Will be removed in 1.0.0
# Maps e.g. :not_found => 404 (reason phrases lowercased, underscored).
SYMBOL_TO_STATUS_CODE = Hash[STATUS_CODES.map { |k, v| [v.downcase.gsub(/\s|-/, '_').to_sym, k] }].freeze
# @return [Status]
attr_reader :status
# @return [Body]
attr_reader :body
# @return [URI, nil]
attr_reader :uri
def initialize(status, version, headers, body, uri = nil) # rubocop:disable ParameterLists
@version, @body, @uri = version, body, uri
@status = HTTP::Response::Status.new status
@headers = HTTP::Headers.coerce(headers || {})
end
# @!method reason
# @return (see HTTP::Response::Status#reason)
def_delegator :status, :reason
# @!method code
# @return (see HTTP::Response::Status#code)
def_delegator :status, :code
# @deprecated Will be removed in 1.0.0
alias_method :status_code, :code
# @!method to_s
# (see HTTP::Response::Body#to_s)
def_delegator :body, :to_s
alias_method :to_str, :to_s
# @!method readpartial
# (see HTTP::Response::Body#readpartial)
def_delegator :body, :readpartial
# Returns an Array ala Rack: `[status, headers, body]`
#
# @return [Array(Fixnum, Hash, String)]
def to_a
[status.to_i, headers.to_h, body.to_s]
end
# Flushes body and returns self-reference
# (reading #to_s drains the body so the connection can be reused)
#
# @return [Response]
def flush
body.to_s
self
end
# Parsed Content-Type header
#
# @return [HTTP::ContentType]
def content_type
@content_type ||= ContentType.parse headers['Content-Type']
end
# MIME type of response (if any)
#
# @return [String, nil]
def mime_type
@mime_type ||= content_type.mime_type
end
# Charset of response (if any)
#
# @return [String, nil]
def charset
@charset ||= content_type.charset
end
# Parse response body with corresponding MIME type adapter.
#
# @param [#to_s] as Parse as given MIME type
# instead of the one determined from headers
# @raise [Error] if adapter not found
# @return [Object]
def parse(as = nil)
MimeType[as || mime_type].decode to_s
end
# Inspect a response
def inspect
"#<#{self.class}/#{@version} #{code} #{reason} #{headers.inspect}>"
end
end
end
Remove superfluous space in HTTP::Response inspection
require 'forwardable'
require 'http/headers'
require 'http/content_type'
require 'http/mime_type'
require 'http/response/status'
module HTTP
# An HTTP response: status, headers, and a (lazily consumed) body.
class Response
extend Forwardable
include HTTP::Headers::Mixin
# @deprecated Will be removed in 1.0.0
# Use Status::REASONS
STATUS_CODES = Status::REASONS
# @deprecated Will be removed in 1.0.0
# Maps e.g. :not_found => 404 (reason phrases lowercased, underscored).
SYMBOL_TO_STATUS_CODE = Hash[STATUS_CODES.map { |k, v| [v.downcase.gsub(/\s|-/, '_').to_sym, k] }].freeze
# @return [Status]
attr_reader :status
# @return [Body]
attr_reader :body
# @return [URI, nil]
attr_reader :uri
def initialize(status, version, headers, body, uri = nil) # rubocop:disable ParameterLists
@version, @body, @uri = version, body, uri
@status = HTTP::Response::Status.new status
@headers = HTTP::Headers.coerce(headers || {})
end
# @!method reason
# @return (see HTTP::Response::Status#reason)
def_delegator :status, :reason
# @!method code
# @return (see HTTP::Response::Status#code)
def_delegator :status, :code
# @deprecated Will be removed in 1.0.0
alias_method :status_code, :code
# @!method to_s
# (see HTTP::Response::Body#to_s)
def_delegator :body, :to_s
alias_method :to_str, :to_s
# @!method readpartial
# (see HTTP::Response::Body#readpartial)
def_delegator :body, :readpartial
# Returns an Array ala Rack: `[status, headers, body]`
#
# @return [Array(Fixnum, Hash, String)]
def to_a
[status.to_i, headers.to_h, body.to_s]
end
# Flushes body and returns self-reference
# (reading #to_s drains the body so the connection can be reused)
#
# @return [Response]
def flush
body.to_s
self
end
# Parsed Content-Type header
#
# @return [HTTP::ContentType]
def content_type
@content_type ||= ContentType.parse headers['Content-Type']
end
# MIME type of response (if any)
#
# @return [String, nil]
def mime_type
@mime_type ||= content_type.mime_type
end
# Charset of response (if any)
#
# @return [String, nil]
def charset
@charset ||= content_type.charset
end
# Parse response body with corresponding MIME type adapter.
#
# @param [#to_s] as Parse as given MIME type
# instead of the one determined from headers
# @raise [Error] if adapter not found
# @return [Object]
def parse(as = nil)
MimeType[as || mime_type].decode to_s
end
# Inspect a response
def inspect
"#<#{self.class}/#{@version} #{code} #{reason} #{headers.inspect}>"
end
end
end
|
require_relative "boot"
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
# require "action_mailer/railtie"
# require "action_view/railtie"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
require "plek"
require "gds_api/router"
module ContentStore
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
config.i18n.enforce_available_locales = true
# Locales content can be published in. Includes non-standard codes
# (e.g. "dr", "pa-ur") — presumably project-specific; confirm against
# the publishing apps before changing.
config.i18n.available_locales = %i[
ar
az
be
bg
bn
cs
cy
da
de
dr
el
en
es
es-419
et
fa
fi
fr
gd
gu
he
hi
hr
hu
hy
id
is
it
ja
ka
kk
ko
lt
lv
ms
mt
nl
no
pa
pa-ur
pl
ps
pt
ro
ru
si
sk
sl
so
sq
sr
sv
sw
ta
th
tk
tr
uk
ur
uz
vi
zh
zh-hk
zh-tw
]
# Caching defaults
# DEFAULT_TTL env var is interpreted as a number of seconds.
config.default_ttl = ENV.fetch("DEFAULT_TTL", 30.minutes).to_i.seconds
# NOTE(review): .min takes the smaller of the two values, so minimum_ttl
# is capped at 5 seconds — confirm this is intended rather than a floor.
config.minimum_ttl = [config.default_ttl, 5.seconds].min
config.paths["log"] = ENV["LOG_PATH"] if ENV["LOG_PATH"]
# Number of attempts when registering routes with the router API.
config.register_router_retries = 3
# Memoized client for the router API; bearer token comes from the
# environment, with a development-only default.
def router_api
@router_api ||= GdsApi::Router.new(
Plek.current.find("router-api"),
bearer_token: ENV["ROUTER_API_BEARER_TOKEN"] || "example",
)
end
end
end
Add Punjabi-Pakistan locale
require_relative "boot"
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
# require "action_mailer/railtie"
# require "action_view/railtie"
# require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
require "plek"
require "gds_api/router"
module ContentStore
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 6.0
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
config.i18n.enforce_available_locales = true
# Locales content can be published in. Includes non-standard codes
# (e.g. "dr", "pa-pk", "pa-ur") — presumably project-specific; confirm
# against the publishing apps before changing.
config.i18n.available_locales = %i[
ar
az
be
bg
bn
cs
cy
da
de
dr
el
en
es
es-419
et
fa
fi
fr
gd
gu
he
hi
hr
hu
hy
id
is
it
ja
ka
kk
ko
lt
lv
ms
mt
nl
no
pa
pa-pk
pa-ur
pl
ps
pt
ro
ru
si
sk
sl
so
sq
sr
sv
sw
ta
th
tk
tr
uk
ur
uz
vi
zh
zh-hk
zh-tw
]
# Caching defaults
# DEFAULT_TTL env var is interpreted as a number of seconds.
config.default_ttl = ENV.fetch("DEFAULT_TTL", 30.minutes).to_i.seconds
# NOTE(review): .min takes the smaller of the two values, so minimum_ttl
# is capped at 5 seconds — confirm this is intended rather than a floor.
config.minimum_ttl = [config.default_ttl, 5.seconds].min
config.paths["log"] = ENV["LOG_PATH"] if ENV["LOG_PATH"]
# Number of attempts when registering routes with the router API.
config.register_router_retries = 3
# Memoized client for the router API; bearer token comes from the
# environment, with a development-only default.
def router_api
@router_api ||= GdsApi::Router.new(
Plek.current.find("router-api"),
bearer_token: ENV["ROUTER_API_BEARER_TOKEN"] || "example",
)
end
end
end
|
module Hutch
# Released gem version (frozen to avoid accidental mutation).
VERSION = '0.27.0'.freeze
end
Back to dev version
module Hutch
# Pre-release development version (frozen to avoid accidental mutation).
VERSION = '0.28.0.pre'.freeze
end
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RailsReactBoilerplate
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# No application-level overrides yet; environment files hold all config.
end
end
Disable the asset pipeline
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RailsReactBoilerplate
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Sprockets is disabled; the frontend is bundled outside the asset pipeline.
config.assets.enabled = false
# Skip generating asset stubs when scaffolding.
config.generators do |generator|
generator.assets false
end
end
end
|
# frozen_string_literal: true
module Icinga2
# namespace for host handling
module Hosts
# add host
#
# @param [Hash] params
# @option params [String] name
# @option params [String] address (resolved from name via DNS when omitted)
# @option params [String] address6
# @option params [String] display_name
# @option params [Bool] enable_notifications (false)
# @option params [Integer] max_check_attempts (3)
# @option params [Integer] check_interval (60)
# @option params [Integer] retry_interval (45)
# @option params [String] notes
# @option params [String] notes_url
# @option params [String] action_url
# @option params [String] check_command
# @option params [String] check_period
# @option params [Integer] check_timeout
# @option params [String] command_endpoint
# @option params [Bool] enable_active_checks
# @option params [Bool] enable_event_handler
# @option params [Bool] enable_flapping
# @option params [Bool] enable_passive_checks
# @option params [Bool] enable_perfdata
# @option params [String] event_command
# @option params [Integer] flapping_threshold
# @option params [Array] groups
# @option params [String] icon_image
# @option params [String] icon_image_alt
# @option params [Bool] volatile
# @option params [Array] templates (['generic-host'])
# @option params [Hash] vars ({})
# @option params [String] zone
#
# @example
#    param = {
#      name: 'foo',
#      address: 'foo.bar.com',
#      display_name: 'test node',
#      max_check_attempts: 5,
#      notes: 'test node',
#      vars: {
#        description: 'host foo',
#        os: 'Linux',
#        partitions: {
#          '/' => {
#            crit: '95%',
#            warn: '90%'
#          }
#        }
#      }
#    }
#    add_host(param)
#
# @return [Hash]
#
def add_host( params )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
  raise ArgumentError.new('missing params') if( params.size.zero? )

  name = validate( params, required: true, var: 'name', type: String )
  action_url = validate( params, required: false, var: 'action_url', type: String )
  address = validate( params, required: false, var: 'address', type: String )
  address6 = validate( params, required: false, var: 'address6', type: String )
  check_command = validate( params, required: false, var: 'check_command', type: String )
  check_interval = validate( params, required: false, var: 'check_interval', type: Integer ) || 60
  # check_period names an Icinga2 TimePeriod object
  # (was validated as Integer, contradicting the documented String type)
  check_period = validate( params, required: false, var: 'check_period', type: String )
  check_timeout = validate( params, required: false, var: 'check_timeout', type: Integer )
  command_endpoint = validate( params, required: false, var: 'command_endpoint', type: String )
  display_name = validate( params, required: false, var: 'display_name', type: String )
  enable_active_checks = validate( params, required: false, var: 'enable_active_checks', type: Boolean )
  enable_event_handler = validate( params, required: false, var: 'enable_event_handler', type: Boolean )
  enable_flapping = validate( params, required: false, var: 'enable_flapping', type: Boolean )
  enable_notifications = validate( params, required: false, var: 'enable_notifications', type: Boolean ) || false
  enable_passive_checks = validate( params, required: false, var: 'enable_passive_checks', type: Boolean )
  volatile = validate( params, required: false, var: 'volatile', type: Boolean )
  enable_perfdata = validate( params, required: false, var: 'enable_perfdata', type: Boolean )
  event_command = validate( params, required: false, var: 'event_command', type: String )
  flapping_threshold = validate( params, required: false, var: 'flapping_threshold', type: Integer )
  groups = validate( params, required: false, var: 'groups', type: Array )
  icon_image = validate( params, required: false, var: 'icon_image', type: String )
  icon_image_alt = validate( params, required: false, var: 'icon_image_alt', type: String )
  notes = validate( params, required: false, var: 'notes', type: String )
  notes_url = validate( params, required: false, var: 'notes_url', type: String )
  max_check_attempts = validate( params, required: false, var: 'max_check_attempts', type: Integer ) || 3
  retry_interval = validate( params, required: false, var: 'retry_interval', type: Integer ) || 45
  templates = validate( params, required: false, var: 'templates', type: Array ) || [ 'generic-host' ]
  vars = validate( params, required: false, var: 'vars', type: Hash ) || {}
  zone = validate( params, required: false, var: 'zone', type: String )

  # fall back to a DNS lookup when no address was given
  # NOTE(review): Socket.gethostbyname is deprecated in current Rubies; consider Addrinfo
  address = Socket.gethostbyname( name ).first if( address.nil? )

  payload = {
    templates: templates,
    attrs: {
      action_url: action_url,
      address: address,
      address6: address6,
      check_period: check_period,
      check_command: check_command,
      check_interval: check_interval,
      check_timeout: check_timeout,
      command_endpoint: command_endpoint,
      display_name: display_name,
      enable_active_checks: enable_active_checks,
      enable_event_handler: enable_event_handler,
      enable_flapping: enable_flapping,
      enable_notifications: enable_notifications,
      enable_passive_checks: enable_passive_checks,
      enable_perfdata: enable_perfdata,
      event_command: event_command,
      flapping_threshold: flapping_threshold,
      groups: groups,
      icon_image: icon_image,
      icon_image_alt: icon_image_alt,
      max_check_attempts: max_check_attempts,
      notes: notes,
      notes_url: notes_url,
      retry_interval: retry_interval,
      volatile: volatile,
      zone: zone,
      vars: vars
    }
  }
  # remove all unset attrs so the API only receives explicitly requested values
  payload.reject!{ |_k, v| v.nil? }
  payload[:attrs].reject!{ |_k, v| v.nil? }

  put(
    url: format( '%s/objects/hosts/%s', @icinga_api_url_base, name ),
    headers: @headers,
    options: @options,
    payload: payload
  )
end
# delete a host
#
# @param [Hash] params
# @option params [String] name host to delete
# @option params [Bool] cascade (false) delete host also when other objects depend on it
#
# @example
#    delete_host(name: 'foo')
#    delete_host(name: 'foo', cascade: true)
#
# @return [Hash] result
#
def delete_host( params )
  # error message made consistent with the other host methods (was 'only Hash are allowed')
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
  raise ArgumentError.new('missing params') if( params.size.zero? )

  name = validate( params, required: true, var: 'name', type: String )
  cascade = validate( params, required: false, var: 'cascade', type: Boolean ) || false

  # '?cascade=1' removes the host even when other objects still reference it
  url = format( '%s/objects/hosts/%s%s', @icinga_api_url_base, name, cascade.is_a?(TrueClass) ? '?cascade=1' : nil )

  delete(
    url: url,
    headers: @headers,
    options: @options
  )
end
# modify a host
#
# @param [Hash] params
# @option params [String] name
# @option params [String] address
# @option params [String] address6
# @option params [String] display_name
# @option params [Bool] enable_notifications
# @option params [Integer] max_check_attempts
# @option params [Integer] check_interval
# @option params [Integer] retry_interval
# @option params [String] notes
# @option params [String] notes_url
# @option params [String] action_url
# @option params [String] check_command
# @option params [String] check_period
# @option params [Integer] check_timeout
# @option params [String] command_endpoint
# @option params [Bool] enable_active_checks
# @option params [Bool] enable_event_handler
# @option params [Bool] enable_flapping
# @option params [Bool] enable_passive_checks
# @option params [Bool] enable_perfdata
# @option params [String] event_command
# @option params [Integer] flapping_threshold
# @option params [Array] groups
# @option params [String] icon_image
# @option params [String] icon_image_alt
# @option params [Bool] volatile
# @option params [Array] templates (['generic-host'])
# @option params [Hash] vars ({})
# @option params [String] zone
# @option params [Bool] merge_vars (false) merge the given vars into the host's
#   existing vars instead of replacing them
#
# @example
#    param = {
#      name: 'foo',
#      address: 'foo.bar.com',
#      display_name: 'Host for an example Problem',
#      max_check_attempts: 10,
#    }
#
#    param = {
#      name: 'foo',
#      address: 'foo.bar.com',
#      notes: 'an demonstration object',
#      vars: {
#        description: 'schould be delete ASAP',
#        os: 'Linux',
#        partitions: {
#          '/' => {
#            crit: '98%',
#            warn: '95%'
#          }
#        }
#      },
#      merge_vars: true
#    }
#
#    param = {
#      name: 'foo',
#      address: 'foo.bar.com',
#      vars: {
#        description: 'removed all other custom vars',
#      }
#    }
#
#    modify_host(param)
#
# @return [Hash]
#
def modify_host( params )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
  raise ArgumentError.new('missing params') if( params.size.zero? )

  name = validate( params, required: true, var: 'name', type: String )
  action_url = validate( params, required: false, var: 'action_url', type: String )
  address = validate( params, required: false, var: 'address', type: String )
  address6 = validate( params, required: false, var: 'address6', type: String )
  check_command = validate( params, required: false, var: 'check_command', type: String )
  check_interval = validate( params, required: false, var: 'check_interval', type: Integer )
  # check_period names an Icinga2 TimePeriod object
  # (was validated as Integer, contradicting the documented String type)
  check_period = validate( params, required: false, var: 'check_period', type: String )
  check_timeout = validate( params, required: false, var: 'check_timeout', type: Integer )
  command_endpoint = validate( params, required: false, var: 'command_endpoint', type: String )
  display_name = validate( params, required: false, var: 'display_name', type: String )
  enable_active_checks = validate( params, required: false, var: 'enable_active_checks', type: Boolean )
  enable_event_handler = validate( params, required: false, var: 'enable_event_handler', type: Boolean )
  enable_flapping = validate( params, required: false, var: 'enable_flapping', type: Boolean )
  enable_notifications = validate( params, required: false, var: 'enable_notifications', type: Boolean )
  enable_passive_checks = validate( params, required: false, var: 'enable_passive_checks', type: Boolean )
  volatile = validate( params, required: false, var: 'volatile', type: Boolean )
  enable_perfdata = validate( params, required: false, var: 'enable_perfdata', type: Boolean )
  event_command = validate( params, required: false, var: 'event_command', type: String )
  flapping_threshold = validate( params, required: false, var: 'flapping_threshold', type: Integer )
  groups = validate( params, required: false, var: 'groups', type: Array )
  icon_image = validate( params, required: false, var: 'icon_image', type: String )
  icon_image_alt = validate( params, required: false, var: 'icon_image_alt', type: String )
  notes = validate( params, required: false, var: 'notes', type: String )
  notes_url = validate( params, required: false, var: 'notes_url', type: String )
  max_check_attempts = validate( params, required: false, var: 'max_check_attempts', type: Integer )
  retry_interval = validate( params, required: false, var: 'retry_interval', type: Integer )
  templates = validate( params, required: false, var: 'templates', type: Array ) || [ 'generic-host' ]
  vars = validate( params, required: false, var: 'vars', type: Hash ) || {}
  zone = validate( params, required: false, var: 'zone', type: String )
  merge_vars = validate( params, required: false, var: 'merge_vars', type: Boolean ) || false

  # check if host exists
  return { 'code' => 404, 'name' => name, 'status' => 'Object not Found' } unless( exists_host?( name ) )

  # merge the new with the old vars; without merge_vars the given vars replace
  # the host's existing custom vars entirely
  if( merge_vars == true )
    current_host = hosts( name: name )
    current_host_vars = current_host.first
    current_host_vars = current_host_vars.dig('attrs','vars')
    current_host_vars = current_host_vars.deep_string_keys
    vars = vars.deep_string_keys unless( vars.empty? )
    vars = current_host_vars.merge( vars )
  end

  payload = {
    templates: templates,
    attrs: {
      action_url: action_url,
      address: address,
      address6: address6,
      check_period: check_period,
      check_command: check_command,
      check_interval: check_interval,
      check_timeout: check_timeout,
      command_endpoint: command_endpoint,
      display_name: display_name,
      enable_active_checks: enable_active_checks,
      enable_event_handler: enable_event_handler,
      enable_flapping: enable_flapping,
      enable_notifications: enable_notifications,
      enable_passive_checks: enable_passive_checks,
      enable_perfdata: enable_perfdata,
      event_command: event_command,
      flapping_threshold: flapping_threshold,
      groups: groups,
      icon_image: icon_image,
      icon_image_alt: icon_image_alt,
      max_check_attempts: max_check_attempts,
      notes: notes,
      notes_url: notes_url,
      retry_interval: retry_interval,
      volatile: volatile,
      zone: zone,
      vars: vars
    }
  }
  # remove all unset attrs so only the given attributes are modified
  payload.reject!{ |_k, v| v.nil? }
  payload[:attrs].reject!{ |_k, v| v.nil? }

  post(
    url: format( '%s/objects/hosts/%s', @icinga_api_url_base, name ),
    headers: @headers,
    options: @options,
    payload: payload
  )
end
# return hosts
#
# @param [Hash] params
# @option params [String] name
# @option params [Array] attrs
# @option params [String] filter
# @option params [Array] joins
#
# @example to get all hosts
#    hosts
#
# @example to get one host
#    hosts( name: 'icinga2')
#
# @return [Array]
#
def hosts( params = {} )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )

  host_name = validate( params, required: false, var: 'name', type: String )

  # only attributes that were actually given are sent to the API
  payload = {
    attrs: validate( params, required: false, var: 'attrs', type: Array ),
    filter: validate( params, required: false, var: 'filter', type: String ),
    joins: validate( params, required: false, var: 'joins', type: Array )
  }.reject { |_k, v| v.nil? }

  api_data(
    url: format( '%s/objects/hosts/%s', @icinga_api_url_base, host_name ),
    headers: @headers,
    options: @options,
    payload: payload
  )
end
# returns true if the host exists
#
# @param [String] host_name
#
# @example
#    exists_host?('icinga2')
#
# @return [Bool]
#
def exists_host?( host_name )
  raise ArgumentError.new(format('wrong type. \'host_name\' must be an String, given \'%s\'',host_name.class.to_s)) unless( host_name.is_a?(String) )
  raise ArgumentError.new('Missing host_name') if host_name.empty?

  result = hosts( name: host_name )
  result = JSON.parse( result ) if result.is_a?(String)
  result = result.first if result.is_a?(Array)

  # the API answers a Hash with code 404 when the host is unknown
  !( result.is_a?(Hash) && result['code'] == 404 )
end
# returns host objects
#
# also refreshes the cached host counters (@hosts_all, @hosts_problems, ...)
# that host_problems() and hosts_adjusted() read
#
# @param [Hash] params
# @option params [Array] attrs (['name', 'state', 'acknowledgement', 'downtime_depth', 'last_check'])
# @option params [String] filter
# @option params [Array] joins
#
# @example with default attrs and joins
#    host_objects
#
# @example
#    host_objects(attrs: ['name', 'state'])
#
# @return [Hash]
#
def host_objects( params = {} )
  attrs = validate( params, required: false, var: 'attrs', type: Array ) || %w[name state acknowledgement downtime_depth last_check]
  filter = validate( params, required: false, var: 'filter', type: String )
  joins = validate( params, required: false, var: 'joins', type: Array )
  payload = {
    attrs: attrs,
    filter: filter,
    joins: joins
  }
  # drop unset entries so the API payload only contains what was requested
  payload.reject!{ |_k, v| v.nil? }
  data = api_data(
    url: format( '%s/objects/hosts', @icinga_api_url_base ),
    headers: @headers,
    options: @options,
    payload: payload
  )
  # timestamp used by the callers above to decide whether the counters are stale
  @last_host_objects_called = Time.now.to_i
  if( !data.nil? && data.is_a?(Array) )
    all_hosts = data.clone
    unless( all_hosts.nil? )
      # global var for count of all hosts
      @hosts_all = all_hosts.size
      # global var for count of all hosts with a problem
      @hosts_problems = count_problems(all_hosts)
      # global var for count of all hosts with state HOSTS_DOWN
      @hosts_problems_down = count_problems(all_hosts, Icinga2::HOSTS_DOWN)
      @hosts_problems_critical = count_problems(all_hosts, Icinga2::HOSTS_CRITICAL)
      @hosts_problems_unknown = count_problems(all_hosts, Icinga2::HOSTS_UNKNOWN)
    end
  end
  data
end
# returns adjusted hosts state
# OBSOLETE -- kept for backwards compatibility, use host_problems() instead
#
# @example
#    handled, down = hosts_adjusted.values
#
#    h = hosts_adjusted
#    down = h.dig(:down_adjusted)
#
# @return [Hash]
#  * handled_problems
#  * down_adjusted
#
def hosts_adjusted
  puts 'function hosts_adjusted() is obsolete'
  puts 'Please use host_problems()'
  # refresh the cached counters when the last API call is older than the timeout
  cib_data if((Time.now.to_i - @last_cib_data_called).to_i > @last_call_timeout)
  host_objects if((Time.now.to_i - @last_host_objects_called).to_i > @last_call_timeout)
  raise ArgumentError.new('Integer for @hosts_problems_down needed') unless( @hosts_problems_down.is_a?(Integer) )
  raise ArgumentError.new('Integer for @hosts_problems_critical needed') unless( @hosts_problems_critical.is_a?(Integer) )
  raise ArgumentError.new('Integer for @hosts_problems_unknown needed') unless( @hosts_problems_unknown.is_a?(Integer) )
  raise ArgumentError.new('Integer for @hosts_down needed') unless( @hosts_down.is_a?(Integer) )
  # calculate host problems adjusted by handled problems
  # count together handled host problems
  handled_problems = @hosts_problems_down + @hosts_problems_critical + @hosts_problems_unknown
  down_adjusted = @hosts_down - handled_problems
  {
    handled_problems: handled_problems.to_i,
    down_adjusted: down_adjusted.to_i
  }
end
# return count of hosts with problems
#
# a host counts as a problem when its state is not UP and it is neither in a
# downtime nor acknowledged
#
# @example
#    count_hosts_with_problems
#
# @return [Integer]
#
def count_hosts_with_problems
  host_data = host_objects
  host_data = JSON.parse(host_data) if host_data.is_a?(String)
  # count with a predicate instead of select { }.size -- no intermediate array
  host_data.count do |t|
    t.dig('attrs','state') != 0 && t.dig('attrs','downtime_depth').zero? && t.dig('attrs','acknowledgement').zero?
  end
end
# return a list of hosts with problems
#
# @param [Integer] max_items numbers of list entries
#
# @example
#    list_hosts_with_problems
#
# @return [Hash] host name => severity, the max_items most severe hosts first
#
def list_hosts_with_problems( max_items = 5 )
  raise ArgumentError.new(format('wrong type. \'max_items\' must be an Integer, given \'%s\'', max_items.class.to_s)) unless( max_items.is_a?(Integer) )

  host_problems = {}
  host_data = host_objects
  host_data = JSON.parse( host_data ) if host_data.is_a?(String)

  unless( host_data.nil? )
    host_data.each do |h|
      state = h.dig('attrs','state')
      # state 0 (UP) is not a problem
      next if state.to_i.zero?
      host_problems[h.dig('name')] = host_severity(h)
    end
  end

  # keep the max_items most severe entries
  # (was host_problems.keys[1..max_items], which silently skipped the first
  # host and ignored the computed severity ordering)
  host_problems.sort_by { |_name, severity| -severity }.first(max_items).to_h
end
# returns a counter of all hosts
#
# @example
#    hosts_all
#
# @return [Integer]
#
def hosts_all
  # lazily (re)fetch host objects when no counter has been cached yet
  host_objects if @hosts_all.to_i.zero?
  @hosts_all
end
# returns data with host problems
#
# @example
#    host_objects
#    all, down, critical, unknown, handled, adjusted = host_problems.values
#
#    p = host_problems
#    down = p.dig(:down)
#
# @return [Hash]
#  * all
#  * down
#  * critical
#  * unknown
#  * handled
#  * adjusted
#
def host_problems
  # refresh the cached counters when the last API call is older than the timeout
  cib_data if((Time.now.to_i - @last_cib_data_called).to_i > @last_call_timeout)
  host_objects if((Time.now.to_i - @last_host_objects_called).to_i > @last_call_timeout)

  raise ArgumentError.new(format('wrong type. \'@hosts_problems_down\' must be an Integer, given \'%s\'', @hosts_problems_down.class.to_s)) unless( @hosts_problems_down.is_a?(Integer) )
  raise ArgumentError.new(format('wrong type. \'@hosts_problems_critical\' must be an Integer, given \'%s\'', @hosts_problems_critical.class.to_s)) unless( @hosts_problems_critical.is_a?(Integer) )
  # this line was a copy-and-paste duplicate of the critical check;
  # @hosts_problems_unknown was never validated
  raise ArgumentError.new(format('wrong type. \'@hosts_problems_unknown\' must be an Integer, given \'%s\'', @hosts_problems_unknown.class.to_s)) unless( @hosts_problems_unknown.is_a?(Integer) )
  raise ArgumentError.new(format('wrong type. \'@hosts_down\' must be an Integer, given \'%s\'', @hosts_down.class.to_s)) unless( @hosts_down.is_a?(Integer) )

  problems_all = @hosts_problems.nil? ? 0 : @hosts_problems
  problems_down = @hosts_problems_down.nil? ? 0 : @hosts_problems_down
  problems_critical = @hosts_problems_critical.nil? ? 0 : @hosts_problems_critical
  problems_unknown = @hosts_problems_unknown.nil? ? 0 : @hosts_problems_unknown

  # calculate host problems adjusted by handled problems
  # count together handled host problems
  problems_handled = @hosts_problems_down + @hosts_problems_critical + @hosts_problems_unknown
  problems_adjusted = @hosts_down - problems_handled

  {
    all: problems_all.to_i,
    down: problems_down.to_i,
    critical: problems_critical.to_i,
    unknown: problems_unknown.to_i,
    handled: problems_handled.to_i,
    adjusted: problems_adjusted.to_i
  }
end
protected
# calculate a host severity
#
# stolen from Icinga Web 2
# ./modules/monitoring/library/Monitoring/Backend/Ido/Query/ServicestatusQuery.php
#
# @param [Hash] params
# @option params [hash] attrs ()
#  * state [Float]
#  * acknowledgement [Float] (default: 0)
#  * downtime_depth [Float] (default: 0)
#
# @api protected
#
# @example
#    host_severity( {'attrs' => { 'state' => 0.0, 'acknowledgement' => 0.0, 'downtime_depth' => 0.0 } } )
#
# @return [Integer]
#
def host_severity( params )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
  raise ArgumentError.new('missing params') if( params.size.zero? )

  attrs = params.dig('attrs')
  state = validate( attrs, required: true, var: 'state', type: Float )
  acknowledgement = validate( attrs, required: false, var: 'acknowledgement', type: Float ) || 0
  downtime_depth = validate( attrs, required: false, var: 'downtime_depth', type: Float ) || 0

  # base weight: acknowledged (2) < in downtime (1) < unhandled (4)
  severity =
    if acknowledgement != 0
      2
    elsif downtime_depth > 0
      1
    else
      4
    end

  severity += 16 if object_has_been_checked?(params)

  # state weight: DOWN (32), CRITICAL (64), anything else non-zero (256)
  if state == 1
    severity += 32
  elsif state == 2
    severity += 64
  elsif !state.zero?
    severity += 256
  end

  severity
end
end
end
code style
# frozen_string_literal: true
module Icinga2
# namespace for host handling
module Hosts
# add host
#
# @param [Hash] params
# @option params [String] name
# @option params [String] address
# @option params [String] address6
# @option params [String] display_name
# @option params [Bool] enable_notifications (false)
# @option params [Integer] max_check_attempts (3)
# @option params [Integer] check_interval (60)
# @option params [Integer] retry_interval (45)
# @option params [String] notes
# @option params [String] notes_url
# @option params [String] action_url
# @option params [String] check_command
# @option params [Integer] check_interval
# @option params [String] check_period
# @option params [Integer] check_timeout
# @option params [String] command_endpoint
# @option params [Bool] enable_active_checks
# @option params [Bool] enable_event_handler
# @option params [Bool] enable_flapping
# @option params [Bool] enable_passive_checks
# @option params [Bool] enable_perfdata
# @option params [String] event_command
# @option params [Integer] flapping_threshold
# @option params [Integer] flapping_threshold_high
# @option params [Integer] flapping_threshold_low
# @option params [String] icon_image
# @option params [String] icon_image_alt
# @option params [Integer] retry_interval
# @option params [Bool] volatile
# @option params [Hash] vars ({})
#
# @example
# param = {
# name: 'foo',
# address: 'foo.bar.com',
# display_name: 'test node',
# max_check_attempts: 5,
# notes: 'test node',
# vars: {
# description: 'host foo',
# os: 'Linux',
# partitions: {
# '/' => {
# crit: '95%',
# warn: '90%'
# }
# }
# }
# }
# add_host(param)
#
# @return [Hash]
#
def add_host( params )
raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
raise ArgumentError.new('missing params') if( params.size.zero? )
name = validate( params, required: true, var: 'name', type: String )
action_url = validate( params, required: false, var: 'action_url', type: String )
address = validate( params, required: false, var: 'address', type: String )
address6 = validate( params, required: false, var: 'address6', type: String )
check_command = validate( params, required: false, var: 'check_command', type: String )
check_interval = validate( params, required: false, var: 'check_interval', type: Integer ) || 60
check_period = validate( params, required: false, var: 'check_period', type: Integer )
check_timeout = validate( params, required: false, var: 'check_timeout', type: Integer )
command_endpoint = validate( params, required: false, var: 'command_endpoint', type: String )
display_name = validate( params, required: false, var: 'display_name', type: String )
enable_active_checks = validate( params, required: false, var: 'enable_active_checks', type: Boolean )
enable_event_handler = validate( params, required: false, var: 'enable_event_handler', type: Boolean )
enable_flapping = validate( params, required: false, var: 'enable_flapping', type: Boolean )
enable_notifications = validate( params, required: false, var: 'enable_notifications', type: Boolean ) || false
enable_passive_checks = validate( params, required: false, var: 'enable_passive_checks', type: Boolean )
volatile = validate( params, required: false, var: 'volatile', type: Boolean )
enable_perfdata = validate( params, required: false, var: 'enable_perfdata', type: Boolean )
event_command = validate( params, required: false, var: 'event_command', type: String )
flapping_threshold = validate( params, required: false, var: 'flapping_threshold', type: Integer )
groups = validate( params, required: false, var: 'groups', type: Array )
icon_image = validate( params, required: false, var: 'icon_image', type: String )
icon_image_alt = validate( params, required: false, var: 'icon_image_alt', type: String )
notes = validate( params, required: false, var: 'notes', type: String )
notes_url = validate( params, required: false, var: 'notes_url', type: String )
max_check_attempts = validate( params, required: false, var: 'max_check_attempts', type: Integer ) || 3
retry_interval = validate( params, required: false, var: 'retry_interval', type: Integer ) || 45
templates = validate( params, required: false, var: 'templates', type: Array ) || [ 'generic-host' ]
vars = validate( params, required: false, var: 'vars', type: Hash ) || {}
zone = validate( params, required: false, var: 'zone', type: String )
address = Socket.gethostbyname( name ).first if( address.nil? )
payload = {
templates: templates,
attrs: {
action_url: action_url,
address: address,
address6: address6,
check_period: check_period,
check_command: check_command,
check_interval: check_interval,
check_timeout: check_timeout,
command_endpoint: command_endpoint,
display_name: display_name,
enable_active_checks: enable_active_checks,
enable_event_handler: enable_event_handler,
enable_flapping: enable_flapping,
enable_notifications: enable_notifications,
enable_passive_checks: enable_passive_checks,
enable_perfdata: enable_perfdata,
event_command: event_command,
flapping_threshold: flapping_threshold,
groups: groups,
icon_image: icon_image,
icon_image_alt: icon_image_alt,
max_check_attempts: max_check_attempts,
notes: notes,
notes_url: notes_url,
retry_interval: retry_interval,
volatile: volatile,
zone: zone,
vars: vars
}
}
# remove all empty attrs
payload.reject!{ |_k, v| v.nil? }
payload[:attrs].reject!{ |_k, v| v.nil? }
# puts JSON.pretty_generate payload
put(
url: format( '%s/objects/hosts/%s', @icinga_api_url_base, name ),
headers: @headers,
options: @options,
payload: payload
)
end
# delete a host
#
# @param [Hash] params
# @option params [String] name host to delete
# @option params [Bool] cascade (false) delete host also when other objects depend on it
#
# @example
# delete_host(name: 'foo')
# delete_host(name: 'foo', cascade: true)
#
#
# @return [Hash] result
#
def delete_host( params )
raise ArgumentError.new('only Hash are allowed') unless( params.is_a?(Hash) )
raise ArgumentError.new('missing params') if( params.size.zero? )
name = validate( params, required: true, var: 'name', type: String )
cascade = validate( params, required: false, var: 'cascade', type: Boolean ) || false
url = format( '%s/objects/hosts/%s%s', @icinga_api_url_base, name, cascade.is_a?(TrueClass) ? '?cascade=1' : nil )
delete(
url: url,
headers: @headers,
options: @options
)
end
# modify a host
#
# @param [Hash] params
# @option params [String] name
# @option params [String] name
# @option params [String] address
# @option params [String] address6
# @option params [String] display_name
# @option params [Bool] enable_notifications
# @option params [Integer] max_check_attempts
# @option params [Integer] check_interval
# @option params [Integer] retry_interval
# @option params [String] notes
# @option params [String] notes_url
# @option params [String] action_url
# @option params [String] check_command
# @option params [Integer] check_interval
# @option params [String] check_period
# @option params [Integer] check_timeout
# @option params [String] command_endpoint
# @option params [Bool] enable_active_checks
# @option params [Bool] enable_event_handler
# @option params [Bool] enable_flapping
# @option params [Bool] enable_passive_checks
# @option params [Bool] enable_perfdata
# @option params [String] event_command
# @option params [Integer] flapping_threshold
# @option params [Integer] flapping_threshold_high
# @option params [Integer] flapping_threshold_low
# @option params [String] icon_image
# @option params [String] icon_image_alt
# @option params [Integer] retry_interval
# @option params [Bool] volatile
# @option params [Hash] vars ({})
# @option params [Bool] merge_vars (false)
#
# @example
# param = {
# name: 'foo',
# address: 'foo.bar.com',
# display_name: 'Host for an example Problem',
# max_check_attempts: 10,
# }
#
# param = {
# name: 'foo',
# address: 'foo.bar.com',
# notes: 'an demonstration object',
# vars: {
# description: 'schould be delete ASAP',
# os: 'Linux',
# partitions: {
# '/' => {
# crit: '98%',
# warn: '95%'
# }
# }
# },
# merge_vars: true
# }
#
# param = {
# name: 'foo',
# address: 'foo.bar.com',
# vars: {
# description: 'removed all other custom vars',
# }
# }
#
# add_host(param)
#
# @return [Hash]
#
def modify_host( params )
raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
raise ArgumentError.new('missing params') if( params.size.zero? )
name = validate( params, required: true, var: 'name', type: String )
action_url = validate( params, required: false, var: 'action_url', type: String )
address = validate( params, required: false, var: 'address', type: String )
address6 = validate( params, required: false, var: 'address6', type: String )
check_command = validate( params, required: false, var: 'check_command', type: String )
check_interval = validate( params, required: false, var: 'check_interval', type: Integer )
check_period = validate( params, required: false, var: 'check_period', type: Integer )
check_timeout = validate( params, required: false, var: 'check_timeout', type: Integer )
command_endpoint = validate( params, required: false, var: 'command_endpoint', type: String )
display_name = validate( params, required: false, var: 'display_name', type: String )
enable_active_checks = validate( params, required: false, var: 'enable_active_checks', type: Boolean )
enable_event_handler = validate( params, required: false, var: 'enable_event_handler', type: Boolean )
enable_flapping = validate( params, required: false, var: 'enable_flapping', type: Boolean )
enable_notifications = validate( params, required: false, var: 'enable_notifications', type: Boolean )
enable_passive_checks = validate( params, required: false, var: 'enable_passive_checks', type: Boolean )
volatile = validate( params, required: false, var: 'volatile', type: Boolean )
enable_perfdata = validate( params, required: false, var: 'enable_perfdata', type: Boolean )
event_command = validate( params, required: false, var: 'event_command', type: String )
flapping_threshold = validate( params, required: false, var: 'flapping_threshold', type: Integer )
groups = validate( params, required: false, var: 'groups', type: Array )
icon_image = validate( params, required: false, var: 'icon_image', type: String )
icon_image_alt = validate( params, required: false, var: 'icon_image_alt', type: String )
notes = validate( params, required: false, var: 'notes', type: String )
notes_url = validate( params, required: false, var: 'notes_url', type: String )
max_check_attempts = validate( params, required: false, var: 'max_check_attempts', type: Integer )
retry_interval = validate( params, required: false, var: 'retry_interval', type: Integer )
templates = validate( params, required: false, var: 'templates', type: Array ) || [ 'generic-host' ]
vars = validate( params, required: false, var: 'vars', type: Hash ) || {}
zone = validate( params, required: false, var: 'zone', type: String )
merge_vars = validate( params, required: false, var: 'merge_vars', type: Boolean ) || false
# check if host exists
return { 'code' => 404, 'name' => name, 'status' => 'Object not Found' } unless( exists_host?( name ) )
# merge the new with the old vars
if( merge_vars == true )
current_host = hosts( name: name )
current_host_vars = current_host.first
current_host_vars = current_host_vars.dig('attrs','vars')
current_host_vars = current_host_vars.deep_string_keys
vars = vars.deep_string_keys unless( vars.empty? )
vars = current_host_vars.merge( vars )
end
payload = {
templates: templates,
attrs: {
action_url: action_url,
address: address,
address6: address6,
check_period: check_period,
check_command: check_command,
check_interval: check_interval,
check_timeout: check_timeout,
command_endpoint: command_endpoint,
display_name: display_name,
enable_active_checks: enable_active_checks,
enable_event_handler: enable_event_handler,
enable_flapping: enable_flapping,
enable_notifications: enable_notifications,
enable_passive_checks: enable_passive_checks,
enable_perfdata: enable_perfdata,
event_command: event_command,
flapping_threshold: flapping_threshold,
groups: groups,
icon_image: icon_image,
icon_image_alt: icon_image_alt,
max_check_attempts: max_check_attempts,
notes: notes,
notes_url: notes_url,
retry_interval: retry_interval,
volatile: volatile,
zone: zone,
vars: vars
}
}
# remove all empty attrs
payload.reject!{ |_k, v| v.nil? }
payload[:attrs].reject!{ |_k, v| v.nil? }
post(
url: format( '%s/objects/hosts/%s', @icinga_api_url_base, name ),
headers: @headers,
options: @options,
payload: payload
)
end
# return hosts
#
# @param [Hash] params
# @option params [String] name host to look up (all hosts when omitted)
# @option params [Array] attrs
# @option params [String] filter
# @option params [Array] joins
#
# @example to get all hosts
#    hosts
#
# @example to get one host
#    hosts( name: 'icinga2')
#
# @return [Array]
#
def hosts( params = {} )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )

  host_name = validate( params, required: false, var: 'name', type: String )

  # collect the optional query parts and drop everything left unset
  payload = {
    attrs: validate( params, required: false, var: 'attrs', type: Array ),
    filter: validate( params, required: false, var: 'filter', type: String ),
    joins: validate( params, required: false, var: 'joins', type: Array )
  }.reject { |_key, value| value.nil? }

  api_data(
    url: format( '%s/objects/hosts/%s', @icinga_api_url_base, host_name ),
    headers: @headers,
    options: @options,
    payload: payload
  )
end
# returns true if the host exists
#
# @param [String] host_name
#
# @example
#    exists_host?('icinga2')
#
# @return [Bool]
#
def exists_host?( host_name )
  raise ArgumentError.new(format('wrong type. \'host_name\' must be an String, given \'%s\'',host_name.class.to_s)) unless( host_name.is_a?(String) )
  raise ArgumentError.new('Missing host_name') if( host_name.size.zero? )

  response = hosts( name: host_name )
  response = JSON.parse( response ) if( response.is_a?(String) )
  response = response.first if( response.is_a?(Array) )

  # the API answers with a hash carrying code 404 when the object is unknown
  !( response.is_a?(Hash) && response.dig('code') == 404 )
end
# returns host objects
#
# @param [Hash] params
# @option params [Array] attrs (['name', 'state', 'acknowledgement', 'downtime_depth', 'last_check'])
# @option params [String] filter ([])
# @option params [Array] joins ([])
#
# @example with default attrs and joins
#    host_objects
#
# @example
#    host_objects(attrs: ['name', 'state'])
#
# @return [Hash]
#
def host_objects( params = {} )
  # guard against non-Hash input, consistent with hosts()
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )

  attrs = validate( params, required: false, var: 'attrs', type: Array ) || %w[name state acknowledgement downtime_depth last_check]
  filter = validate( params, required: false, var: 'filter', type: String )
  joins = validate( params, required: false, var: 'joins', type: Array )

  payload = {
    attrs: attrs,
    filter: filter,
    joins: joins
  }
  # remove all unset query parts
  payload.reject!{ |_k, v| v.nil? }

  data = api_data(
    url: format( '%s/objects/hosts', @icinga_api_url_base ),
    headers: @headers,
    options: @options,
    payload: payload
  )

  # remember when the counters below were refreshed (used by host_problems / hosts_adjusted)
  @last_host_objects_called = Time.now.to_i

  if( !data.nil? && data.is_a?(Array) )
    all_hosts = data.clone
    unless( all_hosts.nil? )
      # global var for count of all hosts
      @hosts_all = all_hosts.size
      # global var for count of all hosts with a problem
      @hosts_problems = count_problems(all_hosts)
      # global var for count of all hosts with state HOSTS_DOWN
      @hosts_problems_down = count_problems(all_hosts, Icinga2::HOSTS_DOWN)
      @hosts_problems_critical = count_problems(all_hosts, Icinga2::HOSTS_CRITICAL)
      @hosts_problems_unknown = count_problems(all_hosts, Icinga2::HOSTS_UNKNOWN)
    end
  end

  data
end
# returns adjusted hosts state
# OBSOLETE
#
# @example
#    handled, down = hosts_adjusted.values
#
#    h = hosts_adjusted
#    down = h.dig(:down_adjusted)
#
# @return [Hash]
#    * handled_problems
#    * down_adjusted
#
def hosts_adjusted
  puts 'function hosts_adjusted() is obsolete'
  puts 'Please use host_problems()'

  # refresh cached data when the previous call is older than the timeout
  cib_data if((Time.now.to_i - @last_cib_data_called).to_i > @last_call_timeout)
  host_objects if((Time.now.to_i - @last_host_objects_called).to_i > @last_call_timeout)

  # all four counters must have been initialised as Integers by now
  {
    '@hosts_problems_down' => @hosts_problems_down,
    '@hosts_problems_critical' => @hosts_problems_critical,
    '@hosts_problems_unknown' => @hosts_problems_unknown,
    '@hosts_down' => @hosts_down
  }.each do |var_name, value|
    raise ArgumentError.new("Integer for #{var_name} needed") unless value.is_a?(Integer)
  end

  # handled problems are subtracted from the raw down count
  handled_problems = @hosts_problems_down + @hosts_problems_critical + @hosts_problems_unknown

  {
    handled_problems: handled_problems.to_i,
    down_adjusted: (@hosts_down - handled_problems).to_i
  }
end
# return count of hosts with problems
#
# @example
#    count_hosts_with_problems
#
# @return [Integer]
#
def count_hosts_with_problems
  data = host_objects
  data = JSON.parse(data) if data.is_a?(String)

  # a host counts as a problem when it is not UP and the state is
  # neither covered by a downtime nor acknowledged
  data.count do |host|
    host.dig('attrs', 'state') != 0 &&
      host.dig('attrs', 'downtime_depth').zero? &&
      host.dig('attrs', 'acknowledgement').zero?
  end
end
# return a list of hosts with problems
#
# @param [Integer] max_items numbers of list entries
#
# @example
#    list_hosts_with_problems
#
# @return [Hash]
#
def list_hosts_with_problems( max_items = 5 )
  raise ArgumentError.new(format('wrong type. \'max_items\' must be an Integer, given \'%s\'', max_items.class.to_s)) unless( max_items.is_a?(Integer) )

  host_problems = {}
  host_problems_severity = {}

  host_data = host_objects
  host_data = JSON.parse( host_data ) if host_data.is_a?(String)

  unless( host_data.nil? )
    host_data.each do |h|
      name = h.dig('name')
      state = h.dig('attrs','state')
      # skip hosts in state UP
      next if state.to_i.zero?
      host_problems[name] = host_severity(h)
    end
  end

  # keep only the first max_items problem hosts
  # (was keys[1..max_items], which always dropped the first entry)
  host_problems.keys.first(max_items).each { |k| host_problems_severity[k] = host_problems[k] } unless( host_problems.empty? )

  host_problems_severity
end
# returns a counter of all hosts
#
# @example
#    hosts_all
#
# @return [Integer]
#
def hosts_all
  # refresh the cached counter when it has never been set (or is zero)
  needs_refresh = @hosts_all.nil? || @hosts_all.zero?
  host_objects if needs_refresh
  @hosts_all
end
# returns data with host problems
#
# @example
#    host_objects
#    all, down, critical, unknown, handled, adjusted = host_problems.values
#
#    p = host_problems
#    down = p.dig(:down)
#
# @return [Hash]
#    * all
#    * down
#    * critical
#    * unknown
#
def host_problems
  # refresh cached data when the previous call is older than the timeout
  cib_data if((Time.now.to_i - @last_cib_data_called).to_i > @last_call_timeout)
  host_objects if((Time.now.to_i - @last_host_objects_called).to_i > @last_call_timeout)

  raise ArgumentError.new(format('wrong type. \'@hosts_problems_down\' must be an Integer, given \'%s\'', @hosts_problems_down.class.to_s)) unless( @hosts_problems_down.is_a?(Integer) )
  raise ArgumentError.new(format('wrong type. \'@hosts_problems_critical\' must be an Integer, given \'%s\'', @hosts_problems_critical.class.to_s)) unless( @hosts_problems_critical.is_a?(Integer) )
  # was a duplicated check of @hosts_problems_critical; validate @hosts_problems_unknown instead
  raise ArgumentError.new(format('wrong type. \'@hosts_problems_unknown\' must be an Integer, given \'%s\'', @hosts_problems_unknown.class.to_s)) unless( @hosts_problems_unknown.is_a?(Integer) )
  raise ArgumentError.new(format('wrong type. \'@hosts_down\' must be an Integer, given \'%s\'', @hosts_down.class.to_s)) unless( @hosts_down.is_a?(Integer) )

  problems_all      = @hosts_problems.nil? ? 0 : @hosts_problems
  problems_down     = @hosts_problems_down.nil? ? 0 : @hosts_problems_down
  problems_critical = @hosts_problems_critical.nil? ? 0 : @hosts_problems_critical
  problems_unknown  = @hosts_problems_unknown.nil? ? 0 : @hosts_problems_unknown

  # handled problems (down/critical/unknown hosts already dealt with) are
  # subtracted from the raw down count for the adjusted value
  problems_handled  = @hosts_problems_down + @hosts_problems_critical + @hosts_problems_unknown
  problems_adjusted = @hosts_down - problems_handled

  {
    all: problems_all.to_i,
    down: problems_down.to_i,
    critical: problems_critical.to_i,
    unknown: problems_unknown.to_i,
    handled: problems_handled.to_i,
    adjusted: problems_adjusted.to_i
  }
end
protected
# calculate a host severity
#
# stolen from Icinga Web 2
# ./modules/monitoring/library/Monitoring/Backend/Ido/Query/ServicestatusQuery.php
#
# @param [Hash] params
# @option params [hash] attrs ()
#    * state [Float]
#    * acknowledgement [Float] (default: 0)
#    * downtime_depth [Float] (default: 0)
#
# @api protected
#
# @example
#    host_severity( {'attrs' => { 'state' => 0.0, 'acknowledgement' => 0.0, 'downtime_depth' => 0.0 } } )
#
# @return [Integer]
#
def host_severity( params )
  raise ArgumentError.new(format('wrong type. \'params\' must be an Hash, given \'%s\'', params.class.to_s)) unless( params.is_a?(Hash) )
  raise ArgumentError.new('missing params') if( params.size.zero? )

  attrs = params.dig('attrs')

  state           = validate( attrs, required: true, var: 'state', type: Float )
  acknowledgement = validate( attrs, required: false, var: 'acknowledgement', type: Float ) || 0
  downtime_depth  = validate( attrs, required: false, var: 'downtime_depth', type: Float ) || 0

  # acknowledged problems weigh less than unhandled ones;
  # a downtime weighs least of all
  severity =
    if acknowledgement != 0
      2
    elsif downtime_depth > 0
      1
    else
      4
    end

  severity += 16 if object_has_been_checked?(params)

  return severity if state.zero?

  # non-UP states add their own weight on top
  severity + case state
             when 1 then 32
             when 2 then 64
             else 256
             end
end
end
end
|
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Reneval
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
  end
end
Precompile image assets for Bootstrap
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module Reneval
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Precompile image assets referenced by Bootstrap
    config.assets.precompile += %w(*.png *.jpg *.jpeg *.gif)
  end
end
|
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module EZNotes
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # This ensures bootstrap-sass is compatible with the asset pipeline in rails 4
    config.assets.precompile += %w(*.png *.jpg *.jpeg *.gif)
  end
end
Update asset configuration to precompile Bootstrap font assets for production
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
module EZNotes
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # This ensures bootstrap-sass is compatible with the asset pipeline in rails 4,
    # including the web-font assets (woff/eot/svg/ttf) it ships
    config.assets.precompile += %w(*.png *.jpg *.jpeg *.gif *.woff *.eot *.svg *.ttf)
  end
end
|
require 'optparse'
require 'active_support/core_ext/class/attribute_accessors'
require 'active_support/core_ext/object/blank'
require 'active_support/core_ext/module/delegation'
require 'active_support/core_ext/string'
require 'active_support/deprecation'
# Top-level namespace for InfinityTest. Every component is declared with
# Kernel#autoload so that requiring this file stays cheap until a
# component is actually referenced.
module InfinityTest
# Callback hooks that run around test executions.
module Callbacks
autoload :BaseCallback, 'infinity_test/callbacks/base_callback'
autoload :AfterCallback, 'infinity_test/callbacks/after_callback'
autoload :BeforeCallback, 'infinity_test/callbacks/before_callback'
end
# Core runtime: configuration handling, option parsing and the runner.
module Core
autoload :Base, 'infinity_test/core/base'
autoload :ConfigurationMerge, 'infinity_test/core/configuration_merge'
autoload :LoadConfiguration, 'infinity_test/core/load_configuration'
autoload :Options, 'infinity_test/core/options'
autoload :Runner, 'infinity_test/core/runner'
end
# NOTE: OldDSL will be removed in InfinityTest 2.0.1 and extracted to a separate gem.
#
module OldDSL
autoload :Configuration, 'infinity_test/old_dsl/configuration'
end
# Adapters for the test frameworks / app types InfinityTest understands.
module Framework
autoload :Base, 'infinity_test/framework/base'
autoload :Padrino, 'infinity_test/framework/padrino'
autoload :Rails, 'infinity_test/framework/rails'
autoload :Rubygems, 'infinity_test/framework/rubygems'
end
# File-system watchers that trigger test runs on change.
module Observer
autoload :Base, 'infinity_test/observer/base'
autoload :Watchr, 'infinity_test/observer/watchr'
end
# Ruby-version management strategies (rvm, rbenv, system ruby, ...).
module Strategy
autoload :AutoDiscover, 'infinity_test/strategy/auto_discover'
autoload :Base, 'infinity_test/strategy/base'
autoload :Rbenv, 'infinity_test/strategy/rbenv'
autoload :Rvm, 'infinity_test/strategy/rvm'
autoload :RubyDefault, 'infinity_test/strategy/ruby_default'
autoload :SharedExample, 'infinity_test/strategy/shared_example'
end
# Expose Callbacks and Core constants/methods directly on InfinityTest.
include Callbacks
include Core
end
Add the InfinityTest.setup { |config| ... } configuration API
require 'optparse'
require 'active_support/core_ext/class/attribute_accessors'
require 'active_support/core_ext/object/blank'
require 'active_support/core_ext/module/delegation'
require 'active_support/core_ext/string'
require 'active_support/deprecation'
# Top-level namespace for InfinityTest. Every component is declared with
# Kernel#autoload so that requiring this file stays cheap until a
# component is actually referenced.
module InfinityTest
# Callback hooks that run around test executions.
module Callbacks
autoload :BaseCallback, 'infinity_test/callbacks/base_callback'
autoload :AfterCallback, 'infinity_test/callbacks/after_callback'
autoload :BeforeCallback, 'infinity_test/callbacks/before_callback'
end
# Core runtime: configuration handling, option parsing and the runner.
module Core
autoload :Base, 'infinity_test/core/base'
autoload :ConfigurationMerge, 'infinity_test/core/configuration_merge'
autoload :LoadConfiguration, 'infinity_test/core/load_configuration'
autoload :Options, 'infinity_test/core/options'
autoload :Runner, 'infinity_test/core/runner'
end
# NOTE: OldDSL will be removed in InfinityTest 2.0.* and extracted to another gem.
#
module OldDSL
autoload :Configuration, 'infinity_test/old_dsl/configuration'
end
# Adapters for the test frameworks / app types InfinityTest understands.
module Framework
autoload :Base, 'infinity_test/framework/base'
autoload :Padrino, 'infinity_test/framework/padrino'
autoload :Rails, 'infinity_test/framework/rails'
autoload :Rubygems, 'infinity_test/framework/rubygems'
end
# File-system watchers that trigger test runs on change.
module Observer
autoload :Base, 'infinity_test/observer/base'
autoload :Watchr, 'infinity_test/observer/watchr'
end
# Ruby-version management strategies (rvm, rbenv, system ruby, ...).
module Strategy
autoload :AutoDiscover, 'infinity_test/strategy/auto_discover'
autoload :Base, 'infinity_test/strategy/base'
autoload :Rbenv, 'infinity_test/strategy/rbenv'
autoload :Rvm, 'infinity_test/strategy/rvm'
autoload :RubyDefault, 'infinity_test/strategy/ruby_default'
autoload :SharedExample, 'infinity_test/strategy/shared_example'
end
# Public configuration entry point: InfinityTest.setup { |config| ... }.
# See Base.setup to more information.
#
def self.setup(&block)
InfinityTest::Base.setup(&block)
end
# Expose Callbacks and Core constants/methods directly on InfinityTest.
include Callbacks
include Core
end |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'active_support/dependencies'
# FIXME this silences the warnings about Rails 2.3-style plugins under
# vendor/plugins, which are deprecated. Hiding those warnings makes it easier
# to work for now, but we should really look at putting those plugins away.
ActiveSupport::Deprecation.silenced = true
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Noosfero
class Application < Rails::Application
require 'noosfero/plugin'
# Adds custom attributes to the Set of allowed html attributes for the #sanitize helper
config.action_view.sanitized_allowed_attributes = 'align', 'border', 'alt', 'vspace', 'hspace', 'width', 'heigth', 'value', 'type', 'data', 'style', 'target', 'codebase', 'archive', 'classid', 'code', 'flashvars', 'scrolling', 'frameborder', 'controls', 'autoplay', 'colspan', 'rowspan'
# Adds custom tags to the Set of allowed html tags for the #sanitize helper
config.action_view.sanitized_allowed_tags = 'object', 'embed', 'param', 'table', 'tr', 'th', 'td', 'applet', 'comment', 'iframe', 'audio', 'video', 'source'
config.action_controller.include_all_helpers = false
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W( #{Rails.root.join('app', 'sweepers')} )
config.autoload_paths += Dir["#{config.root}/lib/**/"]
config.autoload_paths += Dir["#{config.root}/app/controllers/**/"]
config.autoload_paths += %W( #{Rails.root.join('test', 'mocks', Rails.env)} )
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# Sweepers are observers
# don't load the sweepers while loading the database
ignore_rake_commands = %w[
db:schema:load
gems:install
clobber
noosfero:translations:compile
makemo
]
if $PROGRAM_NAME =~ /rake$/ && (ignore_rake_commands.include?(ARGV.first))
Noosfero::Plugin.should_load = false
else
config.active_record.observers = :article_sweeper, :role_assignment_sweeper, :friendship_sweeper, :category_sweeper, :block_sweeper
end
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = nil
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Straight support for assets from a rails 2 pattern
# See also config/initializers/assets.rb
config.assets.paths =
Dir.glob("{base,config/}plugins/*/{assets,public}/{,javascripts,stylesheets}") +
Dir.glob("public/{,javascripts,stylesheets}") +
# no precedence over core
Dir.glob("public/{designs/themes,user_themes}/*/{,javascripts,stylesheets}")
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
config.sass.preferred_syntax = :scss
config.sass.cache = true
config.sass.line_comments = false
# Returns a stable random session secret for this installation.
#
# The 128-character hex secret is generated once and cached in
# tmp/session.secret next to this config directory, so restarts keep
# existing sessions valid.
#
# @return [String] 128 lowercase hex characters
def noosfero_session_secret
  require 'fileutils'
  target_dir = File.join(File.dirname(__FILE__), '../tmp')
  FileUtils.mkdir_p(target_dir)
  file = File.join(target_dir, 'session.secret')
  # File.exists? is deprecated and removed in Ruby 3.2; use File.exist?
  if !File.exist?(file)
    secret = (1..128).map { %w[0 1 2 3 4 5 6 7 8 9 a b c d e f][rand(16)] }.join('')
    File.open(file, 'w') do |f|
      f.puts secret
    end
  end
  File.read(file).strip
end
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.secret_token = noosfero_session_secret
config.action_dispatch.session = {
:key => '_noosfero_session',
}
config.time_zone = File.read('/etc/timezone').split("\n").first
config.active_record.default_timezone = config.time_zone
config.paths['db/migrate'] += Dir.glob "#{Rails.root}/{baseplugins,config/plugins/*}/db/migrate"
config.i18n.load_path += Dir.glob "#{Rails.root}/{baseplugins,config/plugins/*}/locales/*.{rb,yml}"
Noosfero::Plugin.setup(config)
end
end
Fix time zone configuration in ActiveRecord
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'active_support/dependencies'
# FIXME this silences the warnings about Rails 2.3-style plugins under
# vendor/plugins, which are deprecated. Hiding those warnings makes it easier
# to work for now, but we should really look at putting those plugins away.
ActiveSupport::Deprecation.silenced = true
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Noosfero
class Application < Rails::Application
require 'noosfero/plugin'
# Adds custom attributes to the Set of allowed html attributes for the #sanitize helper
config.action_view.sanitized_allowed_attributes = 'align', 'border', 'alt', 'vspace', 'hspace', 'width', 'heigth', 'value', 'type', 'data', 'style', 'target', 'codebase', 'archive', 'classid', 'code', 'flashvars', 'scrolling', 'frameborder', 'controls', 'autoplay', 'colspan', 'rowspan'
# Adds custom tags to the Set of allowed html tags for the #sanitize helper
config.action_view.sanitized_allowed_tags = 'object', 'embed', 'param', 'table', 'tr', 'th', 'td', 'applet', 'comment', 'iframe', 'audio', 'video', 'source'
config.action_controller.include_all_helpers = false
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W( #{Rails.root.join('app', 'sweepers')} )
config.autoload_paths += Dir["#{config.root}/lib/**/"]
config.autoload_paths += Dir["#{config.root}/app/controllers/**/"]
config.autoload_paths += %W( #{Rails.root.join('test', 'mocks', Rails.env)} )
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# Sweepers are observers
# don't load the sweepers while loading the database
ignore_rake_commands = %w[
db:schema:load
gems:install
clobber
noosfero:translations:compile
makemo
]
if $PROGRAM_NAME =~ /rake$/ && (ignore_rake_commands.include?(ARGV.first))
Noosfero::Plugin.should_load = false
else
config.active_record.observers = :article_sweeper, :role_assignment_sweeper, :friendship_sweeper, :category_sweeper, :block_sweeper
end
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = nil
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Straight support for assets from a rails 2 pattern
# See also config/initializers/assets.rb
config.assets.paths =
Dir.glob("{base,config/}plugins/*/{assets,public}/{,javascripts,stylesheets}") +
Dir.glob("public/{,javascripts,stylesheets}") +
# no precedence over core
Dir.glob("public/{designs/themes,user_themes}/*/{,javascripts,stylesheets}")
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
config.sass.preferred_syntax = :scss
config.sass.cache = true
config.sass.line_comments = false
# Returns a stable random session secret for this installation.
#
# The 128-character hex secret is generated once and cached in
# tmp/session.secret next to this config directory, so restarts keep
# existing sessions valid.
#
# @return [String] 128 lowercase hex characters
def noosfero_session_secret
  require 'fileutils'
  target_dir = File.join(File.dirname(__FILE__), '../tmp')
  FileUtils.mkdir_p(target_dir)
  file = File.join(target_dir, 'session.secret')
  # File.exists? is deprecated and removed in Ruby 3.2; use File.exist?
  if !File.exist?(file)
    secret = (1..128).map { %w[0 1 2 3 4 5 6 7 8 9 a b c d e f][rand(16)] }.join('')
    File.open(file, 'w') do |f|
      f.puts secret
    end
  end
  File.read(file).strip
end
# Your secret key for verifying cookie session data integrity.
# If you change this key, all old sessions will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
config.secret_token = noosfero_session_secret
config.action_dispatch.session = {
:key => '_noosfero_session',
}
config.time_zone = File.read('/etc/timezone').split("\n").first
config.active_record.default_timezone = :local
config.paths['db/migrate'] += Dir.glob "#{Rails.root}/{baseplugins,config/plugins/*}/db/migrate"
config.i18n.load_path += Dir.glob "#{Rails.root}/{baseplugins,config/plugins/*}/locales/*.{rb,yml}"
Noosfero::Plugin.setup(config)
end
end
|
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line.
  # Parentheses around the splat avoid Ruby's
  # "`*' interpreted as argument prefix" warning.
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end
module Catarse
  class Application < Rails::Application
    # Use the custom "email" layout for all Devise mailers (email.haml or email.erb)
    config.to_prepare do
      Devise::Mailer.layout "email"
    end
    # Dump the schema as SQL (structure.sql) instead of schema.rb
    config.active_record.schema_format = :sql
    config.autoload_paths += %W(#{config.root}/lib #{config.root}/lib/** #{config.root}/app/presenters #{config.root}/app/presenters/** #{config.root}/app/business/ #{config.root}/app/business/**)
    config.encoding = "utf-8"
    # Keep credentials out of the logs
    config.filter_parameters += [:password, :password_confirmation]
    config.time_zone = 'Brasilia'
    config.generators do |g|
      g.template_engine :haml
      g.test_framework :rspec, :fixture => false, :views => false
    end
    config.active_record.observers = [:backer_observer, :user_observer, :notification_observer, :update_observer, :project_observer, :payment_notification_observer]
    # Enable the asset pipeline
    config.assets.enabled = true
    # Do not initialize the full app when precompiling assets
    config.assets.initialize_on_precompile = false
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
Since Rails 3.2, folders inside app/ are autoloaded automatically and need not be added to autoload_paths
# Modern, equivalent form of require File.expand_path('../boot', __FILE__);
# consistent with the require_relative style used elsewhere in this repository.
require_relative 'boot'
require 'rails/all'
if defined?(Bundler)
  # If you precompile assets before deploying to production, use this line.
  # Parentheses around the splat avoid Ruby's
  # "`*' interpreted as argument prefix" warning.
  Bundler.require(*Rails.groups(:assets => %w(development test)))
  # If you want your assets lazily compiled in production, use this line
  # Bundler.require(:default, :assets, Rails.env)
end
module Catarse
  class Application < Rails::Application
    # Use the custom "email" layout for all Devise mailers (email.haml or email.erb)
    config.to_prepare do
      Devise::Mailer.layout "email"
    end
    # Dump the schema as SQL (structure.sql) instead of schema.rb
    config.active_record.schema_format = :sql
    # Since Rails 3.1, all folders inside app/ will be loaded automatically
    config.autoload_paths += %W(#{config.root}/lib #{config.root}/lib/**)
    # Default encoding for the server
    config.encoding = "utf-8"
    # Keep credentials out of the logs
    config.filter_parameters += [:password, :password_confirmation]
    config.time_zone = 'Brasilia'
    config.generators do |g|
      g.test_framework :rspec, :fixture => false, :views => false
    end
    config.active_record.observers = [
      :backer_observer, :user_observer, :notification_observer,
      :update_observer, :project_observer, :payment_notification_observer
    ]
    # Enable the asset pipeline
    config.assets.enabled = true
    # Don't initialize the app when compiling
    config.assets.initialize_on_precompile = false
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
|
require 'net/http'
require 'uri'
require 'json'
require 'timers'
require 'sys/proctable'
include Sys
module Instana
  # In-process agent responsible for announcing this Ruby process to the
  # Instana host agent and then periodically reporting metrics and traces.
  # It is a small state machine with two states: :unannounced and :announced.
  class Agent
    # Current state of the agent: :unannounced or :announced
    attr_accessor :state
    # The agent UUID returned by the host agent on a successful announce
    attr_accessor :agent_uuid

    LOCALHOST = '127.0.0.1'.freeze
    MIME_JSON = 'application/json'.freeze
    DISCOVERY_PATH = 'com.instana.plugin.ruby.discovery'.freeze

    def initialize
      # Host agent defaults. Can be configured via Instana.config
      @host = LOCALHOST
      @port = 42699
      # Supported two states (unannounced & announced)
      @state = :unannounced
      # Store the pid from process boot so we can detect forks
      @pid = Process.pid
      # Snapshot data is collected once per process but resent
      # every 10 minutes along side process metrics.
      @snapshot = take_snapshot
      # Set last snapshot to just under 10 minutes ago
      # so we send a snapshot sooner than later
      @last_snapshot = Time.now - 570
      # Timestamp of the last successful response from
      # entity data reporting.
      @entity_last_seen = Time.now
      # Two timers, one for each state (unannounced & announced)
      @timers = ::Timers::Group.new
      @announce_timer = nil
      @collect_timer = nil
      # Detect platform flags
      @is_linux = (RUBY_PLATFORM =~ /linux/i) ? true : false
      @is_osx = (RUBY_PLATFORM =~ /darwin/i) ? true : false
      # In case we're running in Docker, have the default gateway available
      # to check in case we're running in bridged network mode
      if @is_linux
        @default_gateway = `/sbin/ip route | awk '/default/ { print $3 }'`.chomp
      else
        @default_gateway = nil
      end
      # The agent UUID returned from the host agent
      @agent_uuid = nil
      collect_process_info
    end

    # Used in class initialization and after a fork, this method
    # collects up process information and stores it in @process
    #
    def collect_process_info
      @process = {}
      cmdline = ProcTable.ps(Process.pid).cmdline.split("\0")
      @process[:name] = cmdline.shift
      @process[:arguments] = cmdline
      if @is_osx
        # Handle OSX bug where env vars show up at the end of process name
        # such as MANPATH etc..
        @process[:name].gsub!(/[_A-Z]+=\S+/, '')
        @process[:name].rstrip!
      end
      @process[:original_pid] = @pid
      # This is usually Process.pid but in the case of docker, the host agent
      # will return to us the true host pid in which we use to report data.
      @process[:report_pid] = nil
    end

    # Determine whether the pid has changed since Agent start.
    #
    # @return [Boolean] true or false to indicate if forked
    #
    def forked?
      @pid != Process.pid
    end

    # Used post fork to re-initialize state and restart communications with
    # the host agent.
    #
    def after_fork
      ::Instana.logger.debug "after_fork hook called. Falling back to unannounced state."
      # Re-collect process information post fork
      @pid = Process.pid
      collect_process_info
      # Set last snapshot to 10 minutes ago
      # so we send a snapshot sooner than later
      @last_snapshot = Time.now - 600
      transition_to(:unannounced)
      start
    end

    # Sets up periodic timers and starts the agent in a background thread.
    #
    def setup
      # The announce timer
      # We attempt to announce this ruby sensor to the host agent.
      # In case of failure, we try again in 30 seconds.
      @announce_timer = @timers.now_and_every(30) do
        if forked?
          after_fork
          break
        end
        if host_agent_ready? && announce_sensor
          ::Instana.logger.debug "Announce successful. Switching to metrics collection."
          transition_to(:announced)
        end
      end
      # The collect timer
      # If we are in announced state, send metric data (only delta reporting)
      # every ::Instana::Collector.interval seconds.
      @collect_timer = @timers.every(::Instana::Collector.interval) do
        if @state == :announced
          if forked?
            after_fork
            break
          end
          unless ::Instana::Collector.collect_and_report
            # If report has been failing for more than 1 minute,
            # fall back to unannounced state
            if (Time.now - @entity_last_seen) > 60
              ::Instana.logger.debug "Metrics reporting failed for >1 min. Falling back to unannounced state."
              transition_to(:unannounced)
            end
          end
          ::Instana.processor.send
        end
      end
    end

    # Starts the timer loop for the timers that were initialized
    # in the setup method. This is blocking and should only be
    # called from an already initialized background thread.
    #
    def start
      loop {
        if @state == :unannounced
          @collect_timer.pause
          @announce_timer.resume
        else
          @announce_timer.pause
          @collect_timer.resume
        end
        @timers.wait
      }
    end

    # Indicates if the agent is ready to send metrics
    # or data.
    #
    def ready?
      # In test, we're always ready :-)
      return true if ENV['INSTANA_GEM_TEST']
      @state == :announced
    end

    # Returns the PID that we are reporting to
    #
    def report_pid
      @process[:report_pid]
    end

    # Collect process ID, name and arguments to notify
    # the host agent.
    #
    # @return [Boolean] true on a successful announce, false otherwise
    #
    def announce_sensor
      announce_payload = {}
      announce_payload[:pid] = pid_namespace? ? get_real_pid : Process.pid
      announce_payload[:args] = @process[:arguments]
      uri = URI.parse("http://#{@host}:#{@port}/#{DISCOVERY_PATH}")
      req = Net::HTTP::Put.new(uri)
      req.body = announce_payload.to_json
      # ::Instana.logger.debug "Announce: http://#{@host}:#{@port}/#{DISCOVERY_PATH} - payload: #{req.body}"
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        data = JSON.parse(response.body)
        @process[:report_pid] = data['pid']
        @agent_uuid = data['agentUuid']
        true
      else
        false
      end
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Method to report metrics data to the host agent.
    #
    # @param payload [Hash] The collection of metrics to report.
    # @return [Boolean] true on success, false otherwise
    #
    def report_entity_data(payload)
      with_snapshot = false
      path = "com.instana.plugin.ruby.#{@process[:report_pid]}"
      uri = URI.parse("http://#{@host}:#{@port}/#{path}")
      req = Net::HTTP::Post.new(uri)
      # Every 10 minutes, send snapshot data as well
      if (Time.now - @last_snapshot) > 600
        with_snapshot = true
        payload.merge!(@snapshot)
        # Add in process related data that could have changed since
        # snapshot was taken.
        p = { :pid => @process[:report_pid] }
        p[:name] = @process[:name]
        p[:exec_args] = @process[:arguments]
        payload.merge!(p)
      end
      req.body = payload.to_json
      response = make_host_agent_request(req)
      if response
        last_entity_response = response.code.to_i
        #::Instana.logger.debug "entity http://#{@host}:#{@port}/#{path}: response=#{last_entity_response}: #{payload.to_json}"
        if last_entity_response == 200
          @entity_last_seen = Time.now
          @last_snapshot = Time.now if with_snapshot
          return true
        end
      end
      false
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Accept and report spans to the host agent.
    #
    # @param spans [Array] An array of [Span]
    # @return [Boolean]
    #
    def report_spans(spans)
      return unless @state == :announced
      path = "com.instana.plugin.ruby/traces.#{@process[:report_pid]}"
      uri = URI.parse("http://#{@host}:#{@port}/#{path}")
      req = Net::HTTP::Post.new(uri)
      req.body = spans.to_json
      response = make_host_agent_request(req)
      if response
        last_trace_response = response.code.to_i
        #::Instana.logger.debug "traces response #{last_trace_response}: #{spans.to_json}"
        if [200, 204].include?(last_trace_response)
          return true
        end
      end
      false
    rescue => e
      ::Instana.logger.debug "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Check that the host agent is available and can be contacted. This will
    # first check localhost and if not, then attempt on the default gateway
    # for docker in bridged mode. It will save where it found the host agent
    # in @host that is used in subsequent HTTP calls.
    #
    def host_agent_ready?
      # Localhost
      uri = URI.parse("http://#{LOCALHOST}:#{@port}/")
      req = Net::HTTP::Get.new(uri)
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        @host = LOCALHOST
        return true
      end
      return false unless @is_linux
      # We are potentially running on Docker in bridged networking mode.
      # Attempt to contact default gateway
      uri = URI.parse("http://#{@default_gateway}:#{@port}/")
      req = Net::HTTP::Get.new(uri)
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        @host = @default_gateway
        return true
      end
      false
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    private

    # Handles any/all steps required in the transition
    # between states.
    #
    # @param state [Symbol] Can be 1 of 2 possible states:
    #   `:announced`, `:unannounced`
    #
    def transition_to(state)
      case state
      when :announced
        # announce successful; set state
        @state = :announced
        # Reset the entity timer
        @entity_last_seen = Time.now
        # Set last snapshot to 10 minutes ago
        # so we send a snapshot on first report
        @last_snapshot = Time.now - 601
      when :unannounced
        @state = :unannounced
      else
        ::Instana.logger.warn "Unknown agent state: #{state}"
      end
    end

    # Centralization of the net/http communications
    # with the host agent. Pass in a prepared <req>
    # of type Net::HTTP::Get|Put|Head
    #
    # @param req [Net::HTTP::Req] A prepared Net::HTTP request object of the type
    #   you wish to make (Get, Put, Post etc.)
    # @return [Net::HTTPResponse, nil] the response, or nil on connection failure
    #
    def make_host_agent_request(req)
      req['Accept'] = MIME_JSON
      req['Content-Type'] = MIME_JSON
      response = nil
      Net::HTTP.start(req.uri.hostname, req.uri.port, :open_timeout => 1, :read_timeout => 1) do |http|
        response = http.request(req)
      end
      response
    rescue Errno::ECONNREFUSED
      # Host agent simply isn't there; not an error worth logging.
      return nil
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return nil
    end

    # Indicates whether we are running in a pid namespace (such as
    # Docker).
    #
    def pid_namespace?
      return false unless @is_linux
      Process.pid != get_real_pid
    end

    # Attempts to determine the true process ID by querying the
    # /proc/<pid>/sched file. This works on linux currently.
    #
    def get_real_pid
      raise RuntimeError, "Unsupported platform: get_real_pid" unless @is_linux
      v = File.open("/proc/#{Process.pid}/sched", &:readline)
      v.match(/\d+/).to_s.to_i
    end

    # Method to collect up process info for snapshots. This
    # is generally used once per process.
    #
    def take_snapshot
      data = {}
      data[:sensorVersion] = ::Instana::VERSION
      data[:ruby_version] = RUBY_VERSION
      # Since a snapshot is only taken on process boot,
      # this is ok here.
      data[:start_time] = Time.now.to_s
      # Framework Detection
      if defined?(::RailsLts::VERSION)
        data[:framework] = "Rails on Rails LTS-#{::RailsLts::VERSION}"
      elsif defined?(::Rails.version)
        data[:framework] = "Ruby on Rails #{::Rails.version}"
      elsif defined?(::Grape::VERSION)
        data[:framework] = "Grape #{::Grape::VERSION}"
      elsif defined?(::Padrino::VERSION)
        data[:framework] = "Padrino #{::Padrino::VERSION}"
      elsif defined?(::Sinatra::VERSION)
        data[:framework] = "Sinatra #{::Sinatra::VERSION}"
      end
      data
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return data
    end
  end
end
Improved fork detection and handling.
require 'net/http'
require 'uri'
require 'json'
require 'timers'
require 'sys/proctable'
include Sys
module Instana
  # In-process agent responsible for announcing this Ruby process to the
  # Instana host agent and then periodically reporting metrics and traces.
  # It is a small state machine with two states: :unannounced and :announced.
  class Agent
    # Current state of the agent: :unannounced or :announced
    attr_accessor :state
    # The agent UUID returned by the host agent on a successful announce
    attr_accessor :agent_uuid

    LOCALHOST = '127.0.0.1'.freeze
    MIME_JSON = 'application/json'.freeze
    DISCOVERY_PATH = 'com.instana.plugin.ruby.discovery'.freeze

    def initialize
      # Host agent defaults. Can be configured via Instana.config
      @host = LOCALHOST
      @port = 42699
      # Supported two states (unannounced & announced)
      @state = :unannounced
      # Store the pid from process boot so we can detect forks
      @pid = Process.pid
      # Snapshot data is collected once per process but resent
      # every 10 minutes along side process metrics.
      @snapshot = take_snapshot
      # Set last snapshot to just under 10 minutes ago
      # so we send a snapshot sooner than later
      @last_snapshot = Time.now - 570
      # Timestamp of the last successful response from
      # entity data reporting.
      @entity_last_seen = Time.now
      # Two timers, one for each state (unannounced & announced)
      @timers = ::Timers::Group.new
      @announce_timer = nil
      @collect_timer = nil
      # Detect platform flags
      @is_linux = (RUBY_PLATFORM =~ /linux/i) ? true : false
      @is_osx = (RUBY_PLATFORM =~ /darwin/i) ? true : false
      # In case we're running in Docker, have the default gateway available
      # to check in case we're running in bridged network mode
      if @is_linux
        @default_gateway = `/sbin/ip route | awk '/default/ { print $3 }'`.chomp
      else
        @default_gateway = nil
      end
      # The agent UUID returned from the host agent
      @agent_uuid = nil
      collect_process_info
    end

    # Used in class initialization and after a fork, this method
    # collects up process information and stores it in @process
    #
    def collect_process_info
      @process = {}
      cmdline = ProcTable.ps(Process.pid).cmdline.split("\0")
      @process[:name] = cmdline.shift
      @process[:arguments] = cmdline
      if @is_osx
        # Handle OSX bug where env vars show up at the end of process name
        # such as MANPATH etc..
        @process[:name].gsub!(/[_A-Z]+=\S+/, '')
        @process[:name].rstrip!
      end
      @process[:original_pid] = @pid
      # This is usually Process.pid but in the case of docker, the host agent
      # will return to us the true host pid in which we use to report data.
      @process[:report_pid] = nil
    end

    # Determine whether the pid has changed since Agent start.
    #
    # @return [Boolean] true or false to indicate if forked
    #
    def forked?
      @pid != Process.pid
    end

    # Used post fork to re-initialize state and restart communications with
    # the host agent.
    #
    def after_fork
      ::Instana.logger.debug "after_fork hook called. Falling back to unannounced state and spawning a new background agent thread."
      # Re-collect process information post fork
      @pid = Process.pid
      collect_process_info
      transition_to(:unannounced)
      setup
      spawn_background_thread
    end

    # Spawns the background thread and calls start. This method is separated
    # out for those who wish to control which thread the background agent will
    # run in.
    #
    # This method can be overridden with the following:
    #
    # module Instana
    #   class Agent
    #     def spawn_background_thread
    #       # start thread
    #       start
    #     end
    #   end
    # end
    #
    def spawn_background_thread
      # The thread calling fork is the only thread in the created child process.
      # fork doesn’t copy other threads.
      # Restart our background thread
      Thread.new do
        start
      end
    end

    # Sets up periodic timers and starts the agent in a background thread.
    #
    def setup
      # The announce timer
      # We attempt to announce this ruby sensor to the host agent.
      # In case of failure, we try again in 30 seconds.
      @announce_timer = @timers.now_and_every(30) do
        if host_agent_ready? && announce_sensor
          ::Instana.logger.debug "Announce successful. Switching to metrics collection. pid: #{Process.pid}"
          transition_to(:announced)
        end
      end
      # The collect timer
      # If we are in announced state, send metric data (only delta reporting)
      # every ::Instana::Collector.interval seconds.
      @collect_timer = @timers.every(::Instana::Collector.interval) do
        if @state == :announced
          unless ::Instana::Collector.collect_and_report
            # If report has been failing for more than 1 minute,
            # fall back to unannounced state
            if (Time.now - @entity_last_seen) > 60
              ::Instana.logger.debug "Metrics reporting failed for >1 min. Falling back to unannounced state."
              transition_to(:unannounced)
            end
          end
          ::Instana.processor.send
        end
      end
    end

    # Starts the timer loop for the timers that were initialized
    # in the setup method. This is blocking and should only be
    # called from an already initialized background thread.
    #
    def start
      loop do
        if @state == :unannounced
          @collect_timer.pause
          @announce_timer.resume
        else
          @announce_timer.pause
          @collect_timer.resume
        end
        @timers.wait
      end
    ensure
      ::Instana.logger.debug "Agent start method exiting. state: #{@state} pid: #{Process.pid}"
    end

    # Indicates if the agent is ready to send metrics
    # and/or data.
    #
    def ready?
      # In test, we're always ready :-)
      return true if ENV['INSTANA_GEM_TEST']
      if forked?
        ::Instana.logger.debug "Instana: detected fork. Calling after_fork"
        after_fork
      end
      @state == :announced
    rescue => e
      ::Instana.logger.debug "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Returns the PID that we are reporting to
    #
    def report_pid
      @process[:report_pid]
    end

    # Collect process ID, name and arguments to notify
    # the host agent.
    #
    # @return [Boolean] true on a successful announce, false otherwise
    #
    def announce_sensor
      announce_payload = {}
      announce_payload[:pid] = pid_namespace? ? get_real_pid : Process.pid
      announce_payload[:args] = @process[:arguments]
      uri = URI.parse("http://#{@host}:#{@port}/#{DISCOVERY_PATH}")
      req = Net::HTTP::Put.new(uri)
      req.body = announce_payload.to_json
      # ::Instana.logger.debug "Announce: http://#{@host}:#{@port}/#{DISCOVERY_PATH} - payload: #{req.body}"
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        data = JSON.parse(response.body)
        @process[:report_pid] = data['pid']
        @agent_uuid = data['agentUuid']
        true
      else
        false
      end
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Method to report metrics data to the host agent.
    #
    # @param payload [Hash] The collection of metrics to report.
    # @return [Boolean] true on success, false otherwise
    #
    def report_entity_data(payload)
      with_snapshot = false
      path = "com.instana.plugin.ruby.#{@process[:report_pid]}"
      uri = URI.parse("http://#{@host}:#{@port}/#{path}")
      req = Net::HTTP::Post.new(uri)
      # Every 10 minutes, send snapshot data as well
      if (Time.now - @last_snapshot) > 600
        with_snapshot = true
        payload.merge!(@snapshot)
        # Add in process related data that could have changed since
        # snapshot was taken.
        p = { :pid => @process[:report_pid] }
        p[:name] = @process[:name]
        p[:exec_args] = @process[:arguments]
        payload.merge!(p)
      end
      req.body = payload.to_json
      response = make_host_agent_request(req)
      if response
        last_entity_response = response.code.to_i
        #::Instana.logger.debug "entity http://#{@host}:#{@port}/#{path}: response=#{last_entity_response}: #{payload.to_json}"
        if last_entity_response == 200
          @entity_last_seen = Time.now
          @last_snapshot = Time.now if with_snapshot
          return true
        end
      end
      false
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Accept and report spans to the host agent.
    #
    # @param spans [Array] An array of [Span]
    # @return [Boolean]
    #
    def report_spans(spans)
      return unless @state == :announced
      path = "com.instana.plugin.ruby/traces.#{@process[:report_pid]}"
      uri = URI.parse("http://#{@host}:#{@port}/#{path}")
      req = Net::HTTP::Post.new(uri)
      req.body = spans.to_json
      response = make_host_agent_request(req)
      if response
        last_trace_response = response.code.to_i
        #::Instana.logger.debug "traces response #{last_trace_response}: #{spans.to_json}"
        if [200, 204].include?(last_trace_response)
          return true
        end
      end
      false
    rescue => e
      ::Instana.logger.debug "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    # Check that the host agent is available and can be contacted. This will
    # first check localhost and if not, then attempt on the default gateway
    # for docker in bridged mode. It will save where it found the host agent
    # in @host that is used in subsequent HTTP calls.
    #
    def host_agent_ready?
      # Localhost
      uri = URI.parse("http://#{LOCALHOST}:#{@port}/")
      req = Net::HTTP::Get.new(uri)
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        @host = LOCALHOST
        return true
      end
      return false unless @is_linux
      # We are potentially running on Docker in bridged networking mode.
      # Attempt to contact default gateway
      uri = URI.parse("http://#{@default_gateway}:#{@port}/")
      req = Net::HTTP::Get.new(uri)
      response = make_host_agent_request(req)
      if response && (response.code.to_i == 200)
        @host = @default_gateway
        return true
      end
      false
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return false
    end

    private

    # Handles any/all steps required in the transition
    # between states.
    #
    # @param state [Symbol] Can be 1 of 2 possible states:
    #   `:announced`, `:unannounced`
    #
    def transition_to(state)
      case state
      when :announced
        # announce successful; set state
        @state = :announced
        # Reset the entity timer
        @entity_last_seen = Time.now
        # Set last snapshot to 10 minutes ago
        # so we send a snapshot on first report
        @last_snapshot = Time.now - 601
      when :unannounced
        @state = :unannounced
        # Set last snapshot to 10 minutes ago
        # so we send a snapshot on first report
        @last_snapshot = Time.now - 601
      else
        ::Instana.logger.warn "Unknown agent state: #{state}"
      end
    end

    # Centralization of the net/http communications
    # with the host agent. Pass in a prepared <req>
    # of type Net::HTTP::Get|Put|Head
    #
    # @param req [Net::HTTP::Req] A prepared Net::HTTP request object of the type
    #   you wish to make (Get, Put, Post etc.)
    # @return [Net::HTTPResponse, nil] the response, or nil on connection failure
    #
    def make_host_agent_request(req)
      req['Accept'] = MIME_JSON
      req['Content-Type'] = MIME_JSON
      response = nil
      Net::HTTP.start(req.uri.hostname, req.uri.port, :open_timeout => 1, :read_timeout => 1) do |http|
        response = http.request(req)
      end
      response
    rescue Errno::ECONNREFUSED
      # Host agent simply isn't there; not an error worth logging.
      return nil
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return nil
    end

    # Indicates whether we are running in a pid namespace (such as
    # Docker).
    #
    def pid_namespace?
      return false unless @is_linux
      Process.pid != get_real_pid
    end

    # Attempts to determine the true process ID by querying the
    # /proc/<pid>/sched file. This works on linux currently.
    #
    def get_real_pid
      raise RuntimeError, "Unsupported platform: get_real_pid" unless @is_linux
      v = File.open("/proc/#{Process.pid}/sched", &:readline)
      v.match(/\d+/).to_s.to_i
    end

    # Method to collect up process info for snapshots. This
    # is generally used once per process.
    #
    def take_snapshot
      data = {}
      data[:sensorVersion] = ::Instana::VERSION
      data[:ruby_version] = RUBY_VERSION
      # Since a snapshot is only taken on process boot,
      # this is ok here.
      data[:start_time] = Time.now.to_s
      # Framework Detection
      if defined?(::RailsLts::VERSION)
        data[:framework] = "Rails on Rails LTS-#{::RailsLts::VERSION}"
      elsif defined?(::Rails.version)
        data[:framework] = "Ruby on Rails #{::Rails.version}"
      elsif defined?(::Grape::VERSION)
        data[:framework] = "Grape #{::Grape::VERSION}"
      elsif defined?(::Padrino::VERSION)
        data[:framework] = "Padrino #{::Padrino::VERSION}"
      elsif defined?(::Sinatra::VERSION)
        data[:framework] = "Sinatra #{::Sinatra::VERSION}"
      end
      data
    rescue => e
      ::Instana.logger.error "#{__method__}:#{File.basename(__FILE__)}:#{__LINE__}: #{e.message}"
      ::Instana.logger.debug e.backtrace.join("\r\n")
      return data
    end
  end
end
|
#
# Copyright 2013 Marin Litoiu, Hongbin Lu, Mark Shtern, Bradlley Simmons, Mike
# Smit
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module PatternDeployer
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true
    # Enable the asset pipeline
    config.assets.enabled = true
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    # Route exceptions through the application's own error pages.
    config.exceptions_app = self.routes
    #####################################################
    #                                                   #
    #               custom configuration                #
    #                                                   #
    #####################################################
    # The location of chef repository
    config.chef_repo_dir = "#{Rails.root}/chef-repo"
    # link chef-repo to the installed gem
    # (File.exists? is deprecated and removed in Ruby 3.2; use File.exist?)
    unless File.exist?(config.chef_repo_dir)
      FileUtils.ln_s Gem.loaded_specs["customized-chef-repo"].full_gem_path.strip, config.chef_repo_dir
    end
    # Amazon EC2
    config.ec2 = "ec2"
    # Openstack
    config.openstack = "openstack"
    # The deployment is not on any cloud
    config.notcloud = "none"
    # The cloud provider this application support
    config.supported_clouds = [config.ec2, config.openstack]
    # The supported node service this application support.
    # Each node service corresponse to a set of scripts that will run to config the node on deployment
    #config.supported_node_services = ["openvpn_server", "openvpn_client", "database_server", "web_balancer", "web_server",
    #                                  "snort_prepost", "snort", "front_end_balancer", "ossec_client", "virsh",
    #                                  "dns_client", "chef_server", "self_install"]
    # The path to the schema file that will be used to validate the application topology
    config.schema_file = [Rails.root, "lib", "NestedQEMU-schema.xsd"].join("/")
    # The location of the bootstrap template file
    config.bootstrap_templates_dir = [config.chef_repo_dir, ".chef", "bootstrap"].join("/")
    # The location of chef config file
    config.chef_config_file = "#{Rails.root}/chef-repo/.chef/knife.rb"
    # The deployment of application pattern will stop if the deployment time is more than this.
    config.chef_max_deploy_time = 3600
    # The timeout for waiting ip address of another deploying instance
    config.chef_wait_ip_timeout = 300
    # The timeout for waiting the virtual ip address of another deploying instance
    config.chef_wait_vpnip_timeout = 600
    # Auto allocate floating IP when creating servers in OpenStack
    config.openstack_auto_allocate_ip = true
    # Auto deallocate floating IP when shuting down servers in OpenStack
    config.openstack_auto_deallocate_ip = true
    # Get the IP address of current server. First try the EC2 metadata
    # service; on failure, retry once against ifconfig.me; finally fall
    # back to Ohai. The ||= matters: on retry, query_url keeps the value
    # assigned in the rescue clause below.
    begin
      query_url ||= "http://169.254.169.254/latest/meta-data/public-ipv4"
      ipaddress = Excon.get(query_url, :connect_timeout => 5).body
      require 'ipaddr'
      config.ipaddress = IPAddr.new(ipaddress).to_s
    rescue ArgumentError, Excon::Errors::Timeout
      alternative_url = "http://ifconfig.me/ip"
      if query_url != alternative_url
        query_url = alternative_url
        retry
      end
      # Use Ohai if none of above work
      require 'ohai'
      ohai = Ohai::System.new
      ohai.all_plugins
      config.ipaddress = ohai[:ipaddress]
    end
    # A list of nodes definition. Node declared here can be reference without declared in pattern.
    config.nodes = [
      "<node id='PDS'>
        <is_external>true</is_external>
        <server_ip>#{config.ipaddress}</server_ip>
      </node>"
    ]
  end
end
Changed PDS to private IP
#
# Copyright 2013 Marin Litoiu, Hongbin Lu, Mark Shtern, Bradlley Simmons, Mike
# Smit
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module PatternDeployer
  # Rails application configuration for the Pattern Deployment Service (PDS).
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    config.autoload_paths += %W(#{config.root}/lib)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # Serve error pages from the application's own routes.
    config.exceptions_app = self.routes

    #####################################################
    #                                                   #
    #               custom configuration                #
    #                                                   #
    #####################################################

    # The location of the chef repository
    config.chef_repo_dir = "#{Rails.root}/chef-repo"
    # Link chef-repo to the installed gem.
    # FIX: File.exists? is deprecated (and removed in Ruby 3.2); use File.exist?.
    unless File.exist?(config.chef_repo_dir)
      FileUtils.ln_s Gem.loaded_specs["customized-chef-repo"].full_gem_path.strip, config.chef_repo_dir
    end

    # Amazon EC2
    config.ec2 = "ec2"
    # OpenStack
    config.openstack = "openstack"
    # The deployment is not on any cloud
    config.notcloud = "none"
    # The cloud providers this application supports
    config.supported_clouds = [config.ec2, config.openstack]
    # The node services this application supports.
    # Each node service corresponds to a set of scripts that will run to configure the node on deployment.
    #config.supported_node_services = ["openvpn_server", "openvpn_client", "database_server", "web_balancer", "web_server",
    #                                  "snort_prepost", "snort", "front_end_balancer", "ossec_client", "virsh",
    #                                  "dns_client", "chef_server", "self_install"]

    # The path to the schema file that will be used to validate the application topology
    config.schema_file = [Rails.root, "lib", "NestedQEMU-schema.xsd"].join("/")
    # The location of the bootstrap template files
    config.bootstrap_templates_dir = [config.chef_repo_dir, ".chef", "bootstrap"].join("/")
    # The location of the chef config file
    config.chef_config_file = "#{Rails.root}/chef-repo/.chef/knife.rb"
    # The deployment of an application pattern will stop if it takes longer than this (seconds).
    config.chef_max_deploy_time = 3600
    # The timeout (seconds) for waiting for the IP address of another deploying instance
    config.chef_wait_ip_timeout = 300
    # The timeout (seconds) for waiting for the virtual IP address of another deploying instance
    config.chef_wait_vpnip_timeout = 600
    # Auto-allocate a floating IP when creating servers in OpenStack
    config.openstack_auto_allocate_ip = true
    # Auto-deallocate the floating IP when shutting down servers in OpenStack
    config.openstack_auto_deallocate_ip = true

    # Determine the public and private IP addresses of the current server at boot.
    # IPAddr.new raises ArgumentError on non-IP input (e.g. an HTML error page),
    # which drives the fallback chain below.
    require 'ipaddr'

    # Public IP: try the cloud metadata service, then ifconfig.me, then Ohai.
    begin
      query_url ||= "http://169.254.169.254/latest/meta-data/public-ipv4"
      public_ip = Excon.get(query_url, :connect_timeout => 5).body
      config.public_ip = IPAddr.new(public_ip).to_s
    rescue ArgumentError, Excon::Errors::Timeout
      alternative_url = "http://ifconfig.me/ip"
      if query_url != alternative_url
        query_url = alternative_url
        retry
      end
      # Use Ohai if none of the above work
      require 'ohai'
      ohai = Ohai::System.new
      ohai.all_plugins
      config.public_ip = ohai[:ipaddress]
    end

    # Private IP: try the cloud metadata service, then Ohai.
    begin
      # BUG FIX: the EC2 metadata key for the private address is "local-ipv4";
      # "private-ipv4" does not exist, so the lookup always fell through to Ohai.
      query_url = "http://169.254.169.254/latest/meta-data/local-ipv4"
      private_ip = Excon.get(query_url, :connect_timeout => 5).body
      config.private_ip = IPAddr.new(private_ip).to_s
    rescue ArgumentError, Excon::Errors::Timeout
      # Use Ohai if the metadata service is unavailable
      require 'ohai'
      ohai = Ohai::System.new
      ohai.all_plugins
      config.private_ip = ohai[:ipaddress]
    end

    # A list of node definitions. Nodes declared here can be referenced
    # without being declared in a pattern.
    config.nodes = [
      "<node id='PDS'>
        <is_external>true</is_external>
        <server_ip>#{config.private_ip}</server_ip>
      </node>"
    ]
  end
end
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)
require_relative '../lib/canvas_yaml'
# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
module CanvasRails
  # Main Rails application class for Canvas LMS. Everything here runs at boot,
  # before initializers; order is significant for several of the patches below.
  class Application < Rails::Application
    # Put the Rails root on the load path so top-level files can be required directly.
    $LOAD_PATH << config.root.to_s
    config.encoding = 'utf-8'
    require 'logging_filter'
    # Scrub sensitive request parameters (as declared by LoggingFilter) from logs.
    config.filter_parameters.concat LoggingFilter.filtered_parameters
    # Map application auth/HTTP errors onto proper HTTP status codes.
    config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
    config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenScopeError'] = 401
    config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
    config.action_dispatch.rescue_responses['CanvasHttp::CircuitBreakerError'] = 502
    # Drop the default X-Frame-Options header; presumably framing is controlled
    # elsewhere (e.g. CSP) -- confirm before relying on this.
    config.action_dispatch.default_headers.delete('X-Frame-Options')
    config.action_dispatch.default_headers['Referrer-Policy'] = 'no-referrer-when-downgrade'
    config.action_controller.forgery_protection_origin_check = true
    ActiveSupport.to_time_preserves_timezone = true

    # Use RSpec for generated test scaffolding.
    config.app_generators do |c|
      c.test_framework :rspec
      c.integration_tool :rspec
      c.performance_tool :rspec
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # See Rails::Configuration for more options.

    # Make Time.zone default to the specified zone, and make Active Record store time values
    # in the database in UTC, and return them converted to the specified local zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
    config.time_zone = 'UTC'

    # Logging: config/logging.yml (if present) selects the logger backend and level;
    # defaults to the plain Rails logger at debug level.
    log_config = File.exist?(Rails.root + 'config/logging.yml') && Rails.application.config_for(:logging)
    log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
    opts = {}
    require 'canvas_logger'
    config.log_level = log_config['log_level']
    log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
    opts[:skip_thread_context] = true if log_config['log_context'] == false
    case log_config["logger"]
    when "syslog"
      require 'syslog_wrapper'
      log_config["app_ident"] ||= "canvas-lms"
      log_config["daemon_ident"] ||= "canvas-lms-daemon"
      # OR together the configured syslog facility flags.
      facilities = 0
      (log_config["facilities"] || []).each do |facility|
        facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
      end
      ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
      opts[:include_pid] = true if log_config["include_pid"] == true
      config.logger = SyslogWrapper.new(ident, facilities, opts)
      config.logger.level = log_level
    else
      # File logger; daemons (delayed jobs) write to their own log file.
      log_path = config.paths['log'].first
      if ENV['RUNNING_AS_DAEMON'] == 'true'
        log_path = Rails.root+'log/delayed_job.log'
      end
      config.logger = CanvasLogger.new(log_path, log_level, opts)
    end

    # Activate observers that should always be running
    config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
    config.active_record.allow_unsafe_raw_sql = :disabled
    config.active_support.encode_big_decimal_as_string = false
    config.paths['lib'].eager_load!
    config.paths.add('app/middleware', eager_load: true, autoload_once: true)

    # prevent directory->module inference in these directories from wreaking
    # havoc on the app (e.g. stylesheets/base -> ::Base)
    config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
                                  #{Rails.root}/app/stylesheets)

    # Response compression, toggled at runtime via Consul dynamic settings.
    config.middleware.use Rack::Chunked
    config.middleware.use Rack::Deflater, if: -> (*) {
      ::Canvas::DynamicSettings.find(tree: :private)["enable_rack_deflation"]
    }
    config.middleware.use Rack::Brotli, if: -> (*) {
      ::Canvas::DynamicSettings.find(tree: :private)["enable_rack_brotli"]
    }

    config.i18n.load_path << Rails.root.join('config', 'locales', 'locales.yml')

    config.to_prepare do
      require_dependency 'canvas/plugins/default_plugins'
      ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
    end

    # Prepended into the PostgreSQL adapter (via Autoextend below) to disable
    # prepared statements by default, put the connection in nonblocking mode
    # (on newer Rails only; see CANVAS_RAILS5_2 guards), and add multi-host failover.
    module PostgreSQLEarlyExtensions
      def initialize(connection, logger, connection_parameters, config)
        # Default prepared_statements off unless the database config says otherwise.
        unless config.key?(:prepared_statements)
          config = config.dup
          config[:prepared_statements] = false
        end
        connection&.setnonblocking(true) unless CANVAS_RAILS5_2
        super(connection, logger, connection_parameters, config)
      end

      def connect
        # Try each configured host in order; only the last failure propagates.
        hosts = Array(@connection_parameters[:host]).presence || [nil]
        hosts.each_with_index do |host, index|
          begin
            connection_parameters = @connection_parameters.dup
            connection_parameters[:host] = host
            @connection = PG::Connection.connect(connection_parameters)
            @connection.setnonblocking(true) unless CANVAS_RAILS5_2
            configure_connection
            raise "Canvas requires PostgreSQL 9.5 or newer" unless postgresql_version >= 90500
            break
          rescue ::PG::Error => error
            if error.message.include?("does not exist")
              raise ActiveRecord::NoDatabaseError.new(error.message)
            elsif index == hosts.length - 1
              raise
            end
            # else try next host
          end
        end
      end
    end

    # Restrict the OID type-map preload query to types already known to the store
    # (and their array variants, prefixed "_"), instead of loading all pg types.
    module TypeMapInitializerExtensions
      def query_conditions_for_initial_load
        known_type_names = @store.keys.map { |n| "'#{n}'" } + @store.keys.map { |n| "'_#{n}'" }
        <<~SQL % [known_type_names.join(", "),]
          WHERE
            t.typname IN (%s)
        SQL
      end
    end

    Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
                    PostgreSQLEarlyExtensions,
                    method: :prepend)
    Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQL::OID::TypeMapInitializer",
                    TypeMapInitializerExtensions,
                    method: :prepend)

    # Track whether we are inside a SafeYAML.load call, so the Psych domain-type
    # hook below can refuse to constantize unexpected classes during safe parsing.
    SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
    module SafeYAMLWithFlag
      def load(*args)
        previous, self.safe_parsing = safe_parsing, true
        super
      ensure
        # Restore the previous flag even if parsing raises.
        self.safe_parsing = previous
      end
    end
    SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)

    Psych.add_domain_type("ruby/object", "Class") do |_type, val|
      if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
        raise "Cannot load class #{val} from YAML"
      end
      val.constantize
    end

    module PatchThorWarning
      # active_model_serializers should be passing `type: :boolean` here:
      # https://github.com/rails-api/active_model_serializers/blob/v0.9.0.alpha1/lib/active_model/serializer/generators/serializer/scaffold_controller_generator.rb#L10
      # but we don't really care about the warning, it only affects using the rails
      # generator for a resource
      #
      # Easiest way to avoid the warning for now is to patch thor
      def validate_default_type!
        return if switch_name == "--serializer"
        super
      end
    end
    Autoextend.hook(:"Thor::Option", PatchThorWarning, method: :prepend)

    # Extend any base classes, even gem classes
    Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }

    # tell Rails to use the native XML parser instead of REXML
    ActiveSupport::XmlMini.backend = 'Nokogiri'

    class NotImplemented < StandardError; end

    if defined?(PhusionPassenger)
      PhusionPassenger.on_event(:after_installing_signal_handlers) do
        Canvas::Reloader.trap_signal
      end
      PhusionPassenger.on_event(:starting_worker_process) do |forked|
        if forked
          # We're in smart spawning mode.
          # Reset imperium because it's possible to accidentally share an open http
          # socket between processes shortly after fork.
          Imperium::Agent.reset_default_client
          Imperium::Catalog.reset_default_client
          Imperium::Client.reset_default_client
          Imperium::Events.reset_default_client
          Imperium::KV.reset_default_client
          # it's really important to reset the default clients
          # BEFORE letting dynamic setting pull a new one.
          # do not change this order.
          Canvas::DynamicSettings.on_fork!
        else
          # We're in direct spawning mode. We don't need to do anything.
        end
      end
    else
      config.to_prepare do
        Canvas::Reloader.trap_signal
      end
    end

    # Ensure that the automatic redis reconnection on fork works
    # This is the default in redis-rb, but for some reason rails overrides it
    # See e.g. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22704
    ActiveSupport::Cache::RedisCacheStore::DEFAULT_REDIS_OPTIONS[:reconnect_attempts] = 1

    # don't wrap fields with errors with a <div class="fieldWithErrors" />,
    # since that could leak information (e.g. valid vs invalid username on
    # login page)
    config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }

    # Rack app that renders all dispatch exceptions through ApplicationController.
    class ExceptionsApp
      def call(env)
        req = ActionDispatch::Request.new(env)
        res = ApplicationController.make_response!(req)
        ApplicationController.dispatch('rescue_action_dispatch_exception', req, res)
      end
    end
    config.exceptions_app = ExceptionsApp.new

    config.before_initialize do
      config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
    end

    # Keep request-context / user tracking headers out of the rack-cache cache key.
    if config.action_dispatch.rack_cache != false
      config.action_dispatch.rack_cache[:ignore_headers] =
        %w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
    end

    def validate_secret_key_config!
      # no validation; we don't use Rails' CookieStore session middleware, so we
      # don't care about secret_key_base
    end

    initializer "canvas.init_dynamic_settings", before: "canvas.extend_shard" do
      settings = ConfigFile.load("consul")
      if settings.present?
        begin
          Canvas::DynamicSettings.config = settings
        rescue Imperium::UnableToConnectError
          Rails.logger.warn("INITIALIZATION: can't reach consul, attempts to load DynamicSettings will fail")
        end
      end
    end

    initializer "canvas.extend_shard", before: "active_record.initialize_database" do
      # have to do this before the default shard loads
      Switchman::Shard.serialize :settings, Hash
      Switchman.cache = -> { MultiCache.cache }
    end

    # Newer rails has this in rails proper
    attr_writer :credentials
    initializer "canvas.init_credentials", before: "active_record.initialize_database" do
      self.credentials = Canvas::Credentials.new(credentials)
    end

    # we don't know what middleware to make SessionsTimeout follow until after
    # we've loaded config/initializers/session_store.rb
    initializer("extend_middleware_stack", after: :load_config_initializers) do |app|
      app.config.middleware.insert_before(config.session_store, LoadAccount)
      app.config.middleware.swap(ActionDispatch::RequestId, RequestContextGenerator)
      app.config.middleware.insert_after(config.session_store, RequestContextSession)
      app.config.middleware.insert_before(Rack::Head, RequestThrottle)
      app.config.middleware.insert_before(Rack::MethodOverride, PreventNonMultipartParse)
    end
  end
end
don't set pg conns to nonblocking
reverts b67ad05fb256361ab57c48db010443c100427fbf
It turns out it's not actually necessary, because PG#exec_params _always_
uses "async" behavior when reading responses. It can also be a problem,
because it does _not_ verify that PQflush has completed, so it can cause
hangs with data left in the buffers.
Change-Id: I21618f07a508d91491d8c7e623caba2397d3e4d4
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/258978
Reviewed-by: Jacob Burroughs <8ecea6e385af5cf9f53123f5ca17fb5fd6a6d4b2@instructure.com>
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
QA-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
Product-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)
require_relative '../lib/canvas_yaml'
# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
module CanvasRails
  # Main Rails application class for Canvas LMS. Everything here runs at boot,
  # before initializers; order is significant for several of the patches below.
  class Application < Rails::Application
    # Put the Rails root on the load path so top-level files can be required directly.
    $LOAD_PATH << config.root.to_s
    config.encoding = 'utf-8'
    require 'logging_filter'
    # Scrub sensitive request parameters (as declared by LoggingFilter) from logs.
    config.filter_parameters.concat LoggingFilter.filtered_parameters
    # Map application auth/HTTP errors onto proper HTTP status codes.
    config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
    config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenScopeError'] = 401
    config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
    config.action_dispatch.rescue_responses['CanvasHttp::CircuitBreakerError'] = 502
    # Drop the default X-Frame-Options header; presumably framing is controlled
    # elsewhere (e.g. CSP) -- confirm before relying on this.
    config.action_dispatch.default_headers.delete('X-Frame-Options')
    config.action_dispatch.default_headers['Referrer-Policy'] = 'no-referrer-when-downgrade'
    config.action_controller.forgery_protection_origin_check = true
    ActiveSupport.to_time_preserves_timezone = true

    # Use RSpec for generated test scaffolding.
    config.app_generators do |c|
      c.test_framework :rspec
      c.integration_tool :rspec
      c.performance_tool :rspec
    end

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # See Rails::Configuration for more options.

    # Make Time.zone default to the specified zone, and make Active Record store time values
    # in the database in UTC, and return them converted to the specified local zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
    config.time_zone = 'UTC'

    # Logging: config/logging.yml (if present) selects the logger backend and level;
    # defaults to the plain Rails logger at debug level.
    log_config = File.exist?(Rails.root + 'config/logging.yml') && Rails.application.config_for(:logging)
    log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
    opts = {}
    require 'canvas_logger'
    config.log_level = log_config['log_level']
    log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
    opts[:skip_thread_context] = true if log_config['log_context'] == false
    case log_config["logger"]
    when "syslog"
      require 'syslog_wrapper'
      log_config["app_ident"] ||= "canvas-lms"
      log_config["daemon_ident"] ||= "canvas-lms-daemon"
      # OR together the configured syslog facility flags.
      facilities = 0
      (log_config["facilities"] || []).each do |facility|
        facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
      end
      ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
      opts[:include_pid] = true if log_config["include_pid"] == true
      config.logger = SyslogWrapper.new(ident, facilities, opts)
      config.logger.level = log_level
    else
      # File logger; daemons (delayed jobs) write to their own log file.
      log_path = config.paths['log'].first
      if ENV['RUNNING_AS_DAEMON'] == 'true'
        log_path = Rails.root+'log/delayed_job.log'
      end
      config.logger = CanvasLogger.new(log_path, log_level, opts)
    end

    # Activate observers that should always be running
    config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
    config.active_record.allow_unsafe_raw_sql = :disabled
    config.active_support.encode_big_decimal_as_string = false
    config.paths['lib'].eager_load!
    config.paths.add('app/middleware', eager_load: true, autoload_once: true)

    # prevent directory->module inference in these directories from wreaking
    # havoc on the app (e.g. stylesheets/base -> ::Base)
    config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
                                  #{Rails.root}/app/stylesheets)

    # Response compression, toggled at runtime via Consul dynamic settings.
    config.middleware.use Rack::Chunked
    config.middleware.use Rack::Deflater, if: -> (*) {
      ::Canvas::DynamicSettings.find(tree: :private)["enable_rack_deflation"]
    }
    config.middleware.use Rack::Brotli, if: -> (*) {
      ::Canvas::DynamicSettings.find(tree: :private)["enable_rack_brotli"]
    }

    config.i18n.load_path << Rails.root.join('config', 'locales', 'locales.yml')

    config.to_prepare do
      require_dependency 'canvas/plugins/default_plugins'
      ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
    end

    # Prepended into the PostgreSQL adapter (via Autoextend below) to disable
    # prepared statements by default and add multi-host connection failover.
    module PostgreSQLEarlyExtensions
      def initialize(connection, logger, connection_parameters, config)
        # Default prepared_statements off unless the database config says otherwise.
        unless config.key?(:prepared_statements)
          config = config.dup
          config[:prepared_statements] = false
        end
        super(connection, logger, connection_parameters, config)
      end

      def connect
        # Try each configured host in order; only the last failure propagates.
        hosts = Array(@connection_parameters[:host]).presence || [nil]
        hosts.each_with_index do |host, index|
          begin
            connection_parameters = @connection_parameters.dup
            connection_parameters[:host] = host
            @connection = PG::Connection.connect(connection_parameters)
            configure_connection
            raise "Canvas requires PostgreSQL 9.5 or newer" unless postgresql_version >= 90500
            break
          rescue ::PG::Error => error
            if error.message.include?("does not exist")
              raise ActiveRecord::NoDatabaseError.new(error.message)
            elsif index == hosts.length - 1
              raise
            end
            # else try next host
          end
        end
      end
    end

    # Restrict the OID type-map preload query to types already known to the store
    # (and their array variants, prefixed "_"), instead of loading all pg types.
    module TypeMapInitializerExtensions
      def query_conditions_for_initial_load
        known_type_names = @store.keys.map { |n| "'#{n}'" } + @store.keys.map { |n| "'_#{n}'" }
        <<~SQL % [known_type_names.join(", "),]
          WHERE
            t.typname IN (%s)
        SQL
      end
    end

    Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
                    PostgreSQLEarlyExtensions,
                    method: :prepend)
    Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQL::OID::TypeMapInitializer",
                    TypeMapInitializerExtensions,
                    method: :prepend)

    # Track whether we are inside a SafeYAML.load call, so the Psych domain-type
    # hook below can refuse to constantize unexpected classes during safe parsing.
    SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
    module SafeYAMLWithFlag
      def load(*args)
        previous, self.safe_parsing = safe_parsing, true
        super
      ensure
        # Restore the previous flag even if parsing raises.
        self.safe_parsing = previous
      end
    end
    SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)

    Psych.add_domain_type("ruby/object", "Class") do |_type, val|
      if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
        raise "Cannot load class #{val} from YAML"
      end
      val.constantize
    end

    module PatchThorWarning
      # active_model_serializers should be passing `type: :boolean` here:
      # https://github.com/rails-api/active_model_serializers/blob/v0.9.0.alpha1/lib/active_model/serializer/generators/serializer/scaffold_controller_generator.rb#L10
      # but we don't really care about the warning, it only affects using the rails
      # generator for a resource
      #
      # Easiest way to avoid the warning for now is to patch thor
      def validate_default_type!
        return if switch_name == "--serializer"
        super
      end
    end
    Autoextend.hook(:"Thor::Option", PatchThorWarning, method: :prepend)

    # Extend any base classes, even gem classes
    Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }

    # tell Rails to use the native XML parser instead of REXML
    ActiveSupport::XmlMini.backend = 'Nokogiri'

    class NotImplemented < StandardError; end

    if defined?(PhusionPassenger)
      PhusionPassenger.on_event(:after_installing_signal_handlers) do
        Canvas::Reloader.trap_signal
      end
      PhusionPassenger.on_event(:starting_worker_process) do |forked|
        if forked
          # We're in smart spawning mode.
          # Reset imperium because it's possible to accidentally share an open http
          # socket between processes shortly after fork.
          Imperium::Agent.reset_default_client
          Imperium::Catalog.reset_default_client
          Imperium::Client.reset_default_client
          Imperium::Events.reset_default_client
          Imperium::KV.reset_default_client
          # it's really important to reset the default clients
          # BEFORE letting dynamic setting pull a new one.
          # do not change this order.
          Canvas::DynamicSettings.on_fork!
        else
          # We're in direct spawning mode. We don't need to do anything.
        end
      end
    else
      config.to_prepare do
        Canvas::Reloader.trap_signal
      end
    end

    # Ensure that the automatic redis reconnection on fork works
    # This is the default in redis-rb, but for some reason rails overrides it
    # See e.g. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22704
    ActiveSupport::Cache::RedisCacheStore::DEFAULT_REDIS_OPTIONS[:reconnect_attempts] = 1

    # don't wrap fields with errors with a <div class="fieldWithErrors" />,
    # since that could leak information (e.g. valid vs invalid username on
    # login page)
    config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }

    # Rack app that renders all dispatch exceptions through ApplicationController.
    class ExceptionsApp
      def call(env)
        req = ActionDispatch::Request.new(env)
        res = ApplicationController.make_response!(req)
        ApplicationController.dispatch('rescue_action_dispatch_exception', req, res)
      end
    end
    config.exceptions_app = ExceptionsApp.new

    config.before_initialize do
      config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
    end

    # Keep request-context / user tracking headers out of the rack-cache cache key.
    if config.action_dispatch.rack_cache != false
      config.action_dispatch.rack_cache[:ignore_headers] =
        %w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
    end

    def validate_secret_key_config!
      # no validation; we don't use Rails' CookieStore session middleware, so we
      # don't care about secret_key_base
    end

    initializer "canvas.init_dynamic_settings", before: "canvas.extend_shard" do
      settings = ConfigFile.load("consul")
      if settings.present?
        begin
          Canvas::DynamicSettings.config = settings
        rescue Imperium::UnableToConnectError
          Rails.logger.warn("INITIALIZATION: can't reach consul, attempts to load DynamicSettings will fail")
        end
      end
    end

    initializer "canvas.extend_shard", before: "active_record.initialize_database" do
      # have to do this before the default shard loads
      Switchman::Shard.serialize :settings, Hash
      Switchman.cache = -> { MultiCache.cache }
    end

    # Newer rails has this in rails proper
    attr_writer :credentials
    initializer "canvas.init_credentials", before: "active_record.initialize_database" do
      self.credentials = Canvas::Credentials.new(credentials)
    end

    # we don't know what middleware to make SessionsTimeout follow until after
    # we've loaded config/initializers/session_store.rb
    initializer("extend_middleware_stack", after: :load_config_initializers) do |app|
      app.config.middleware.insert_before(config.session_store, LoadAccount)
      app.config.middleware.swap(ActionDispatch::RequestId, RequestContextGenerator)
      app.config.middleware.insert_after(config.session_store, RequestContextSession)
      app.config.middleware.insert_before(Rack::Head, RequestThrottle)
      app.config.middleware.insert_before(Rack::MethodOverride, PreventNonMultipartParse)
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Timeoverflow
  # Rails application configuration for TimeOverflow (a time-bank application,
  # judging by the locales and naming -- confirm against the repo README).
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Spanish is the default; Catalan, Basque, English and Brazilian Portuguese
    # are also supported, with fallbacks to the default for missing translations.
    config.i18n.default_locale = :es
    config.i18n.available_locales = [:es, :ca, :eu, :en, :'pt-BR']
    config.i18n.fallbacks = true

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # This tells Rails to serve error pages from the app itself, rather than using static error pages in public/
    config.exceptions_app = self.routes

    # Activate the Skylight agent in staging. You need to provision the
    # SKYLIGHT_AUTHENTICATION env var for this to work.
    config.skylight.environments += ["staging"]

    # ActiveJob configuration
    config.active_job.queue_adapter = :sidekiq
  end
end
Add the Galician (gl) locale to the available locales menu
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Timeoverflow
  # Rails application configuration for TimeOverflow (a time-bank application,
  # judging by the locales and naming -- confirm against the repo README).
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Spanish is the default; Catalan, Basque, Galician, English and Brazilian
    # Portuguese are also supported, with fallbacks for missing translations.
    config.i18n.default_locale = :es
    config.i18n.available_locales = [:es, :ca, :eu, :gl, :en, :'pt-BR']
    config.i18n.fallbacks = true

    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true

    # This tells Rails to serve error pages from the app itself, rather than using static error pages in public/
    config.exceptions_app = self.routes

    # Activate the Skylight agent in staging. You need to provision the
    # SKYLIGHT_AUTHENTICATION env var for this to work.
    config.skylight.environments += ["staging"]

    # ActiveJob configuration
    config.active_job.queue_adapter = :sidekiq
  end
end
|
require File.expand_path('boot', __dir__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
Dotenv::Railtie.load
module TPS
# Main Rails application definition for TPS (French-only, Paris time zone).
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Paris'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = :fr
# Load locale files from nested subdirectories of config/locales as well.
config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
config.i18n.available_locales = [:fr]
# Eager-load lib/ and controller concerns like regular app code.
config.paths.add "#{config.root}/lib", eager_load: true
config.paths.add "#{config.root}/app/controllers/concerns", eager_load: true
config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
config.assets.precompile += ['.woff']
config.active_job.queue_adapter = :delayed_job
# Allow the <u> tag through the Rails HTML sanitizer in addition to the defaults.
config.action_view.sanitized_allowed_tags = ActionView::Base.sanitized_allowed_tags + ['u']
config.to_prepare do
# Make main application helpers available in administrate
Administrate::ApplicationController.helper(TPS::Application.helpers)
end
# Rate limiting (Rack::Attack) and feature flags (Flipper) as middleware.
config.middleware.use Rack::Attack
config.middleware.use Flipper::Middleware::Memoizer, preload_all: true
# App-specific custom settings; presumably read elsewhere in the app -- see ds_* usages.
config.ds_weekly_overview = ENV['APP_NAME'] == 'tps'
config.ds_autosave = {
debounce_delay: 3000,
status_visible_duration: 6000
}
config.skylight.probes += [:graphql]
end
end
config: never cache rails-generated pages
This instructs browsers to never cache content directly generated by the
controllers. This includes HTML pages, JSON responses, PDF files, etc.
This is because some mobile browsers have a behaviour where, although
they will delete the session cookie when the browser shuts down, they
will still serve a cached version of the page on relaunch.
The CSRF token in the HTML is then mismatched with the CSRF token in the
session cookie (because the session cookie has been cleared). This
causes form submissions to fail with an
"ActionController::InvalidAuthenticityToken" exception.
To prevent this, tell browsers to never cache the HTML of a page.
(This doesn’t affect assets files, which are still sent with the proper
cache headers).
See https://github.com/rails/rails/issues/21948
require File.expand_path('boot', __dir__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
Dotenv::Railtie.load
module TPS
# Main Rails application definition for TPS (French-only, Paris time zone).
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
config.time_zone = 'Paris'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
config.i18n.default_locale = :fr
# Load locale files from nested subdirectories of config/locales as well.
config.i18n.load_path += Dir[Rails.root.join('config', 'locales', '**', '*.{rb,yml}')]
config.i18n.available_locales = [:fr]
# Eager-load lib/ and controller concerns like regular app code.
config.paths.add "#{config.root}/lib", eager_load: true
config.paths.add "#{config.root}/app/controllers/concerns", eager_load: true
config.assets.paths << Rails.root.join('app', 'assets', 'javascript')
config.assets.paths << Rails.root.join('app', 'assets', 'fonts')
config.assets.precompile += ['.woff']
config.active_job.queue_adapter = :delayed_job
# Allow the <u> tag through the Rails HTML sanitizer in addition to the defaults.
config.action_view.sanitized_allowed_tags = ActionView::Base.sanitized_allowed_tags + ['u']
# Some mobile browsers have a behaviour where, although they will delete the session
# cookie when the browser shutdowns, they will still serve a cached version
# of the page on relaunch.
# The CSRF token in the HTML is then mismatched with the CSRF token in the session cookie
# (because the session cookie has been cleared). This causes form submissions to fail with
# a "ActionController::InvalidAuthenticityToken" exception.
# To prevent this, tell browsers to never cache the HTML of a page.
# (This doesn’t affect assets files, which are still sent with the proper cache headers).
#
# See https://github.com/rails/rails/issues/21948
config.action_dispatch.default_headers['Cache-Control'] = 'no-store, no-cache'
config.to_prepare do
# Make main application helpers available in administrate
Administrate::ApplicationController.helper(TPS::Application.helpers)
end
# Rate limiting (Rack::Attack) and feature flags (Flipper) as middleware.
config.middleware.use Rack::Attack
config.middleware.use Flipper::Middleware::Memoizer, preload_all: true
# App-specific custom settings; presumably read elsewhere in the app -- see ds_* usages.
config.ds_weekly_overview = ENV['APP_NAME'] == 'tps'
config.ds_autosave = {
debounce_delay: 3000,
status_visible_duration: 6000
}
config.skylight.probes += [:graphql]
end
end
|
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module SAPI
# Main Rails application definition for SAPI (Rails 3.x-era configuration).
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
# NOTE(review): whitelist_attributes was removed in Rails 4 (strong parameters) -- confirm Rails version.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
disabled asset pipeline
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module SAPI
# Main Rails application definition for SAPI (Rails 3.x-era configuration,
# with the asset pipeline turned off).
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
# NOTE(review): whitelist_attributes was removed in Rails 4 (strong parameters) -- confirm Rails version.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = false
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
##
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SafeHavnApp
class Application < Rails::Application
config.paperclip_defaults = {
:storage => :s3,
:s3_credentials => {
:bucket => ENV["s3_bucket"],
:access_key_id => ENV["s3_access_key_id"],
:secret_access_key => ENV["s3_secret_access_key"],
s3_host_name: "s3-#{ENV['s3_region']}.amazonaws.com",
:s3_region => ENV["s3_region"],
:url => ":s3_host_name"
}
}
config.active_record.raise_in_transactional_callbacks = true
end
end
fix aws?
require File.expand_path('../boot', __FILE__)
require 'rails/all'
##
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module SafeHavnApp
class Application < Rails::Application
config.paperclip_defaults = {
:storage => :s3,
:s3_credentials => {
:bucket => ENV["s3_bucket"],
:access_key_id => ENV["s3_access_key_id"],
:secret_access_key => ENV["s3_secret_access_key"],
s3_host_name: "s3.amazonaws.com",
:s3_region => ENV["s3_region"],
:url => ":s3_host_name"
}
}
config.active_record.raise_in_transactional_callbacks = true
end
end
|
# Version constant for the abstract_importer gem.
module AbstractImporter
  # Current release of the library.
  VERSION = '1.4.0'
end
[skip] Bumped version to 1.4.1 (1m)
# Version constant for the abstract_importer gem.
module AbstractImporter
  # Current release of the library.
  VERSION = '1.4.1'
end
|
# -*- encoding: utf-8 -*-
require "acme/smileage/matcher"
require "acme/smileage/discography/albums"
require "acme/smileage/discography/tracks"
module Acme
class Smileage
# Registry of S/mileage albums and the tracks they contain.
# Albums are registered one by one in #initialize; that registration order
# is what #discography returns, so the calls below must not be reordered.
class Discography
def initialize
album Acme::Smileage::Discography::Albums::Amanojaku
album Acme::Smileage::Discography::Albums::AsuhaDateNanoniImasuguKoeGaKikitai
album Acme::Smileage::Discography::Albums::Sukichan
album Acme::Smileage::Discography::Albums::OtonaNiNarutteMuzukashii
album Acme::Smileage::Discography::Albums::Yumemiru15sai
album Acme::Smileage::Discography::Albums::Yumemiru15saiPaxJaponicaGrooveRemix
album Acme::Smileage::Discography::Albums::GambaranakutemoEenende
album Acme::Smileage::Discography::Albums::GambaranakutemoEenendeTopnudeRemixVersion01
album Acme::Smileage::Discography::Albums::OnajiJikyuuDeHatarakuTomodachiNoBijinMama
album Acme::Smileage::Discography::Albums::OnajiJikyuuDeHatarakuTomodachiNoBijinMamaRemixType1
album Acme::Smileage::Discography::Albums::WaruGaki1
album Acme::Smileage::Discography::Albums::Shortcut
album Acme::Smileage::Discography::Albums::KoiNiBooingBoo
album Acme::Smileage::Discography::Albums::UchoutenLove
album Acme::Smileage::Discography::Albums::ShortcutRemixver
album Acme::Smileage::Discography::Albums::UchoutenLoverocketmanMix
album Acme::Smileage::Discography::Albums::Tachiagirl
album Acme::Smileage::Discography::Albums::PleaseMinisukaPostWoman
album Acme::Smileage::Discography::Albums::ChotoMateKudasai
album Acme::Smileage::Discography::Albums::Dotbikini
album Acme::Smileage::Discography::Albums::SmleageBestAlbumComplete1
album Acme::Smileage::Discography::Albums::SukiyoJunjouHankouki
album Acme::Smileage::Discography::Albums::Samuine
album Acme::Smileage::Discography::Albums::TabidachiNoHaruGaKita
album Acme::Smileage::Discography::Albums::SmileSensation
album Acme::Smileage::Discography::Albums::AtarashiiWatashiniNare_Yattaruchan
album Acme::Smileage::Discography::Albums::Eeka_Iiyatsu
album Acme::Smileage::Discography::Albums::MysteryNight_EighteenEmotion
# Build the flattened track list once all albums are registered.
initialize_tracks
end
# Every track from all albums, ordered by album release date, with
# duplicates (tracks appearing on several albums) removed.
def initialize_tracks
@tracks = @albums.sort_by {|e| e.release_date }.map{|e| e.tracks }.flatten.uniq
end
# Defensive copy of the registered album list.
def discography
@albums.dup
end
# Defensive copy of the flattened track list.
def tracks
@tracks.dup
end
def find_track(name)
find("Track", name, @tracks)
end
def find_discography(name)
find("Discography", name, @albums)
end
private
# Match +name+ (normalized to UTF-8) against every known name of each
# target and return the matching object; raises ArgumentError if no
# canonical name matches.
def find(label, name, targets)
matcher = Matcher.new(name.encode("UTF-8"))
canon_name = matcher.match(targets.map {|e| names(e) }.flatten)
raise ArgumentError, "#{label} not found: #{name}" unless canon_name
targets.find {|e| names(e).include?(canon_name) }
end
# All names an album/track answers to: canonical, romaji, and nicknames.
def names(obj)
[obj.name, obj.name_romaji] | obj.nicknames
end
# Append an album singleton to the ordered @albums registry.
def album(album_class)
@albums ||= []
@albums << album_class.instance
end
end
end
end
initialize_tracks を private に変更
# -*- encoding: utf-8 -*-
require "acme/smileage/matcher"
require "acme/smileage/discography/albums"
require "acme/smileage/discography/tracks"
module Acme
class Smileage
# Registry of S/mileage albums and the tracks they contain.
# Albums are registered one by one in #initialize; that registration order
# is what #discography returns, so the calls below must not be reordered.
class Discography
def initialize
album Acme::Smileage::Discography::Albums::Amanojaku
album Acme::Smileage::Discography::Albums::AsuhaDateNanoniImasuguKoeGaKikitai
album Acme::Smileage::Discography::Albums::Sukichan
album Acme::Smileage::Discography::Albums::OtonaNiNarutteMuzukashii
album Acme::Smileage::Discography::Albums::Yumemiru15sai
album Acme::Smileage::Discography::Albums::Yumemiru15saiPaxJaponicaGrooveRemix
album Acme::Smileage::Discography::Albums::GambaranakutemoEenende
album Acme::Smileage::Discography::Albums::GambaranakutemoEenendeTopnudeRemixVersion01
album Acme::Smileage::Discography::Albums::OnajiJikyuuDeHatarakuTomodachiNoBijinMama
album Acme::Smileage::Discography::Albums::OnajiJikyuuDeHatarakuTomodachiNoBijinMamaRemixType1
album Acme::Smileage::Discography::Albums::WaruGaki1
album Acme::Smileage::Discography::Albums::Shortcut
album Acme::Smileage::Discography::Albums::KoiNiBooingBoo
album Acme::Smileage::Discography::Albums::UchoutenLove
album Acme::Smileage::Discography::Albums::ShortcutRemixver
album Acme::Smileage::Discography::Albums::UchoutenLoverocketmanMix
album Acme::Smileage::Discography::Albums::Tachiagirl
album Acme::Smileage::Discography::Albums::PleaseMinisukaPostWoman
album Acme::Smileage::Discography::Albums::ChotoMateKudasai
album Acme::Smileage::Discography::Albums::Dotbikini
album Acme::Smileage::Discography::Albums::SmleageBestAlbumComplete1
album Acme::Smileage::Discography::Albums::SukiyoJunjouHankouki
album Acme::Smileage::Discography::Albums::Samuine
album Acme::Smileage::Discography::Albums::TabidachiNoHaruGaKita
album Acme::Smileage::Discography::Albums::SmileSensation
album Acme::Smileage::Discography::Albums::AtarashiiWatashiniNare_Yattaruchan
album Acme::Smileage::Discography::Albums::Eeka_Iiyatsu
album Acme::Smileage::Discography::Albums::MysteryNight_EighteenEmotion
# Build the flattened track list once all albums are registered.
initialize_tracks
end
# Defensive copy of the registered album list.
def discography
@albums.dup
end
# Defensive copy of the flattened track list.
def tracks
@tracks.dup
end
def find_track(name)
find("Track", name, @tracks)
end
def find_discography(name)
find("Discography", name, @albums)
end
private
# Match +name+ (normalized to UTF-8) against every known name of each
# target and return the matching object; raises ArgumentError if no
# canonical name matches.
def find(label, name, targets)
matcher = Matcher.new(name.encode("UTF-8"))
canon_name = matcher.match(targets.map {|e| names(e) }.flatten)
raise ArgumentError, "#{label} not found: #{name}" unless canon_name
targets.find {|e| names(e).include?(canon_name) }
end
# All names an album/track answers to: canonical, romaji, and nicknames.
def names(obj)
[obj.name, obj.name_romaji] | obj.nicknames
end
# Every track from all albums, ordered by album release date, with
# duplicates (tracks appearing on several albums) removed.
def initialize_tracks
@tracks = @albums.sort_by {|e| e.release_date }.map{|e| e.tracks }.flatten.uniq
end
# Append an album singleton to the ordered @albums registry.
def album(album_class)
@albums ||= []
@albums << album_class.instance
end
end
end
end
|
require "active_support/concern"
module ActiveRecord
module Slave
# Mixin that routes a model's reads through slave (replica) connections.
# Including it wraps .connection via alias_method_chain so that, when slave
# mode is active, the connection of a generated slave-bound subclass is used.
module Model
extend ActiveSupport::Concern
included do |model|
# generate_class is an implementation detail; hide it from callers.
private_class_method :generate_class
model.singleton_class.class_eval do
include SingletonClassMethods
# NOTE(review): alias_method_chain is deprecated since Rails 5 --
# Module#prepend is the modern replacement; confirm the supported Rails version.
alias_method_chain :connection, :slave
end
end
module SingletonClassMethods
# In slave mode, return the connection of the subclass selected by the
# replication router; otherwise fall through to the original method.
def connection_with_slave
if @slave_mode
@class_repository.fetch(@replication_router.slave_connection_name).connection
else
connection_without_slave
end
end
end
module ClassMethods
# Configure replication for this model: connect the base class to the
# master and generate one subclass per configured slave connection.
def use_slave(replication_name)
@enable_slave = true
# Start with slave mode off; reads go to the master until toggled.
@slave_mode = false
replication_config = ActiveRecord::Slave.config.fetch_replication_config replication_name
@replication_router = ActiveRecord::Slave::ReplicationRouter.new replication_config
@class_repository = {}
base_class = self
connection_name = replication_config.master_connection_name
establish_connection(connection_name)
replication_config.slave_connection_names.keys.each do |slave_connection_name|
@class_repository[slave_connection_name] = generate_class(base_class, slave_connection_name)
end
end
# The generated subclass bound to the slave currently picked by the router.
def slave_for
@class_repository.fetch(@replication_router.slave_connection_name)
end
# Build an anonymous subclass of base_class, named
# "<Base>::Slave::<connection>", bound to the given connection.
def generate_class(base_class, connection_name)
model = Class.new(base_class) do
module_eval <<-RUBY, __FILE__, __LINE__ + 1
def self.name
"#{base_class.name}::Slave::#{connection_name}"
end
RUBY
end
model.class_eval { establish_connection(connection_name) }
model
end
end
end
end
end
Fixup commit
require "active_support/concern"
module ActiveRecord
module Slave
# Mixin that routes a model's reads through slave (replica) connections.
# Including it wraps .connection via alias_method_chain so that, when slave
# mode is active, the connection of a generated slave-bound subclass is used.
module Model
extend ActiveSupport::Concern
included do |model|
# generate_class is an implementation detail; hide it from callers.
private_class_method :generate_class
model.singleton_class.class_eval do
include SingletonClassMethods
# NOTE(review): alias_method_chain is deprecated since Rails 5 --
# Module#prepend is the modern replacement; confirm the supported Rails version.
alias_method_chain :connection, :slave
end
end
module SingletonClassMethods
# In slave mode, return the connection of the subclass selected by the
# replication router; otherwise fall through to the original method.
# NOTE(review): @slave_mode is never initialized here (nil, i.e. falsy,
# by default) -- presumably toggled elsewhere in the library; confirm.
def connection_with_slave
if @slave_mode
@class_repository.fetch(@replication_router.slave_connection_name).connection
else
connection_without_slave
end
end
end
module ClassMethods
# Configure replication for this model: connect the base class to the
# master and generate one subclass per configured slave connection.
def use_slave(replication_name)
replication_config = ActiveRecord::Slave.config.fetch_replication_config replication_name
@replication_router = ActiveRecord::Slave::ReplicationRouter.new replication_config
@class_repository = {}
base_class = self
connection_name = replication_config.master_connection_name
establish_connection(connection_name)
replication_config.slave_connection_names.keys.each do |slave_connection_name|
@class_repository[slave_connection_name] = generate_class(base_class, slave_connection_name)
end
end
# The generated subclass bound to the slave currently picked by the router.
def slave_for
@class_repository.fetch(@replication_router.slave_connection_name)
end
# Build an anonymous subclass of base_class, named
# "<Base>::Slave::<connection>", bound to the given connection.
def generate_class(base_class, connection_name)
model = Class.new(base_class) do
module_eval <<-RUBY, __FILE__, __LINE__ + 1
def self.name
"#{base_class.name}::Slave::#{connection_name}"
end
RUBY
end
model.class_eval { establish_connection(connection_name) }
model
end
end
end
end
end
|
# Copyright (c) 2009 Rick Olson
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
module ActiveRecord
# ActiveRecord glue for the Transitions state-machine gem: persists the
# machine's state in the model's +state+ column and validates its presence.
module Transitions
extend ActiveSupport::Concern
included do
include ::Transitions
before_validation :set_initial_state
validates_presence_of :state
end
protected
# Persist the new state immediately (update_attribute skips validations).
def write_state(state_machine, state)
update_attribute(:state, state.to_s)
end
# Current state as a symbol, the form the state machine expects.
def read_state(state_machine)
self.state.to_sym
end
# Default the state column to the machine's declared initial state.
def set_initial_state
self.state ||= self.class.state_machine.initial_state.to_s
end
end
end
state inclusion validator added (it is now impossible to arbitrarily set state to a value outside the declared state list)
# Copyright (c) 2009 Rick Olson
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
module ActiveRecord
# ActiveRecord glue for the Transitions state-machine gem: persists the
# machine's state in the model's +state+ column and validates that it is
# present and one of the machine's declared states.
module Transitions
extend ActiveSupport::Concern
included do
include ::Transitions
before_validation :set_initial_state
validates_presence_of :state
validate :state_inclusion
end
protected
# Persist the new state immediately (update_attribute skips validations).
def write_state(state_machine, state)
update_attribute(:state, state.to_s)
end
# Current state as a symbol, the form the state machine expects.
def read_state(state_machine)
self.state.to_sym
end
# Default the state column to the machine's declared initial state.
def set_initial_state
self.state ||= self.class.state_machine.initial_state.to_s
end
# Reject values that are not declared states of the machine, so the state
# column cannot be set to an arbitrary string.
def state_inclusion
unless self.class.state_machine.states.map{|s| s.name.to_s }.include?(self.state.to_s)
self.errors.add(:state, :inclusion, :value => self.state)
end
end
end
end
|
# -*- encoding : utf-8 -*-
# Version constant for the active_record_not gem.
module ActiveRecordNot
  # Current release of the library.
  VERSION = '1.0.1'
end
bump version
# -*- encoding : utf-8 -*-
# Version constant for the active_record_not gem.
module ActiveRecordNot
# Current release of the library (bumped from 1.0.1).
VERSION = "1.0.2"
end |
class ActiveScaffold::Tableless < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord
# Declare implicit_order_column so that RelationExtension#implicit_order_column
# can delegate to the model class on Rails 6+.
if Rails.version >= '6.0'
class_attribute :implicit_order_column, instance_accessor: false
end
# AssociationScope specialization for tableless models; on Rails < 5 it
# resolves columns against the class stashed in the alias tracker.
class AssociationScope < ActiveRecord::Associations::AssociationScope
INSTANCE = create
def self.scope(association, connection)
INSTANCE.scope association, connection
end
if Rails.version < '5.0.0'
# Look the column up on the class owning the table, falling back to the
# association class remembered by add_constraints (or, on rails < 4.1,
# the association's klass).
def column_for(table_name, column_name, alias_tracker = nil)
klass = alias_tracker ? alias_tracker.connection.klass : self.klass
if table_name == klass.table_name
klass.columns_hash[column_name]
elsif alias_tracker && (klass = alias_tracker.instance_variable_get(:@assoc_klass))
klass.columns_hash[column_name]
else # rails < 4.1
association.klass.columns_hash[column_name]
end
end
# Stash the association class on the tracker so column_for can find it.
def add_constraints(scope, owner, assoc_klass, refl, tracker)
tracker.instance_variable_set(:@assoc_klass, assoc_klass)
super
end
end
end
# Minimal connection adapter whose column metadata comes from the model
# class itself instead of a real database.
class Connection < ActiveRecord::ConnectionAdapters::AbstractAdapter
attr_reader :klass
def initialize(klass, *args)
super(nil, *args)
@klass = klass
end
# Columns are whatever the tableless class declared; table_name is ignored.
def columns(table_name)
klass.columns
end
end
# Column built from a name/sql_type pair; the real connection is used only
# to resolve the type object (API differs between Rails 4.x and 5.x).
class Column < ActiveRecord::ConnectionAdapters::Column
if Rails.version >= '5.0.0'
def initialize(name, default, sql_type = nil, null = true)
metadata = ActiveRecord::Base.connection.send :fetch_type_metadata, sql_type
super(name, default, metadata, null)
end
else
def initialize(name, default, sql_type = nil, null = true)
cast_type = ActiveRecord::Base.connection.send :lookup_cast_type, sql_type
super(name, default, cast_type, sql_type, null)
end
end
end
# Prepended into AR association classes so tableless targets skip the
# statement cache and have their scopes extended with RelationExtension.
module Tableless
if Rails.version < '5.2.0'
def skip_statement_cache?
klass < ActiveScaffold::Tableless ? true : super
end
def association_scope
@association_scope ||= AssociationScope.scope(self, klass.connection) if klass < ActiveScaffold::Tableless
super
end
else
def skip_statement_cache?(scope)
klass < ActiveScaffold::Tableless ? true : super
end
end
def target_scope
super.tap do |scope|
if klass < ActiveScaffold::Tableless
# Extend only this scope instance, leaving other relations untouched.
class << scope; include RelationExtension; end
end
end
end
end
# Hook module: prepends the Tableless overrides when included into an
# ActiveRecord association class.
module Association
def self.included(base)
base.prepend Tableless
end
end
# Collection associations on tableless targets load via the scope instead
# of SQL statement caching.
module TablelessCollectionAssociation
def get_records # rubocop:disable Naming/AccessorMethodName
klass < ActiveScaffold::Tableless ? scope.to_a : super
end
end
# Hook module: prepends the tableless collection override when included.
module CollectionAssociation
def self.included(base)
base.prepend TablelessCollectionAssociation
end
end
# Singular associations on tableless targets load at most one record via
# the scope instead of SQL statement caching.
module TablelessSingularAssociation
def get_records # rubocop:disable Naming/AccessorMethodName
klass < ActiveScaffold::Tableless ? scope.limit(1).to_a : super
end
end
# Hook module: prepends the tableless singular override when included.
module SingularAssociation
def self.included(base)
base.prepend TablelessSingularAssociation
end
end
# Relation behaviour for tableless models: where() arguments are collected
# in a plain array and handed to the model's own finder methods instead of
# being compiled to SQL.
module RelationExtension
# Writer is required: #except assigns new_relation.conditions = ...; with
# only attr_reader that call raises NoMethodError.
attr_accessor :conditions
def initialize(klass, *)
super
@conditions ||= []
end
def initialize_copy(other)
# Each copy gets its own conditions array so mutations don't leak back.
@conditions = @conditions&.dup || []
super
end
# Accumulate where() arguments instead of building SQL; returns self so
# calls can be chained like a normal relation.
def where(opts, *rest)
if opts.present?
opts = opts.with_indifferent_access if opts.is_a? Hash
@conditions << (rest.empty? ? opts : [opts, *rest])
end
self
end
def merge(rel)
super.tap do |merged|
merged.conditions.concat rel.conditions unless rel.nil? || rel.is_a?(Array)
end
end
# Keep accumulated conditions unless :where is being excepted.
def except(*skips)
super.tap do |new_relation|
new_relation.conditions = conditions unless skips.include? :where
end
end
# Materialization is delegated to the model's find_all implementation.
def to_a
@klass.find_all(self)
end
def find_one(id)
@klass.find_one(id, self) || raise(ActiveRecord::RecordNotFound)
end
def execute_simple_calculation(operation, column_name, distinct)
@klass.execute_simple_calculation(self, operation, column_name, distinct)
end
def implicit_order_column
@klass.implicit_order_column
end
end
# Concrete relation class used for tableless models.
class Relation < ::ActiveRecord::Relation
include RelationExtension
end
class << self
private
# Build our Relation subclass; the constructor arity differs per Rails version.
def relation
args = [self]
if Rails.version < '5.2.0'
args << arel_table
args << predicate_builder if Rails.version >= '5.0.0'
end
ActiveScaffold::Tableless::Relation.new(*args)
end
if Rails.version >= '5.2'
# Replace AR's prepared-statement cache with our no-op StatementCache.
def cached_find_by_statement(key, &block)
StatementCache.new(key, self, &block)
end
end
end
# Stand-in for ActiveRecord::StatementCache that simply re-runs where() on
# every execution -- there is no SQL to cache for tableless models.
class StatementCache
def initialize(key, model = nil)
@key = key
@model = model
end
if Rails.version < '5.2' # 5.0 and 5.1
def execute(values, model, connection)
model.where(@key => values)
end
else
def execute(values, connection)
@model.where(@key => values)
end
end
end
# Column-name => column hash, built from the declared tableless columns.
def self.columns_hash
if self < ActiveScaffold::Tableless
@columns_hash ||= Hash[columns.map { |c| [c.name, c] }]
else
super
end
end
# Install our StatementCache in place of AR's find_by cache (the cache API
# changed in Rails 5.0; 5.2+ uses cached_find_by_statement above instead).
if Rails.version < '5.0' # 4.2.x
def self.initialize_find_by_cache
self.find_by_statement_cache = Hash.new { |h, k| h[k] = StatementCache.new(k) } # rubocop:disable Rails/DynamicFindBy
end
elsif Rails.version < '5.2' # 5.0 and 5.1
def self.initialize_find_by_cache
@find_by_statement_cache = {
true => Hash.new { |h, k| h[k] = StatementCache.new(k) },
false => Hash.new { |h, k| h[k] = StatementCache.new(k) }
}
end
end
# Columns declared via .column; empty until the subclass declares some.
def self.columns
@tableless_columns ||= []
end
# Derive a table name from the model name -- no real table exists.
def self.table_name
@table_name ||= ActiveModel::Naming.plural(self)
end
# Pretend the table exists so AR never tries to query the schema.
def self.table_exists?
true
end
self.abstract_class = true
# Lazily build the fake connection adapter backed by this class.
def self.connection
@connection ||= Connection.new(self)
end
# Declare a column on the tableless model; returns the new Column.
def self.column(name, sql_type = nil, options = {})
column = Column.new(name.to_s, options[:default], sql_type.to_s, options.key?(:null) ? options[:null] : true)
column.tap { columns << column }
end
# Record retrieval must be supplied by the subclass; these raise to make
# the required interface explicit.
def self.find_all(relation)
raise 'self.find_all must be implemented in a Tableless model'
end
def self.find_one(id, relation)
raise 'self.find_one must be implemented in a Tableless model'
end
# Only COUNT on the primary key (or :all) is supported out of the box; it
# is answered by loading find_all and taking its size.
def self.execute_simple_calculation(relation, operation, column_name, distinct)
unless operation == 'count' && [relation.klass.primary_key, :all].include?(column_name)
raise "self.execute_simple_calculation must be implemented in a Tableless model to support #{operation} #{column_name}#{' distinct' if distinct} columns"
end
find_all(relation).size
end
# Destroying is unsupported unless the subclass implements it.
def destroy
raise 'destroy must be implemented in a Tableless model'
end
# Run create/update callbacks without touching any database.
def _create_record #:nodoc:
run_callbacks(:create) {}
end
def _update_record(*) #:nodoc:
run_callbacks(:update) {}
end
end
remove unneeded line: the class attribute is already defined, since Tableless inherits from ActiveRecord::Base
class ActiveScaffold::Tableless < ActiveRecord::Base # rubocop:disable Rails/ApplicationRecord
# AssociationScope specialization for tableless models; on Rails < 5 it
# resolves columns against the class stashed in the alias tracker.
class AssociationScope < ActiveRecord::Associations::AssociationScope
INSTANCE = create
def self.scope(association, connection)
INSTANCE.scope association, connection
end
if Rails.version < '5.0.0'
# Look the column up on the class owning the table, falling back to the
# association class remembered by add_constraints (or, on rails < 4.1,
# the association's klass).
def column_for(table_name, column_name, alias_tracker = nil)
klass = alias_tracker ? alias_tracker.connection.klass : self.klass
if table_name == klass.table_name
klass.columns_hash[column_name]
elsif alias_tracker && (klass = alias_tracker.instance_variable_get(:@assoc_klass))
klass.columns_hash[column_name]
else # rails < 4.1
association.klass.columns_hash[column_name]
end
end
# Stash the association class on the tracker so column_for can find it.
def add_constraints(scope, owner, assoc_klass, refl, tracker)
tracker.instance_variable_set(:@assoc_klass, assoc_klass)
super
end
end
end
# Minimal connection adapter whose column metadata comes from the model
# class itself instead of a real database.
class Connection < ActiveRecord::ConnectionAdapters::AbstractAdapter
attr_reader :klass
def initialize(klass, *args)
super(nil, *args)
@klass = klass
end
# Columns are whatever the tableless class declared; table_name is ignored.
def columns(table_name)
klass.columns
end
end
# Column built from a name/sql_type pair; the real connection is used only
# to resolve the type object (API differs between Rails 4.x and 5.x).
class Column < ActiveRecord::ConnectionAdapters::Column
if Rails.version >= '5.0.0'
def initialize(name, default, sql_type = nil, null = true)
metadata = ActiveRecord::Base.connection.send :fetch_type_metadata, sql_type
super(name, default, metadata, null)
end
else
def initialize(name, default, sql_type = nil, null = true)
cast_type = ActiveRecord::Base.connection.send :lookup_cast_type, sql_type
super(name, default, cast_type, sql_type, null)
end
end
end
# Prepended into AR association classes so tableless targets skip the
# statement cache and have their scopes extended with RelationExtension.
module Tableless
if Rails.version < '5.2.0'
def skip_statement_cache?
klass < ActiveScaffold::Tableless ? true : super
end
def association_scope
@association_scope ||= AssociationScope.scope(self, klass.connection) if klass < ActiveScaffold::Tableless
super
end
else
def skip_statement_cache?(scope)
klass < ActiveScaffold::Tableless ? true : super
end
end
def target_scope
super.tap do |scope|
if klass < ActiveScaffold::Tableless
# Extend only this scope instance, leaving other relations untouched.
class << scope; include RelationExtension; end
end
end
end
end
# Hook module: prepends the Tableless overrides when included into an
# ActiveRecord association class.
module Association
def self.included(base)
base.prepend Tableless
end
end
# Collection associations on tableless targets load via the scope instead
# of SQL statement caching.
module TablelessCollectionAssociation
def get_records # rubocop:disable Naming/AccessorMethodName
klass < ActiveScaffold::Tableless ? scope.to_a : super
end
end
module CollectionAssociation
def self.included(base)
base.prepend TablelessCollectionAssociation
end
end
module TablelessSingularAssociation
def get_records # rubocop:disable Naming/AccessorMethodName
klass < ActiveScaffold::Tableless ? scope.limit(1).to_a : super
end
end
module SingularAssociation
def self.included(base)
base.prepend TablelessSingularAssociation
end
end
module RelationExtension
attr_reader :conditions
def initialize(klass, *)
super
@conditions ||= []
end
def initialize_copy(other)
@conditions = @conditions&.dup || []
super
end
def where(opts, *rest)
if opts.present?
opts = opts.with_indifferent_access if opts.is_a? Hash
@conditions << (rest.empty? ? opts : [opts, *rest])
end
self
end
def merge(rel)
super.tap do |merged|
merged.conditions.concat rel.conditions unless rel.nil? || rel.is_a?(Array)
end
end
def except(*skips)
super.tap do |new_relation|
new_relation.conditions = conditions unless skips.include? :where
end
end
def to_a
@klass.find_all(self)
end
def find_one(id)
@klass.find_one(id, self) || raise(ActiveRecord::RecordNotFound)
end
def execute_simple_calculation(operation, column_name, distinct)
@klass.execute_simple_calculation(self, operation, column_name, distinct)
end
def implicit_order_column
@klass.implicit_order_column
end
end
class Relation < ::ActiveRecord::Relation
include RelationExtension
end
class << self
private
def relation
args = [self]
if Rails.version < '5.2.0'
args << arel_table
args << predicate_builder if Rails.version >= '5.0.0'
end
ActiveScaffold::Tableless::Relation.new(*args)
end
if Rails.version >= '5.2'
def cached_find_by_statement(key, &block)
StatementCache.new(key, self, &block)
end
end
end
class StatementCache
def initialize(key, model = nil)
@key = key
@model = model
end
if Rails.version < '5.2' # 5.0 and 5.1
def execute(values, model, connection)
model.where(@key => values)
end
else
def execute(values, connection)
@model.where(@key => values)
end
end
end
def self.columns_hash
if self < ActiveScaffold::Tableless
@columns_hash ||= Hash[columns.map { |c| [c.name, c] }]
else
super
end
end
if Rails.version < '5.0' # 4.2.x
def self.initialize_find_by_cache
self.find_by_statement_cache = Hash.new { |h, k| h[k] = StatementCache.new(k) } # rubocop:disable Rails/DynamicFindBy
end
elsif Rails.version < '5.2' # 5.0 and 5.1
def self.initialize_find_by_cache
@find_by_statement_cache = {
true => Hash.new { |h, k| h[k] = StatementCache.new(k) },
false => Hash.new { |h, k| h[k] = StatementCache.new(k) }
}
end
end
def self.columns
@tableless_columns ||= []
end
def self.table_name
@table_name ||= ActiveModel::Naming.plural(self)
end
def self.table_exists?
true
end
self.abstract_class = true
def self.connection
@connection ||= Connection.new(self)
end
def self.column(name, sql_type = nil, options = {})
column = Column.new(name.to_s, options[:default], sql_type.to_s, options.key?(:null) ? options[:null] : true)
column.tap { columns << column }
end
def self.find_all(relation)
raise 'self.find_all must be implemented in a Tableless model'
end
def self.find_one(id, relation)
raise 'self.find_one must be implemented in a Tableless model'
end
def self.execute_simple_calculation(relation, operation, column_name, distinct)
unless operation == 'count' && [relation.klass.primary_key, :all].include?(column_name)
raise "self.execute_simple_calculation must be implemented in a Tableless model to support #{operation} #{column_name}#{' distinct' if distinct} columns"
end
find_all(relation).size
end
def destroy
raise 'destroy must be implemented in a Tableless model'
end
def _create_record #:nodoc:
run_callbacks(:create) {}
end
def _update_record(*) #:nodoc:
run_callbacks(:update) {}
end
end
|
# Keeps ActsAsArchive's "archived_" shadow tables structurally in sync with
# the live tables by replaying schema-changing migration calls against them.
module ActsAsArchive
  module Migration
    # Install the archive hook once per including migration class by
    # aliasing method_missing (the migration DSL dispatches through it).
    def self.included(base)
      unless base.included_modules.include?(InstanceMethods)
        base.send :extend, ClassMethods
        base.class_eval do
          class <<self
            alias_method :method_missing_without_archive, :method_missing
            alias_method :method_missing, :method_missing_with_archive
          end
        end
      end
    end
    module ClassMethods
      # Run the original migration call, then repeat it against the matching
      # "archived_" table when that table exists.
      def method_missing_with_archive(method, *arguments, &block)
        # Deep-copy the arguments before the original call can mutate them.
        args = Marshal.load(Marshal.dump(arguments))
        method_missing_without_archive(method, *arguments, &block)
        # Schema operations that must be mirrored onto the archive table.
        supported = [
          :add_column, :add_timestamps, :change_column,
          :change_column_default, :change_table,
          :drop_table, :remove_column, :remove_columns,
          :remove_timestamps, :rename_column, :rename_table
        ]
        if args.include?(:deleted_at) || args.include?('deleted_at')
          # Don't change the archive's deleted_at column
          return
        end
        if !args.empty? && supported.include?(method)
          connection = ActiveRecord::Base.connection
          args[0] = "archived_" + ActiveRecord::Migrator.proper_table_name(args[0])
          if method == :rename_table
            # Bug fix: rename_table may be called with Symbol table names and
            # String#+ raises TypeError on a Symbol — coerce with to_s.
            args[1] = "archived_" + args[1].to_s
          end
          if connection.table_exists?(args[0])
            connection.send(method, *args, &block)
          end
        end
      end
    end
    module InstanceMethods
    end
  end
end
Fix for rename_table when table names are passed as symbols rather than strings
# Keeps ActsAsArchive's "archived_" shadow tables structurally in sync with
# the live tables by replaying schema-changing migration calls against them.
module ActsAsArchive
  module Migration
    # Install the archive hook once per including migration class by
    # aliasing method_missing (the migration DSL dispatches through it).
    def self.included(base)
      unless base.included_modules.include?(InstanceMethods)
        base.send :extend, ClassMethods
        base.class_eval do
          class <<self
            alias_method :method_missing_without_archive, :method_missing
            alias_method :method_missing, :method_missing_with_archive
          end
        end
      end
    end
    module ClassMethods
      # Run the original migration call, then repeat it against the matching
      # "archived_" table when that table exists.
      def method_missing_with_archive(method, *arguments, &block)
        # Deep-copy the arguments before the original call can mutate them.
        args = Marshal.load(Marshal.dump(arguments))
        method_missing_without_archive(method, *arguments, &block)
        # Schema operations that must be mirrored onto the archive table.
        supported = [
          :add_column, :add_timestamps, :change_column,
          :change_column_default, :change_table,
          :drop_table, :remove_column, :remove_columns,
          :remove_timestamps, :rename_column, :rename_table
        ]
        if args.include?(:deleted_at) || args.include?('deleted_at')
          # Don't change the archive's deleted_at column
          return
        end
        if !args.empty? && supported.include?(method)
          connection = ActiveRecord::Base.connection
          args[0] = "archived_" + ActiveRecord::Migrator.proper_table_name(args[0])
          if method == :rename_table
            # to_s handles rename_table being called with Symbol names.
            args[1] = "archived_" + args[1].to_s
          end
          # Only mirror the change when the archive table actually exists.
          if connection.table_exists?(args[0])
            connection.send(method, *args, &block)
          end
        end
      end
    end
    module InstanceMethods
    end
  end
end
|
#!/usr/bin/env ruby
#
# = itunes
#
# Copyright 2016 Richard Lyon
# Distributed under the MIT license
#
require 'plist' # https://github.com/bleything/plist
# Represents one album as a hash of { track_id => track_hash } entries taken
# from the iTunes library plist.  Artist/album/grouping/genre are read from
# the first track and assumed uniform across the album.
class Album
  attr_reader :artist
  attr_reader :album
  # The custom writers below propagate changes to every track; plain readers
  # suffice here (the original attr_accessor writers were shadowed anyway).
  attr_reader :grouping
  attr_reader :genre
  attr_reader :album_hash
  # album_hash:: { track_id => track_hash } for all tracks on the album.
  def initialize album_hash
    @album_hash = album_hash
    parse_album_hash
  end
  # Refresh the cached attributes from the album's first track.
  def parse_album_hash
    track_id, track_hash = album_hash.first
    @artist = track_hash["Artist"]
    @album = track_hash["Album"]
    @grouping = track_hash["Grouping"]
    @genre = track_hash["Genre"]
  end
  # Set the genre on every track of the album.
  def genre=( new_genre )
    update_all_tracks("Genre", new_genre)
  end
  # Set the grouping on every track of the album.
  def grouping=( new_grouping )
    update_all_tracks("Grouping", new_grouping)
  end

  private

  # Write +value+ into +property+ of every track hash, then re-derive the
  # cached attributes.  (Extracted: genre= and grouping= were copy-pasted.)
  # TODO(review): changes are applied in memory only; pushing them back to
  # iTunes itself was never implemented in the original either.
  def update_all_tracks(property, value)
    new_album_hash = {}
    @album_hash.each do |track_id, track_hash|
      track_hash[property] = value
      new_album_hash[track_id] = track_hash
    end
    @album_hash = new_album_hash
    parse_album_hash
  end
end
# Read/modify wrapper around an iTunes "iTunes Music Library.xml" plist.
# Tracks are indexed by their string track id; albums are grouped by title.
class Itunes
  TEST_ITUNES_PATH = '/Users/richlyon/Coding/Ruby/development/rjl_itunes/features/assets/test/iTunes Music Library.xml'
  ITUNES_PATH = '/Users/richlyon/Music/iTunes/iTunes Music Library.xml'
  LIVE = '/Users/richlyon/Music/iTunes LIVE DO NOT DELETE/iTunes Music Library.xml'
  # attr_reader :album
  attr_reader :albums
  attr_reader :tracks_hash
  attr_reader :itunes_plist
  attr_reader :itunes_path
  attr_reader :itunes_hash
  # Parse the library at +itunes_path+ and index its audio tracks and albums.
  def initialize( itunes_path = TEST_ITUNES_PATH )
    @itunes_path = itunes_path
    # Bug fix: parse the path that was passed in.  The hard-coded
    # ITUNES_PATH constant was previously read regardless of the argument,
    # making the parameter (and the TEST_ITUNES_PATH default) dead.
    @itunes_hash = Plist::parse_xml( itunes_path )
    @tracks_hash = get_audio_tracks
    @albums = get_albums
  end
  # All tracks whose "Kind" marks them as an audio file, keyed by track id.
  # (The dead `tracks` accumulator array from the original was removed.)
  def get_audio_tracks
    @itunes_hash["Tracks"].reject { |key, hash| !audio_file?( hash ) }
  end
  # One Album object per distinct album title among the audio tracks.
  def get_albums
    titles = []
    # Bug fix: scan *all* tracks for titles.  The original iterated
    # `@tracks_hash.first(1)`, which only ever saw the first track — a
    # leftover debugging limit, judging by the uniqueness check below.
    @tracks_hash.each do |album_id, album_hash|
      titles << album_hash["Album"] if !titles.include? album_hash["Album"]
    end
    albums_list = []
    titles.each do |title|
      tracks = {}
      @tracks_hash.each do |album_id, album_hash|
        tracks[album_id] = album_hash if title == album_hash["Album"]
      end
      albums_list << Album.new(tracks)
    end
    return albums_list
  end
  # Set +property+ to +value+ on every track in the same album as +track_id+.
  def update_album( track_id, property, value )
    # get all of the tracks in that album
    tracks = get_related_tracks( track_id )
    # Renamed block variable: it used to shadow the track_id parameter.
    tracks.each do |related_id, track_hash|
      @itunes_hash["Tracks"][related_id.to_s][property] = value
    end
  end
  # Return all of the tracks in the same album as the given track
  def get_related_tracks( track_id )
    album_title = @itunes_hash["Tracks"][track_id.to_s]["Album"]
    tracks = @itunes_hash["Tracks"].reject { |key, hash| hash["Album"] != album_title}
    return tracks
  end
  # Return True if all tracks in the same album as the track have the same property
  # Used to check all genres and groupings are the same
  def same?( track_id, property )
    tracks = get_related_tracks track_id
    # Bug fix: compare the requested +property+; the original ignored the
    # argument and always compared "Grouping".
    values = tracks.map { |related_id, hash| hash[property] }
    return values.all? {|x| x == values[0]}
  end
  # Write the (possibly modified) library back to the file it was read from.
  def save
    File.open(@itunes_path, 'w') {|f| f.write( @itunes_hash.to_plist) }
  end
  # Track hash for +album_id+ (note: shadows the commented-out attr_reader).
  def album ( album_id )
    return @tracks_hash[album_id.to_s]
  end
  # True when the library XML parsed successfully.
  def valid?
    return !@itunes_hash.nil?
  end
  # True when the track's "Kind" string identifies an audio file.
  def audio_file?( track_hash )
    return track_hash["Kind"].include? "audio file"
  end
  # Print +message+ framed by separator rules (debugging aid).
  def debug( message )
    puts "="*100
    puts message
    puts "="*100
  end
  # Pretty-print the key properties of the given track id.
  def niceprint( album_id )
    properties = ["Artist", "Album", "Name", "Grouping", "Genre"]
    puts "="*50
    properties.each do |property|
      puts "#{property}: #{@itunes_hash["Tracks"][album_id.to_s][property]}"
    end
  end
end
Clear out dead code
#!/usr/bin/env ruby
#
# = itunes
#
# Copyright 2016 Richard Lyon
# Distributed under the MIT license
#
require 'plist' # https://github.com/bleything/plist
# An album is a collection of track hashes keyed by track id; the scalar
# attributes (artist, album, grouping, genre) are derived from the first
# track of the collection.
class Album
  attr_reader :artist, :album, :grouping, :genre, :album_hash

  # album_hash:: { track_id => track_hash } for every track on the album.
  def initialize album_hash
    @album_hash = album_hash
    parse_album_hash
  end

  # Re-read the cached attributes from the first track of the album.
  def parse_album_hash
    _id, first_track = album_hash.first
    @artist   = first_track["Artist"]
    @album    = first_track["Album"]
    @grouping = first_track["Grouping"]
    @genre    = first_track["Genre"]
  end

  # Apply +new_genre+ to every track, then refresh the cached attributes.
  def genre=( new_genre )
    @album_hash = @album_hash.each_with_object({}) do |(id, track), rebuilt|
      track["Genre"] = new_genre
      rebuilt[id] = track
    end
    parse_album_hash
    # update iTunes. Somehow :(
  end

  # Apply +new_grouping+ to every track, then refresh the cached attributes.
  def grouping=( new_grouping )
    @album_hash = @album_hash.each_with_object({}) do |(id, track), rebuilt|
      track["Grouping"] = new_grouping
      rebuilt[id] = track
    end
    parse_album_hash
    # update iTunes. Somehow :(
  end
end
# Read-only wrapper around an iTunes "iTunes Music Library.xml" plist:
# parses the library, filters out non-audio tracks and groups them by album.
class Itunes
  TEST_ITUNES_PATH = '/Users/richlyon/Coding/Ruby/development/rjl_itunes/features/assets/test/iTunes Music Library.xml'
  ITUNES_PATH = '/Users/richlyon/Music/iTunes/iTunes Music Library.xml'
  LIVE = '/Users/richlyon/Music/iTunes LIVE DO NOT DELETE/iTunes Music Library.xml'
  # attr_reader :album
  attr_reader :albums
  attr_reader :tracks_hash
  attr_reader :itunes_plist
  attr_reader :itunes_path
  attr_reader :itunes_hash
  # Parse the library at +itunes_path+ and index its audio tracks and albums.
  def initialize( itunes_path = TEST_ITUNES_PATH )
    @itunes_path = itunes_path
    # Bug fix: parse the supplied path.  The hard-coded ITUNES_PATH constant
    # was read regardless of the argument, defeating the parameter and the
    # TEST_ITUNES_PATH default.
    @itunes_hash = Plist::parse_xml( itunes_path )
    @tracks_hash = get_audio_tracks
    @albums = get_albums
  end
  # All tracks whose "Kind" marks them as an audio file, keyed by track id.
  # (The dead `tracks` accumulator array from the original was removed.)
  def get_audio_tracks
    @itunes_hash["Tracks"].reject { |key, hash| !audio_file?( hash ) }
  end
  # One Album object per distinct album title among the audio tracks.
  def get_albums
    titles = []
    # Bug fix: collect titles from *all* tracks.  The original iterated
    # `@tracks_hash.first(1)`, limiting the scan to a single track and
    # leaving the uniqueness check below pointless.
    @tracks_hash.each do |album_id, album_hash|
      titles << album_hash["Album"] if !titles.include? album_hash["Album"]
    end
    albums_list = []
    titles.each do |title|
      tracks = {}
      @tracks_hash.each do |album_id, album_hash|
        tracks[album_id] = album_hash if title == album_hash["Album"]
      end
      albums_list << Album.new(tracks)
    end
    return albums_list
  end
  # Write the (possibly modified) library back to the file it was read from.
  def save
    File.open(@itunes_path, 'w') {|f| f.write( @itunes_hash.to_plist) }
  end
  # True when the library XML parsed successfully.
  def valid?
    return !@itunes_hash.nil?
  end
  # True when the track's "Kind" string identifies an audio file.
  def audio_file?( track_hash )
    return track_hash["Kind"].include? "audio file"
  end
  # Print +message+ framed by separator rules (debugging aid).
  def debug( message )
    puts "="*100
    puts message
    puts "="*100
  end
end
|
#!/usr/bin/env ruby
#
# = itunes
#
# Copyright 2016 Richard Lyon
# Distributed under the MIT license
#
require 'plist' # https://github.com/bleything/plist
# Early iteration of the iTunes library wrapper; get_albums and
# get_first_album are still hard-coded stubs.
class Itunes
  attr_reader :albums # for debugging - likely won't require this
  attr_reader :album
  attr_reader :tracks_hash
  attr_reader :itunes_plist
  ITUNES_PATH = '/Users/richlyon/Music/iTunes/iTunes Music Library.xml'
  LIVE = '/Users/richlyon/Music/iTunes LIVE DO NOT DELETE/iTunes Music Library.xml'
  attr_reader :itunes_path
  # Parse the library at +itunes_path+ and index its audio tracks and albums.
  def initialize( itunes_path = ITUNES_PATH )
    @itunes_path = itunes_path
    # Bug fix: honour the itunes_path argument instead of always parsing the
    # hard-coded ITUNES_PATH constant.
    @itunes_hash = Plist::parse_xml( itunes_path )
    @tracks_hash = get_audio_tracks
    @albums = get_albums
  end
  # Write the library back to the file it was loaded from.
  def save
    # Bug fix: the original referenced @filepath (never assigned, so nil)
    # and a bare `itunes` (undefined method — NoMethodError).  Write the
    # parsed hash back to the recorded path instead.
    File.open(@itunes_path, 'w') {|f| f.write( @itunes_hash.to_plist) }
  end
  # Track hash for +album_id+ (note: shadows the attr_reader :album above).
  def album ( album_id )
    return @tracks_hash[album_id.to_s]
  end
  # All tracks whose "Kind" marks them as an audio file, keyed by track id.
  # (The dead `tracks` accumulator array from the original was removed.)
  def get_audio_tracks
    @itunes_hash["Tracks"].reject { |key, hash| !audio_file?( hash ) }
  end
  # True when the library XML parsed successfully.
  def valid?
    return !@itunes_hash.nil?
  end
  # True when the track's "Kind" string identifies an audio file.
  def audio_file?( track_hash )
    return track_hash["Kind"].include? "audio file"
  end
  # Stub: fixed placeholder data until album grouping is implemented.
  def get_albums
    album = {
      "artist" => "ABBA",
      "album" => "Gold"
    }
    return [album]
  end
  # Stub placeholder.
  def get_first_album
    return "ACDC"
  end
end
Remove dead code
#!/usr/bin/env ruby
#
# = itunes
#
# Copyright 2016 Richard Lyon
# Distributed under the MIT license
#
require 'plist' # https://github.com/bleything/plist
# Earliest iteration of the iTunes library wrapper; album handling is still
# stubbed and @albums is not populated by initialize.
class Itunes
  attr_reader :album
  attr_reader :tracks_hash
  attr_reader :itunes_plist
  ITUNES_PATH = '/Users/richlyon/Music/iTunes/iTunes Music Library.xml'
  LIVE = '/Users/richlyon/Music/iTunes LIVE DO NOT DELETE/iTunes Music Library.xml'
  attr_reader :itunes_path
  # Parse the library at +itunes_path+ and index its audio tracks.
  def initialize( itunes_path = ITUNES_PATH )
    @itunes_path = itunes_path
    # Bug fix: honour the itunes_path argument instead of always parsing the
    # hard-coded ITUNES_PATH constant.
    @itunes_hash = Plist::parse_xml( itunes_path )
    @tracks_hash = get_audio_tracks
  end
  # Write the library back to the file it was loaded from.
  def save
    # Bug fix: the original referenced @filepath (never assigned, so nil)
    # and a bare `itunes` (undefined method — NoMethodError).  Write the
    # parsed hash back to the recorded path instead.
    File.open(@itunes_path, 'w') {|f| f.write( @itunes_hash.to_plist) }
  end
  # Track hash for +album_id+ (note: shadows the attr_reader :album above).
  def album ( album_id )
    return @tracks_hash[album_id.to_s]
  end
  # All tracks whose "Kind" marks them as an audio file, keyed by track id.
  # (The dead `tracks` accumulator array from the original was removed.)
  def get_audio_tracks
    @itunes_hash["Tracks"].reject { |key, hash| !audio_file?( hash ) }
  end
  # True when the library XML parsed successfully.
  def valid?
    return !@itunes_hash.nil?
  end
  # True when the track's "Kind" string identifies an audio file.
  def audio_file?( track_hash )
    return track_hash["Kind"].include? "audio file"
  end
  # Stub: fixed placeholder data until album grouping is implemented.
  def get_albums
    album = {
      "artist" => "ABBA",
      "album" => "Gold"
    }
    return [album]
  end
  # Stub placeholder.
  def get_first_album
    return "ACDC"
  end
end
|
class ApplicationGroupBuilder
attr_reader :application_group
attr_reader :save_list
def initialize(param, person_mapper)
@save_list = []
@is_update = true # we assume that this is a update existing application group workflow
@applicants_params = param[:applicants]
param = param.slice(:e_case_id, :submitted_at, :e_status_code, :application_type)
@person_mapper = person_mapper
@application_group = ApplicationGroup.where(e_case_id: param[:e_case_id]).first
if @application_group.nil?
@application_group = ApplicationGroup.new(param) #we create a new application group from the xml
@is_update = false # means this is a create
add_irsgroup({}) # we need a atleast 1 irsgroup hence adding a blank one
end
@application_group.updated_by = "curam_system_service"
get_household
end
def add_applicant(applicant_params)
# puts "applicant_params[:is_primary_applicant] #{applicant_params[:is_primary_applicant]}"
if @application_group.applicants.map(&:person_id).include? applicant_params[:person].id
# puts "Added already existing applicant"
applicant = @application_group.applicants.where(person_id: applicant_params[:person].id).first
else
# puts "Added a new applicant"
if applicant_params[:is_primary_applicant] == "true"
reset_exisiting_primary_applicant
end
applicant = @application_group.applicants.build(filter_applicant_params(applicant_params))
member = applicant.person.members.select do |m|
m.authority?
end.first
set_person_demographics(member, applicant_params[:person_demographics])
@save_list << member
@save_list << applicant
# puts "applicant_params[:is_primary_applicant] #{applicant_params[:is_primary_applicant]} @application_group.applicants #{applicant.inspect}"
end
applicant
end
def reset_exisiting_primary_applicant
@application_group.applicants.each do |applicant|
applicant.is_primary_applicant = false
end
end
def set_person_demographics(member, person_demographics_params)
member.dob = person_demographics_params["dob"] if person_demographics_params["dob"]
member.death_date = person_demographics_params["death_date"] if person_demographics_params["death_date"]
member.ssn = person_demographics_params["ssn"] if person_demographics_params["ssn"]
member.gender = person_demographics_params["gender"] if person_demographics_params["gender"]
member.ethnicity = person_demographics_params["ethnicity"] if person_demographics_params["ethnicity"]
member.race = person_demographics_params["race"] if person_demographics_params["race"]
member.marital_status = person_demographics_params["marital_status"] if person_demographics_params["marital_status"]
end
def filter_applicant_params(applicant_params)
applicant_params = applicant_params.slice(
:is_primary_applicant,
:is_coverage_applicant,
:person)
applicant_params.delete_if do |k, v|
v.nil?
end
end
def get_household
return @household if @household
if !@is_update
# puts "New Application Group Case"
@household = self.application_group.households.build #if new application group then create new household
@save_list << @household
elsif have_applicants_changed?
# puts "Update Application Group Case - Applicants have changed. Creating new household"
@household = self.application_group.households.build #if applicants have changed then create new household
@save_list << @household
else
# puts "Update Application Group Case. Using latest household."
#TODO to use .is_active household instead of .last
@household = self.application_group.households.last #if update and applicants haven't changed then use the latest household in use
end
# puts "return @household"
return @household
end
def have_applicants_changed?
current_list = @application_group.applicants.map do |applicant|
applicant.person_id
end.sort
new_list = @applicants_params.map do |applicants_param|
applicants_param[:person].id
end.sort
#puts current_list.inspect
#puts new_list.inspect
if current_list == new_list
return false
else
return true
end
end
def add_coverage_household
coverage_household = @household.coverage_households.build({submitted_at: Time.now})
@application_group.applicants.each do |applicant|
if applicant.is_coverage_applicant
coverage_household_member = coverage_household.coverage_household_members.build
coverage_household_member.applicant_id = applicant.id
end
end
end
def primary_applicant_employee_applicant
employee_applicant = @application_group.primary_applicant.employee_applicant
employee_applicant = @application_group.primary_applicant.employee_applicant.build unless employee_applicant
employee_applicant.employer = @application_group.primary_applicant.person.employer
end
def add_hbx_enrollment
# puts @application_group.primary_applicant
@application_group.primary_applicant.person.policies.each do |policy|
hbx_enrollement = @household.hbx_enrollments.build
hbx_enrollement.policy = policy
@application_group.primary_applicant.broker_id = Broker.find(policy.broker_id) unless policy.broker_id.blank?
#hbx_enrollement.employer = Employer.find(policy.employer_id) unless policy.employer_id.blank?
#hbx_enrollement.broker = Broker.find(policy.broker_id) unless policy.broker_id.blank?
#hbx_enrollement.primary_applicant = alpha_person
#hbx_enrollement.allocated_aptc_in_dollars = policy.allocated_aptc
hbx_enrollement.enrollment_group_id = policy.eg_id
hbx_enrollement.elected_aptc_in_dollars = policy.elected_aptc
hbx_enrollement.applied_aptc_in_dollars = policy.applied_aptc
hbx_enrollement.submitted_at = Time.now
hbx_enrollement.kind = "employer_sponsored" unless policy.employer_id.blank?
hbx_enrollement.kind = "unassisted_qhp" if (hbx_enrollement.applied_aptc_in_cents == 0 && policy.employer.blank?)
hbx_enrollement.kind = "insurance_assisted_qhp" if (hbx_enrollement.applied_aptc_in_cents > 0 && policy.employer.blank?)
policy.enrollees.each do |enrollee|
begin
person = Person.find_for_member_id(enrollee.m_id)
@application_group.applicants << Applicant.new(person: person) unless @application_group.person_is_applicant?(person)
applicant = @application_group.find_applicant_by_person(person)
hbx_enrollement_member = hbx_enrollement.hbx_enrollment_members.build({applicant: applicant,
premium_amount_in_cents: enrollee.pre_amt})
hbx_enrollement_member.is_subscriber = true if (enrollee.rel_code == "self")
rescue FloatDomainError
# puts "Error: invalid premium amount for enrollee: #{enrollee.inspect}"
next
end
end
end
end
def add_irsgroup(irs_group_params)
@application_group.irs_groups.build()
end
#TODO - method not implemented properly using .build(params)
def add_irsgroups(irs_groups_params)
irs_groups_params.map do |irs_group_params|
add_irsgroup(irs_group_params)
end
end
def add_tax_households(tax_households_params, eligibility_determinations_params)
tax_households_params.map do |tax_household_params|
tax_household = @household.tax_households.build(filter_tax_household_params(tax_household_params))
tax_household_params[:tax_household_members].map do |tax_household_member_params|
tax_household_member = tax_household.tax_household_members.build(filter_tax_household_member_params(tax_household_member_params))
person_uri = @person_mapper.alias_map[tax_household_member_params[:id]]
person_obj = @person_mapper.people_map[person_uri].first
new_applicant = get_applicant(person_obj)
new_applicant = verify_person_id(new_applicant)
tax_household_member.applicant_id = new_applicant.id
tax_household_member.applicant = new_applicant
end
end
eligibility_determinations_params.each do |eligibility_determination_params|
#TODO assuming only 1tax_household. needs to be corrected later
@household.tax_households.first.eligibility_determinations.build(eligibility_determination_params)
end
end
def verify_person_id(applicant)
if applicant.id.to_s.include? "concern_role"
end
applicant
end
def filter_tax_household_member_params(tax_household_member_params)
tax_household_member_params.delete_if do |k, v|
v.nil?
end
end
def filter_tax_household_params(tax_household_params)
tax_household_params = tax_household_params.slice(:id, :primary_applicant_id, :total_count, :total_incomes_by_year)
tax_household_params.delete_if do |k, v|
v.nil?
end
end
## Fetches the applicant object either from application_group or person_mapper
def get_applicant(person_obj)
new_applicant = self.application_group.applicants.find do |applicant|
applicant.id == @person_mapper.applicant_map[person_obj.id].id
end
new_applicant = @person_mapper.applicant_map[person_obj.id] unless new_applicant
end
def add_financial_statements(applicants_params)
applicants_params.map do |applicant_params|
applicant_params[:financial_statements].each do |financial_statement_params|
tax_household_member = find_tax_household_member(@person_mapper.applicant_map[applicant_params[:person].id])
financial_statement = tax_household_member.financial_statements.build(filter_financial_statement_params(financial_statement_params))
financial_statement_params[:incomes].each do |income_params|
financial_statement.incomes.build(income_params)
end
financial_statement_params[:deductions].each do |deduction_params|
financial_statement.deductions.build(deduction_params)
end
financial_statement_params[:alternative_benefits].each do |alternative_benefit_params|
financial_statement.alternate_benefits.build(alternative_benefit_params)
end
end
end
end
def filter_financial_statement_params(financial_statement_params)
financial_statement_params = financial_statement_params.slice(:type, :is_tax_filing_together, :tax_filing_status)
financial_statement_params.delete_if do |k, v|
v.nil?
end
end
def find_tax_household_member(applicant)
tax_household_members = self.application_group.households.flat_map(&:tax_households).flat_map(&:tax_household_members)
tax_household_member = tax_household_members.find do |tax_household_member|
tax_household_member.applicant_id == applicant.id
end
tax_household_member
end
def save
primary_applicant_employee_applicant
id = @application_group.save!
save_save_list
@application_group.id #return the id of saved application group
end
#save objects in save list
def save_save_list
save_list.each do |obj|
obj.save!
end
end
end
filter_tax_household_member_params now operates on a cloned hash so the caller's params are no longer mutated
class ApplicationGroupBuilder
attr_reader :application_group
attr_reader :save_list
def initialize(param, person_mapper)
@save_list = []
@is_update = true # we assume that this is a update existing application group workflow
@applicants_params = param[:applicants]
filtered_param = param.slice(:e_case_id, :submitted_at, :e_status_code, :application_type)
@person_mapper = person_mapper
@application_group = ApplicationGroup.where(e_case_id: filtered_param[:e_case_id]).first
if @application_group.nil?
@application_group = ApplicationGroup.new(filtered_param) #we create a new application group from the xml
@is_update = false # means this is a create
end
add_irsgroups([{}]) # we need a atleast 1 irsgroup hence adding a blank one
@application_group.updated_by = "curam_system_service"
get_household
end
def add_applicant(applicant_params)
# puts "applicant_params[:is_primary_applicant] #{applicant_params[:is_primary_applicant]}"
if @application_group.applicants.map(&:person_id).include? applicant_params[:person].id
# puts "Added already existing applicant"
applicant = @application_group.applicants.where(person_id: applicant_params[:person].id).first
else
# puts "Added a new applicant"
if applicant_params[:is_primary_applicant] == "true"
reset_exisiting_primary_applicant
end
applicant = @application_group.applicants.build(filter_applicant_params(applicant_params))
member = applicant.person.members.select do |m|
m.authority?
end.first
set_person_demographics(member, applicant_params[:person_demographics])
@save_list << member
@save_list << applicant
# puts "applicant_params[:is_primary_applicant] #{applicant_params[:is_primary_applicant]} @application_group.applicants #{applicant.inspect}"
end
applicant
end
def reset_exisiting_primary_applicant
@application_group.applicants.each do |applicant|
applicant.is_primary_applicant = false
end
end
def set_person_demographics(member, person_demographics_params)
member.dob = person_demographics_params["dob"] if person_demographics_params["dob"]
member.death_date = person_demographics_params["death_date"] if person_demographics_params["death_date"]
member.ssn = person_demographics_params["ssn"] if person_demographics_params["ssn"]
member.gender = person_demographics_params["gender"] if person_demographics_params["gender"]
member.ethnicity = person_demographics_params["ethnicity"] if person_demographics_params["ethnicity"]
member.race = person_demographics_params["race"] if person_demographics_params["race"]
member.marital_status = person_demographics_params["marital_status"] if person_demographics_params["marital_status"]
end
def filter_applicant_params(applicant_params)
applicant_params = applicant_params.slice(
:is_primary_applicant,
:is_coverage_applicant,
:person)
applicant_params.delete_if do |k, v|
v.nil?
end
end
def get_household
return @household if @household
if !@is_update
# puts "New Application Group Case"
@household = self.application_group.households.build #if new application group then create new household
@save_list << @household
elsif have_applicants_changed?
# puts "Update Application Group Case - Applicants have changed. Creating new household"
@household = self.application_group.households.build #if applicants have changed then create new household
@save_list << @household
else
# puts "Update Application Group Case. Using latest household."
#TODO to use .is_active household instead of .last
@household = self.application_group.households.last #if update and applicants haven't changed then use the latest household in use
end
# puts "return @household"
return @household
end
def have_applicants_changed?
current_list = @application_group.applicants.map do |applicant|
applicant.person_id
end.sort
new_list = @applicants_params.map do |applicants_param|
applicants_param[:person].id
end.sort
if current_list == new_list
return false
else
return true
end
end
def add_coverage_household
coverage_household = @household.coverage_households.build({submitted_at: Time.now})
@application_group.applicants.each do |applicant|
if applicant.is_coverage_applicant
coverage_household_member = coverage_household.coverage_household_members.build
coverage_household_member.applicant_id = applicant.id
end
end
end
def primary_applicant_employee_applicant
employee_applicant = @application_group.primary_applicant.employee_applicant
employee_applicant = @application_group.primary_applicant.employee_applicant.build unless employee_applicant
employee_applicant.employer = @application_group.primary_applicant.person.employer
end
def add_hbx_enrollment
# puts @application_group.primary_applicant
@application_group.primary_applicant.person.policies.each do |policy|
hbx_enrollement = @household.hbx_enrollments.build
hbx_enrollement.policy = policy
@application_group.primary_applicant.broker_id = Broker.find(policy.broker_id) unless policy.broker_id.blank?
#hbx_enrollement.employer = Employer.find(policy.employer_id) unless policy.employer_id.blank?
#hbx_enrollement.broker = Broker.find(policy.broker_id) unless policy.broker_id.blank?
#hbx_enrollement.primary_applicant = alpha_person
#hbx_enrollement.allocated_aptc_in_dollars = policy.allocated_aptc
hbx_enrollement.enrollment_group_id = policy.eg_id
hbx_enrollement.elected_aptc_in_dollars = policy.elected_aptc
hbx_enrollement.applied_aptc_in_dollars = policy.applied_aptc
hbx_enrollement.submitted_at = Time.now
hbx_enrollement.kind = "employer_sponsored" unless policy.employer_id.blank?
hbx_enrollement.kind = "unassisted_qhp" if (hbx_enrollement.applied_aptc_in_cents == 0 && policy.employer.blank?)
hbx_enrollement.kind = "insurance_assisted_qhp" if (hbx_enrollement.applied_aptc_in_cents > 0 && policy.employer.blank?)
policy.enrollees.each do |enrollee|
begin
person = Person.find_for_member_id(enrollee.m_id)
@application_group.applicants << Applicant.new(person: person) unless @application_group.person_is_applicant?(person)
applicant = @application_group.find_applicant_by_person(person)
hbx_enrollement_member = hbx_enrollement.hbx_enrollment_members.build({applicant: applicant,
premium_amount_in_cents: enrollee.pre_amt})
hbx_enrollement_member.is_subscriber = true if (enrollee.rel_code == "self")
rescue FloatDomainError
# puts "Error: invalid premium amount for enrollee: #{enrollee.inspect}"
next
end
end
end
end
def add_irsgroup(irs_group_params)
puts irs_group_params.inspect
@application_group.irs_groups.build(irs_group_params)
end
#TODO - method not implemented properly using .build(params)
def add_irsgroups(irs_groups_params)
irs_groups_params.map do |irs_group_params|
add_irsgroup(irs_group_params)
end
end
# Builds TaxHousehold documents (with their eligibility determinations and
# members) on the current household from the parsed parameters. For each
# member, the XML person alias is resolved through the person mapper to a
# Person, then to an Applicant, which is linked onto the member.
# NOTE(review): assumes every tax_household_params hash carries
# :eligibility_determinations and :tax_household_members — confirm upstream.
def add_tax_households(tax_households_params)
  tax_households_params.map do |tax_household_params|
    tax_household = @household.tax_households.build(filter_tax_household_params(tax_household_params))
    eligibility_determinations_params = tax_household_params[:eligibility_determinations]
    eligibility_determinations_params.each do |eligibility_determination_params|
      tax_household.eligibility_determinations.build(eligibility_determination_params)
    end
    tax_household_params[:tax_household_members].map do |tax_household_member_params|
      tax_household_member = tax_household.tax_household_members.build(filter_tax_household_member_params(tax_household_member_params))
      # alias -> person URI -> Person object (people_map values are collections)
      person_uri = @person_mapper.alias_map[tax_household_member_params[:person_id]]
      person_obj = @person_mapper.people_map[person_uri].first
      new_applicant = get_applicant(person_obj)
      new_applicant = verify_person_id(new_applicant)
      tax_household_member.applicant_id = new_applicant.id
      tax_household_member.applicant = new_applicant
    end
  end
end
# Returns the applicant unchanged.
# FIX: removed an empty `if applicant.id.to_s.include? "concern_role"`
# branch — dead code, presumably an unfinished check.
# TODO confirm what the "concern_role" check was meant to do.
def verify_person_id(applicant)
  applicant
end
# Whitelists the tax-household-member attributes and strips nil values.
# The input hash is left untouched (slice returns a fresh hash).
def filter_tax_household_member_params(tax_household_member_params)
  allowed = tax_household_member_params.slice(:is_ia_eligible, :is_medicaid_chip_eligible, :is_subscriber)
  allowed.reject { |_key, value| value.nil? }
end
# Whitelists the tax-household attributes and strips nil values.
def filter_tax_household_params(tax_household_params)
  allowed = tax_household_params.slice(:id, :total_count, :total_incomes_by_year)
  allowed.reject { |_key, value| value.nil? }
end
## Fetches the applicant object either from application_group or person_mapper
# FIX: the original ended with `new_applicant = ... unless new_applicant`;
# that expression evaluates to nil when an applicant WAS found in the group,
# so the method returned nil in the found case (callers immediately call .id
# on the result). Now returns the found applicant, else the mapped one.
def get_applicant(person_obj)
  found = application_group.applicants.find do |applicant|
    applicant.id == @person_mapper.applicant_map[person_obj.id].id
  end
  found || @person_mapper.applicant_map[person_obj.id]
end
# Builds FinancialStatement documents (with incomes, deductions and
# alternate benefits) on the TaxHouseholdMember belonging to each applicant.
# NOTE(review): assumes find_tax_household_member always finds a member for
# the mapped applicant — it returns nil otherwise and the .build call below
# would raise; confirm the calling order guarantees members exist.
def add_financial_statements(applicants_params)
  applicants_params.map do |applicant_params|
    applicant_params[:financial_statements].each do |financial_statement_params|
      tax_household_member = find_tax_household_member(@person_mapper.applicant_map[applicant_params[:person].id])
      financial_statement = tax_household_member.financial_statements.build(filter_financial_statement_params(financial_statement_params))
      financial_statement_params[:incomes].each do |income_params|
        financial_statement.incomes.build(income_params)
      end
      financial_statement_params[:deductions].each do |deduction_params|
        financial_statement.deductions.build(deduction_params)
      end
      # params say "alternative", model association says "alternate"
      financial_statement_params[:alternative_benefits].each do |alternative_benefit_params|
        financial_statement.alternate_benefits.build(alternative_benefit_params)
      end
    end
  end
end
# Whitelists the financial-statement attributes and strips nil values.
def filter_financial_statement_params(financial_statement_params)
  allowed = financial_statement_params.slice(:type, :is_tax_filing_together, :tax_filing_status)
  allowed.reject { |_key, value| value.nil? }
end
# Returns the TaxHouseholdMember belonging to the given applicant, searching
# every tax household of every household; nil when none matches.
def find_tax_household_member(applicant)
  members = application_group.households
                             .flat_map(&:tax_households)
                             .flat_map(&:tax_household_members)
  members.find { |member| member.applicant_id == applicant.id }
end
# Persists the application group and every object queued in save_list.
# Returns the saved application group's id. save! raises on failure.
def save
  #primary_applicant_employee_applicant
  @application_group.save! # FIX: removed unused `id =` local binding
  save_save_list
  @application_group.id #return the id of saved application group
end
#save objects in save list
# Explicitly persists every embedded/associated object queued during build.
def save_save_list
  save_list.each(&:save!)
end
end
|
# Load every Capistrano task definition under lib/capistrano/tasks.
# FIX: removed leftover debug output (`puts "hello"` and `p r`).
Dir.glob('lib/capistrano/tasks/*.rb').each { |r| require r }
:panda_face: Fixed the load files pattern.
# Import every .rake task file from the j-cap-recipes submodule, resolved
# relative to this file. NOTE(review): `import` here is presumably Rake's
# deferred-load helper (available in Capfiles/Rakefiles) — confirm context.
rake_files_pattern = File.expand_path("./j-cap-recipes/tasks/*.rake", File.dirname(__FILE__))
Dir.glob(rake_files_pattern).each { |r| import r }
|
# coding: utf-8
# Gem specification for crystal-rails. Packages all git-tracked files except
# the test app and the bundled jquery-ui assets.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'crystal/rails/version'

Gem::Specification.new do |spec|
  spec.name    = "crystal-rails"
  spec.version = Crystal::Rails::VERSION
  spec.authors = ["Michail"]
  spec.email   = ["xbiznet@gmail.com"]

  spec.summary     = %q{TODO: Write a short summary, because Rubygems requires one.}
  spec.description = %q{TODO: Write a longer description or delete this line.}
  spec.homepage    = "TODO: Put your gem's website or public repo URL here."
  spec.license     = "MIT"

  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  raise "RubyGems 2.0 or newer is required to protect against public gem pushes." unless spec.respond_to?(:metadata)
  spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"

  spec.files = `git ls-files`.split("\n").reject { |f| f.match?(/^testapp|^jquery-ui/) }
  spec.require_paths = ["lib"]
end
Corrected gemspec to append submodule files to spec.files
# coding: utf-8
# Gem specification for crystal-rails; also packages files tracked inside
# git submodules (git ls-files does not list submodule contents).
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'crystal/rails/version'

Gem::Specification.new do |spec|
  spec.name    = "crystal-rails"
  spec.version = Crystal::Rails::VERSION
  spec.authors = ["Michail"]
  spec.email   = ["xbiznet@gmail.com"]
  spec.summary = %q{TODO: Write a short summary, because Rubygems requires one.}
  spec.description = %q{TODO: Write a longer description or delete this line.}
  spec.homepage = "TODO: Put your gem's website or public repo URL here."
  spec.license = "MIT"

  # Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
  # delete this section to allow pushing this gem to any host.
  if spec.respond_to?(:metadata)
    spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
  else
    raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
  end

  spec.files = `git ls-files`.split("\n").reject { |f| f =~ /^testapp|^jquery-ui/ }
  spec.require_paths = ["lib"]

  # get an array of submodule dirs by executing 'pwd' inside each submodule
  gem_dir = File.expand_path(File.dirname(__FILE__)) + "/"
  `git submodule --quiet foreach pwd`.split($\).each do |submodule_path|
    Dir.chdir(submodule_path) do
      submodule_relative_path = submodule_path.sub gem_dir, ""
      # issue git ls-files in submodule's directory and
      # prepend the submodule path to create relative file paths
      `git ls-files`.split($\).each do |filename|
        # FIX: was `s.files << ...` — `s` is undefined, the block variable is
        # `spec`; also restored the garbled "#{filename}" interpolation.
        spec.files << "#{submodule_relative_path}/#{filename}"
      end
    end
  end
end
|
# Builds or updates an ApplicationGroup aggregate (applicants, households,
# tax households, IRS groups, financial statements, HBX enrollments) from
# parameters parsed out of a Curam XML payload.
class ApplicationGroupBuilder
  attr_reader :application_group
  attr_reader :save_list

  # param         - hash of application-group attributes, plus :applicants
  # person_mapper - maps XML person aliases/URIs to Person/Applicant objects
  def initialize(param, person_mapper)
    @save_list = [] # it is observed that some embedded objects are not saved.
    # We add all embedded/associated objects to this list and save them explicitly
    @is_update = true # we assume that this is an update-existing-application-group workflow
    @applicants_params = param[:applicants]
    filtered_param = param.slice(:e_case_id, :submitted_at, :e_status_code, :application_type)
    @person_mapper = person_mapper
    @application_group = ApplicationGroup.where(e_case_id: filtered_param[:e_case_id]).first
    if @application_group.nil?
      @application_group = ApplicationGroup.new(filtered_param) # we create a new application group from the xml
      @is_update = false # means this is a create
    end
    add_irsgroups([{}]) # we need at least 1 irsgroup hence adding a blank one
    @application_group.updated_by = "curam_system_service"
    get_household
  end

  # Returns the existing applicant for the person, or builds a new one
  # (resetting any previous primary applicant when this one is primary).
  def add_applicant(applicant_params)
    if @application_group.applicants.map(&:person_id).include? applicant_params[:person].id
      applicant = @application_group.applicants.where(person_id: applicant_params[:person].id).first
    else
      if applicant_params[:is_primary_applicant] == "true"
        reset_exisiting_primary_applicant
      end
      applicant = @application_group.applicants.build(filter_applicant_params(applicant_params))
      member = applicant.person.members.select do |m|
        m.authority?
      end.first
      set_person_demographics(member, applicant_params[:person_demographics])
      @save_list << member
      @save_list << applicant
    end
    applicant
  end

  # Clears the primary flag on every applicant.
  # NOTE(review): keeps the historical "exisiting" typo in the name — it is
  # part of the class's callable interface.
  def reset_exisiting_primary_applicant
    @application_group.applicants.each do |applicant|
      applicant.is_primary_applicant = false
    end
  end

  # Copies demographic fields from the parsed params onto the authoritative
  # member, skipping absent keys (string keys, per the XML parser output).
  def set_person_demographics(member, person_demographics_params)
    member.dob = person_demographics_params["dob"] if person_demographics_params["dob"]
    member.death_date = person_demographics_params["death_date"] if person_demographics_params["death_date"]
    member.ssn = person_demographics_params["ssn"] if person_demographics_params["ssn"]
    member.gender = person_demographics_params["gender"] if person_demographics_params["gender"]
    member.ethnicity = person_demographics_params["ethnicity"] if person_demographics_params["ethnicity"]
    member.race = person_demographics_params["race"] if person_demographics_params["race"]
    member.marital_status = person_demographics_params["marital_status"] if person_demographics_params["marital_status"]
  end

  # Whitelists applicant attributes and drops nils.
  def filter_applicant_params(applicant_params)
    applicant_params = applicant_params.slice(
      :is_primary_applicant,
      :is_coverage_applicant,
      :person)
    applicant_params.delete_if do |k, v|
      v.nil?
    end
  end

  # Memoizes @household: a new household on create or when the applicant set
  # changed; otherwise the latest existing household.
  def get_household
    return @household if @household
    if !@is_update
      @household = self.application_group.households.build # new application group => new household
      @save_list << @household
    elsif have_applicants_changed?
      @household = self.application_group.households.build # applicants changed => new household
      @save_list << @household
    else
      # TODO use the .is_active household instead of .last
      @household = self.application_group.households.last # unchanged applicants => reuse latest household
    end
    return @household
  end

  # True when the incoming applicant person ids differ from the persisted set.
  def have_applicants_changed?
    current_list = @application_group.applicants.map do |applicant|
      applicant.person_id
    end.sort
    new_list = @applicants_params.map do |applicants_param|
      applicants_param[:person].id
    end.sort
    current_list != new_list
  end

  # Builds a CoverageHousehold containing every coverage applicant.
  def add_coverage_household
    coverage_household = @household.coverage_households.build({submitted_at: Time.now})
    @application_group.applicants.each do |applicant|
      if applicant.is_coverage_applicant
        coverage_household_member = coverage_household.coverage_household_members.build
        coverage_household_member.applicant_id = applicant.id
      end
    end
  end

  # Links the primary applicant to their person's employer, if any.
  # TODO verify from Dan if this logic is right
  def add_primary_applicant_employee_applicant
    if application_group.primary_applicant.person.employer
      employee_applicant = @application_group.primary_applicant.employee_applicants.build
      employee_applicant.employer = @application_group.primary_applicant.person.employer
      @save_list << employee_applicant
    end
  end

  # Builds an HbxEnrollment (with members) for each policy held by the
  # primary applicant's person.
  def add_hbx_enrollment
    @application_group.primary_applicant.person.policies.each do |policy|
      hbx_enrollement = @household.hbx_enrollments.build
      hbx_enrollement.policy = policy
      # FIX: previously assigned the Broker *document* to the broker_id field;
      # keep the existence check via Broker.find but store the id.
      @application_group.primary_applicant.broker_id = Broker.find(policy.broker_id).id unless policy.broker_id.blank?
      hbx_enrollement.enrollment_group_id = policy.eg_id
      hbx_enrollement.elected_aptc_in_dollars = policy.elected_aptc
      hbx_enrollement.applied_aptc_in_dollars = policy.applied_aptc
      hbx_enrollement.submitted_at = Time.now
      hbx_enrollement.kind = "employer_sponsored" unless policy.employer_id.blank?
      hbx_enrollement.kind = "unassisted_qhp" if (hbx_enrollement.applied_aptc_in_cents == 0 && policy.employer.blank?)
      hbx_enrollement.kind = "insurance_assisted_qhp" if (hbx_enrollement.applied_aptc_in_cents > 0 && policy.employer.blank?)
      policy.enrollees.each do |enrollee|
        begin
          person = Person.find_for_member_id(enrollee.m_id)
          @application_group.applicants << Applicant.new(person: person) unless @application_group.person_is_applicant?(person)
          applicant = @application_group.find_applicant_by_person(person)
          hbx_enrollement_member = hbx_enrollement.hbx_enrollment_members.build({applicant: applicant,
                                                                                 premium_amount_in_cents: enrollee.pre_amt})
          hbx_enrollement_member.is_subscriber = true if (enrollee.rel_code == "self")
        rescue FloatDomainError
          # invalid premium amount for this enrollee; skip it
          next
        end
      end
    end
  end

  # Builds an IrsGroup from the given attributes. (Removed debug `puts`.)
  def add_irsgroup(irs_group_params)
    @application_group.irs_groups.build(irs_group_params)
  end

  #TODO - method not implemented properly using .build(params)
  def add_irsgroups(irs_groups_params)
    irs_groups_params.map do |irs_group_params|
      add_irsgroup(irs_group_params)
    end
  end

  # Builds TaxHouseholds (with eligibility determinations and members) on the
  # current household, linking each member to its resolved Applicant.
  def add_tax_households(tax_households_params)
    tax_households_params.map do |tax_household_params|
      tax_household = @household.tax_households.build(filter_tax_household_params(tax_household_params))
      eligibility_determinations_params = tax_household_params[:eligibility_determinations]
      eligibility_determinations_params.each do |eligibility_determination_params|
        tax_household.eligibility_determinations.build(eligibility_determination_params)
      end
      tax_household_params[:tax_household_members].map do |tax_household_member_params|
        tax_household_member = tax_household.tax_household_members.build(filter_tax_household_member_params(tax_household_member_params))
        person_uri = @person_mapper.alias_map[tax_household_member_params[:person_id]]
        person_obj = @person_mapper.people_map[person_uri].first
        new_applicant = get_applicant(person_obj)
        new_applicant = verify_person_id(new_applicant)
        tax_household_member.applicant_id = new_applicant.id
        tax_household_member.applicant = new_applicant
      end
    end
  end

  # Returns the applicant unchanged.
  # FIX: removed an empty `if applicant.id.to_s.include? "concern_role"`
  # branch (dead code, presumably an unfinished check). TODO confirm intent.
  def verify_person_id(applicant)
    applicant
  end

  # Whitelists tax-household-member attributes and drops nils.
  def filter_tax_household_member_params(tax_household_member_params)
    tax_household_member_params
      .slice(:is_ia_eligible, :is_medicaid_chip_eligible, :is_subscriber)
      .reject { |_k, v| v.nil? }
  end

  # Whitelists tax-household attributes and drops nils.
  def filter_tax_household_params(tax_household_params)
    tax_household_params
      .slice(:id, :total_count, :total_incomes_by_year)
      .reject { |_k, v| v.nil? }
  end

  ## Fetches the applicant object either from application_group or person_mapper
  # FIX: the original ended with `new_applicant = ... unless new_applicant`,
  # whose value is nil when an applicant WAS found, so the method returned
  # nil in the found case. Return the found applicant, else the mapped one.
  def get_applicant(person_obj)
    found = self.application_group.applicants.find do |applicant|
      applicant.id == @person_mapper.applicant_map[person_obj.id].id
    end
    found || @person_mapper.applicant_map[person_obj.id]
  end

  # Builds FinancialStatements (with incomes, deductions and alternate
  # benefits) on each applicant's tax household member.
  def add_financial_statements(applicants_params)
    applicants_params.map do |applicant_params|
      applicant_params[:financial_statements].each do |financial_statement_params|
        tax_household_member = find_tax_household_member(@person_mapper.applicant_map[applicant_params[:person].id])
        financial_statement = tax_household_member.financial_statements.build(filter_financial_statement_params(financial_statement_params))
        financial_statement_params[:incomes].each do |income_params|
          financial_statement.incomes.build(income_params)
        end
        financial_statement_params[:deductions].each do |deduction_params|
          financial_statement.deductions.build(deduction_params)
        end
        financial_statement_params[:alternative_benefits].each do |alternative_benefit_params|
          financial_statement.alternate_benefits.build(alternative_benefit_params)
        end
      end
    end
  end

  # Whitelists financial-statement attributes and drops nils.
  def filter_financial_statement_params(financial_statement_params)
    financial_statement_params
      .slice(:type, :is_tax_filing_together, :tax_filing_status)
      .reject { |_k, v| v.nil? }
  end

  # Finds the TaxHouseholdMember (across all households) for an applicant.
  def find_tax_household_member(applicant)
    members = self.application_group.households.flat_map(&:tax_households).flat_map(&:tax_household_members)
    members.find { |m| m.applicant_id == applicant.id }
  end

  # Persists the aggregate plus everything queued in save_list; returns the
  # saved application group's id. save! raises on validation failure.
  def save
    add_primary_applicant_employee_applicant
    @application_group.save! # FIX: removed unused `id =` local binding
    save_save_list
    @application_group.id # return the id of saved application group
  end

  # save objects in save list
  def save_save_list
    save_list.each(&:save!)
  end
end
removed total_incomes_by_year from filter_tax_household_params
# Builds or updates an ApplicationGroup aggregate (applicants, households,
# tax households, IRS groups, financial statements, HBX enrollments) from
# parameters parsed out of a Curam XML payload.
class ApplicationGroupBuilder
  attr_reader :application_group
  attr_reader :save_list

  # param         - hash of application-group attributes, plus :applicants
  # person_mapper - maps XML person aliases/URIs to Person/Applicant objects
  def initialize(param, person_mapper)
    @save_list = [] # it is observed that some embedded objects are not saved.
    # We add all embedded/associated objects to this list and save them explicitly
    @is_update = true # we assume that this is an update-existing-application-group workflow
    @applicants_params = param[:applicants]
    filtered_param = param.slice(:e_case_id, :submitted_at, :e_status_code, :application_type)
    @person_mapper = person_mapper
    @application_group = ApplicationGroup.where(e_case_id: filtered_param[:e_case_id]).first
    if @application_group.nil?
      @application_group = ApplicationGroup.new(filtered_param) # we create a new application group from the xml
      @is_update = false # means this is a create
    end
    add_irsgroups([{}]) # we need at least 1 irsgroup hence adding a blank one
    @application_group.updated_by = "curam_system_service"
    get_household
  end

  # Returns the existing applicant for the person, or builds a new one
  # (resetting any previous primary applicant when this one is primary).
  def add_applicant(applicant_params)
    if @application_group.applicants.map(&:person_id).include? applicant_params[:person].id
      applicant = @application_group.applicants.where(person_id: applicant_params[:person].id).first
    else
      if applicant_params[:is_primary_applicant] == "true"
        reset_exisiting_primary_applicant
      end
      applicant = @application_group.applicants.build(filter_applicant_params(applicant_params))
      member = applicant.person.members.select do |m|
        m.authority?
      end.first
      set_person_demographics(member, applicant_params[:person_demographics])
      @save_list << member
      @save_list << applicant
    end
    applicant
  end

  # Clears the primary flag on every applicant.
  # NOTE(review): keeps the historical "exisiting" typo in the name — it is
  # part of the class's callable interface.
  def reset_exisiting_primary_applicant
    @application_group.applicants.each do |applicant|
      applicant.is_primary_applicant = false
    end
  end

  # Copies demographic fields from the parsed params onto the authoritative
  # member, skipping absent keys (string keys, per the XML parser output).
  def set_person_demographics(member, person_demographics_params)
    member.dob = person_demographics_params["dob"] if person_demographics_params["dob"]
    member.death_date = person_demographics_params["death_date"] if person_demographics_params["death_date"]
    member.ssn = person_demographics_params["ssn"] if person_demographics_params["ssn"]
    member.gender = person_demographics_params["gender"] if person_demographics_params["gender"]
    member.ethnicity = person_demographics_params["ethnicity"] if person_demographics_params["ethnicity"]
    member.race = person_demographics_params["race"] if person_demographics_params["race"]
    member.marital_status = person_demographics_params["marital_status"] if person_demographics_params["marital_status"]
  end

  # Whitelists applicant attributes and drops nils.
  def filter_applicant_params(applicant_params)
    applicant_params = applicant_params.slice(
      :is_primary_applicant,
      :is_coverage_applicant,
      :person)
    applicant_params.delete_if do |k, v|
      v.nil?
    end
  end

  # Memoizes @household: a new household on create or when the applicant set
  # changed; otherwise the latest existing household.
  def get_household
    return @household if @household
    if !@is_update
      @household = self.application_group.households.build # new application group => new household
      @save_list << @household
    elsif have_applicants_changed?
      @household = self.application_group.households.build # applicants changed => new household
      @save_list << @household
    else
      # TODO use the .is_active household instead of .last
      @household = self.application_group.households.last # unchanged applicants => reuse latest household
    end
    return @household
  end

  # True when the incoming applicant person ids differ from the persisted set.
  def have_applicants_changed?
    current_list = @application_group.applicants.map do |applicant|
      applicant.person_id
    end.sort
    new_list = @applicants_params.map do |applicants_param|
      applicants_param[:person].id
    end.sort
    current_list != new_list
  end

  # Builds a CoverageHousehold containing every coverage applicant.
  def add_coverage_household
    coverage_household = @household.coverage_households.build({submitted_at: Time.now})
    @application_group.applicants.each do |applicant|
      if applicant.is_coverage_applicant
        coverage_household_member = coverage_household.coverage_household_members.build
        coverage_household_member.applicant_id = applicant.id
      end
    end
  end

  # Links the primary applicant to their person's employer, if any.
  # TODO verify from Dan if this logic is right
  def add_primary_applicant_employee_applicant
    if application_group.primary_applicant.person.employer
      employee_applicant = @application_group.primary_applicant.employee_applicants.build
      employee_applicant.employer = @application_group.primary_applicant.person.employer
      @save_list << employee_applicant
    end
  end

  # Builds an HbxEnrollment (with members) for each policy held by the
  # primary applicant's person.
  def add_hbx_enrollment
    @application_group.primary_applicant.person.policies.each do |policy|
      hbx_enrollement = @household.hbx_enrollments.build
      hbx_enrollement.policy = policy
      # FIX: previously assigned the Broker *document* to the broker_id field;
      # keep the existence check via Broker.find but store the id.
      @application_group.primary_applicant.broker_id = Broker.find(policy.broker_id).id unless policy.broker_id.blank?
      hbx_enrollement.enrollment_group_id = policy.eg_id
      hbx_enrollement.elected_aptc_in_dollars = policy.elected_aptc
      hbx_enrollement.applied_aptc_in_dollars = policy.applied_aptc
      hbx_enrollement.submitted_at = Time.now
      hbx_enrollement.kind = "employer_sponsored" unless policy.employer_id.blank?
      hbx_enrollement.kind = "unassisted_qhp" if (hbx_enrollement.applied_aptc_in_cents == 0 && policy.employer.blank?)
      hbx_enrollement.kind = "insurance_assisted_qhp" if (hbx_enrollement.applied_aptc_in_cents > 0 && policy.employer.blank?)
      policy.enrollees.each do |enrollee|
        begin
          person = Person.find_for_member_id(enrollee.m_id)
          @application_group.applicants << Applicant.new(person: person) unless @application_group.person_is_applicant?(person)
          applicant = @application_group.find_applicant_by_person(person)
          hbx_enrollement_member = hbx_enrollement.hbx_enrollment_members.build({applicant: applicant,
                                                                                 premium_amount_in_cents: enrollee.pre_amt})
          hbx_enrollement_member.is_subscriber = true if (enrollee.rel_code == "self")
        rescue FloatDomainError
          # invalid premium amount for this enrollee; skip it
          next
        end
      end
    end
  end

  # Builds an IrsGroup from the given attributes. (Removed debug `puts`.)
  def add_irsgroup(irs_group_params)
    @application_group.irs_groups.build(irs_group_params)
  end

  #TODO - method not implemented properly using .build(params)
  def add_irsgroups(irs_groups_params)
    irs_groups_params.map do |irs_group_params|
      add_irsgroup(irs_group_params)
    end
  end

  # Builds TaxHouseholds (with eligibility determinations and members) on the
  # current household, linking each member to its resolved Applicant.
  def add_tax_households(tax_households_params)
    tax_households_params.map do |tax_household_params|
      tax_household = @household.tax_households.build(filter_tax_household_params(tax_household_params))
      eligibility_determinations_params = tax_household_params[:eligibility_determinations]
      eligibility_determinations_params.each do |eligibility_determination_params|
        tax_household.eligibility_determinations.build(eligibility_determination_params)
      end
      tax_household_params[:tax_household_members].map do |tax_household_member_params|
        tax_household_member = tax_household.tax_household_members.build(filter_tax_household_member_params(tax_household_member_params))
        person_uri = @person_mapper.alias_map[tax_household_member_params[:person_id]]
        person_obj = @person_mapper.people_map[person_uri].first
        new_applicant = get_applicant(person_obj)
        new_applicant = verify_person_id(new_applicant)
        tax_household_member.applicant_id = new_applicant.id
        tax_household_member.applicant = new_applicant
      end
    end
  end

  # Returns the applicant unchanged.
  # FIX: removed an empty `if applicant.id.to_s.include? "concern_role"`
  # branch (dead code, presumably an unfinished check). TODO confirm intent.
  def verify_person_id(applicant)
    applicant
  end

  # Whitelists tax-household-member attributes and drops nils.
  def filter_tax_household_member_params(tax_household_member_params)
    tax_household_member_params
      .slice(:is_ia_eligible, :is_medicaid_chip_eligible, :is_subscriber)
      .reject { |_k, v| v.nil? }
  end

  # Whitelists tax-household attributes and drops nils.
  # (This revision intentionally no longer passes :total_incomes_by_year.)
  def filter_tax_household_params(tax_household_params)
    tax_household_params
      .slice(:id, :total_count)
      .reject { |_k, v| v.nil? }
  end

  ## Fetches the applicant object either from application_group or person_mapper
  # FIX: the original ended with `new_applicant = ... unless new_applicant`,
  # whose value is nil when an applicant WAS found, so the method returned
  # nil in the found case. Return the found applicant, else the mapped one.
  def get_applicant(person_obj)
    found = self.application_group.applicants.find do |applicant|
      applicant.id == @person_mapper.applicant_map[person_obj.id].id
    end
    found || @person_mapper.applicant_map[person_obj.id]
  end

  # Builds FinancialStatements (with incomes, deductions and alternate
  # benefits) on each applicant's tax household member.
  def add_financial_statements(applicants_params)
    applicants_params.map do |applicant_params|
      applicant_params[:financial_statements].each do |financial_statement_params|
        tax_household_member = find_tax_household_member(@person_mapper.applicant_map[applicant_params[:person].id])
        financial_statement = tax_household_member.financial_statements.build(filter_financial_statement_params(financial_statement_params))
        financial_statement_params[:incomes].each do |income_params|
          financial_statement.incomes.build(income_params)
        end
        financial_statement_params[:deductions].each do |deduction_params|
          financial_statement.deductions.build(deduction_params)
        end
        financial_statement_params[:alternative_benefits].each do |alternative_benefit_params|
          financial_statement.alternate_benefits.build(alternative_benefit_params)
        end
      end
    end
  end

  # Whitelists financial-statement attributes and drops nils.
  def filter_financial_statement_params(financial_statement_params)
    financial_statement_params
      .slice(:type, :is_tax_filing_together, :tax_filing_status)
      .reject { |_k, v| v.nil? }
  end

  # Finds the TaxHouseholdMember (across all households) for an applicant.
  def find_tax_household_member(applicant)
    members = self.application_group.households.flat_map(&:tax_households).flat_map(&:tax_household_members)
    members.find { |m| m.applicant_id == applicant.id }
  end

  # Persists the aggregate plus everything queued in save_list; returns the
  # saved application group's id. save! raises on validation failure.
  def save
    add_primary_applicant_employee_applicant
    @application_group.save! # FIX: removed unused `id =` local binding
    save_save_list
    @application_group.id # return the id of saved application group
  end

  # save objects in save list
  def save_save_list
    save_list.each(&:save!)
  end
end
|
# Gem version constant for Jober.
module Jober
  VERSION = "0.2".freeze # frozen so the constant cannot be mutated in place
end
Bump version to 0.3
# Gem version constant for Jober.
module Jober
  VERSION = "0.3".freeze # frozen so the constant cannot be mutated in place
end
|
module Junoser
class Ruler
OFFSET = ' '
def initialize(input)
@rule = input
end
def to_rule
rule_header << rule << rule_footer
end
def rule
str = @rule.read
str = process_reserved_element(str)
str = str.split(/\n/).map {|l| format(process_line(l)) }.join("\n")
end
private
def process_line(str)
return str if str =~ /^(.* do|end)$/
str.gsub!(/("[^"]+")/) { "str(#$1)" } # "foo" -> str("foo")
str.gsub!(/^(\s*)arg(\.as\(:\S+\))? \($/) { "#{$1}b(arg#$2," } # arg ( -> b(arg,
str.gsub!(/^(\s*)(str\(\S+\)) ([^ \t\n\r\f\(|,]+)(\.as\(:\S+\))?(,?)$/) { "#{$1}a(#$2, #$3)#$4#$5" } # str("foo") bar -> a(str("foo"), bar)
str.gsub!(/^(\s*)(str\(\S+\)) \((.*)\)(,?)$/) { "#{$1}a(#$2, #$3)#$4" } # str("foo") (a | b) -> a(str("foo"), a | b)
str.gsub!(/^(\s*)(str\(\S+\)) \($/) { "#{$1}b(#$2," } # str("foo") ( -> b(str("foo"),
str.gsub!(/^(\s*)(\(.*\))(\.as\(:\S\))? \($/) { "#{$1}b(#$2#$3," } # (a | b) ( -> b((a | b),
str.gsub!(/^(\s*)(str\(\S+\)) ([^ \t\n\r\f\(|,]+) \($/) { "#{$1}b(a(#$2, #$3)," } # str("foo") bar ( -> b(a(str("foo"), bar),
str.gsub!(/^(\s*)(str\(\S+\)) \((.*)\) \($/) { "#{$1}a(#$2, #$3," } # str("foo") (a | b) ( -> a(str("foo"), a | b,
str
end
def process_reserved_element(str)
str.gsub! /"\$\S+"/, 'arg'
%w[as-number confederation-as metric-value limit-threshold filename filter-name class-name classifier-name link-subscription per-traffic-class-bandwidth].each do |key|
str.gsub! %["#{key}" arg], 'arg'
end
str.gsub! '"equal-literal"', '"="'
str.gsub! '"plus-literal"', '"+"'
str.gsub! '"minus-literal"', '"-"'
str.gsub!(/\((.*) \| "name"\)/) { "(#$1 | arg)" }
str.gsub! '"vlan" ("id-name" | "all")', '"vlan" ("all" | arg)'
str.gsub!(/("ssh-\S+") arg/) { "#$1 (quote | arg)" }
str.gsub! '"description" arg', '"description" (quote | arg)'
str.gsub! '"as-path-prepend" arg', '"as-path-prepend" (quote | arg)'
str.gsub!(/(s\(\s*)"address" \(\s*arg\s*\)/) { "#{$1}arg" }
str.gsub!(/^(\s*"idle-timeout" \(\s*c\(\s*c\(\s*"forever",\s*)"timeout" arg/) { "#{$1}arg" }
str = omit_label(str, 'contents', 'syslog_object')
str = omit_label(str, 'interface', 'cos_interfaces_type')
str = omit_label(str, 'interface', 'ir_interfaces_type')
str = omit_label(str, 'interface', 'interfaces_type')
str = omit_label(str, 'client-address-list', 'client_address_object')
str = omit_label(str, 'prefix-list-item', 'prefix_list_items')
str = omit_label(str, 'instance', 'juniper_routing_instance')
str = omit_label(str, 'vlan', 'vlan_type')
str.gsub!(/"icmp"(.*)"icmp6"/) { %["icmpv6"#$1"icmp"] }
str.gsub!(/"http"(.*)"https"/) { %["https"#$1"http"] }
str.gsub!(/"snmp"(.*)"snmptrap"/) { %["snmptrap"#$1"snmp"] }
str.gsub!(/"cspf"(.*)"cspf-link"/) { %["cspf-link"#$1"cspf"] }
str.gsub!(/"route-filter" (\(\s*control_route_filter_type\s*\))/) { %["route-filter" arg #{$1}.as(:oneline)] }
str.gsub!(/"source-address-filter" (\(\s*control_source_address_filter_type\s*\))/) { %["source-adress-filter" arg #{$1}.as(:oneline)] }
str.gsub!(/("next-hop" \(\s*c\(\s*c\(\s*[^)]*)"address" \(\s*ipaddr\s*\)/) { "#{$1}ipaddr" }
%w[metric metric2 metric3 metric4 tag tag2 preference preference2 color color2 local-preference].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*c\(\s*c\(\s*)"#{key}" arg/) { "#{$1}arg" }
end
str.gsub!(/^(\s*"vrf-target" \(\s*)c\(\s*"community" arg,/) { "#{$1}ca(" }
str.gsub!(/^(\s*)"priority" \(\s*c\(\s*"setup-priority" arg,\s*"reservation-priority" arg\s*\)\s*\)/) { %[#{$1}a("priority", a(arg, arg)).as(:oneline)] }
%w[teardown hold-time stub].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*)c\(/) { "#{$1}sc(" }
end
%w[file confederation].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*)c\(\s*arg,/) { "#{$1}sca(" }
end
str.gsub!(/^(\s*)"inline-services"/) do
format(['"inline-services" (',
' "bandwidth" ("1g" | "10g")',
')'], $1)
end
str.gsub!(/^(\s*)"ieee-802.3ad" \(\s*c\(\s*"lacp" \(\s*c\(/) do
format(['"802.3ad" (',
' ca(',
' "lacp" (',
' c(',
' "force-up",'], $1)
end
str.gsub!(/^(\s*)"as-path" \(\s*c\(\s*"path" arg,/) do
format(['"as-path" (',
' ca('], $1)
end
str.gsub!(/^(\s*)"as-path" arg \(\s*c\(\s*"path" arg\s*\)/) do
format(['"as-path" arg (',
' c(',
' quote,',
' arg',
' )'], $1)
end
str.gsub!(/^(\s*)"ribgroup-name" arg$/) do
format(['arg (',
' arg',
')'], $1)
end
str.gsub!(/^rule\(:regular_expression\) do\s*((?!end).)*\s*end/) do
format(['rule(:regular_expression) do',
' (quote | arg).as(:arg)',
'end'])
end
str.gsub!(/^rule\(:login_user_object\) do\s*arg\.as\(:arg\) \(\s*c\(\s*"full-name" arg,/) do
format(['rule(:login_user_object) do',
' arg.as(:arg) (',
' sc(',
' "full-name" (quote | arg),'])
end
str.gsub!(/(rule\(:juniper_policy_options\) do\s*)c\(/) { "#{$1}c(" }
str.gsub!(/(rule\(:control_route_filter_type\) do\s*)s\(\s*arg,/) { "#{$1}b(" }
str.gsub!(/(rule\(:control_source_address_filter_type\) do\s*)s\(\s*arg,/) { "#{$1}b(" }
str.gsub!(/^(rule\(:trace_file_type\) do\s*)c\(\s*arg,/) { "#{$1}sca(" }
str.gsub!(/^(rule\(:archive_object\) do\s*)c\(/) { "#{$1}sc(" }
str.gsub!(/^(\s*)c\(\s*arg,$/) { "#{$1}ca(" }
str
end
# Strip a labelled wrapper around +content+, keeping the leading whitespace.
# e.g. %["contents" ( syslog_object )] becomes %[syslog_object].
# Returns a new string; +str+ is not mutated.
def omit_label(str, label, content)
  pattern = /(\s*)"#{label}" \(\s*#{content}\s*\)/
  str.gsub(pattern) { |_| Regexp.last_match(1) + content }
end
# Indent a String (or each element of an Array, joined with newlines) by
# +offset+. Blank entries stay blank so no trailing whitespace is produced.
# Any other type yields nil, mirroring the case fall-through of the original.
def format(str, offset = OFFSET)
  indent = lambda { |line| line.empty? ? '' : offset + line.to_s }
  if str.is_a?(String)
    indent.call(str)
  elsif str.is_a?(Array)
    str.map(&indent).join("\n")
  end
end
# Emits the static preamble of the generated parser source: the
# Junoser::Parser class with its rule-combinator helpers (b/a/c/ca/s/sc/sca)
# and the primitive token rules. NOTE: no comments may be added inside the
# heredoc below -- its text is written verbatim into the generated file.
def rule_header
<<-EOS
require 'parslet'
module Junoser
class Parser < Parslet::Parser
# block with children maybe
def b(object, *children)
children.inject(object) {|rule, child| rule.as(:label) >> (space >> child.as(:child) | eos) }
end
# with an argument, and children maybe
def a(object, arg, *children)
b(object.as(:statement) >> space >> arg.as(:argument), *children)
end
# choice
def c(*objects)
objects.inject {|rule, object| rule | object }
end
def ca(*objects)
objects.inject {|rule, object| rule | object } | arg
end
# sequence
def s(*objects)
# TODO: eval "minOccurs" attribute of choice element
objects.inject {|rule, object| rule >> (space >> object).maybe }
end
# sequential choice
def sc(*objects)
(c(*objects) >> space.maybe).repeat(0)
end
def sca(*objects)
(c(*objects, arg) >> space.maybe).repeat(0)
end
rule(:arg) { match('\\S').repeat(1) }
rule(:space) { match('\\s').repeat(1) }
rule(:any) { match('.').repeat(1) }
rule(:eos) { match('$') }
rule(:dotted) { match('[^. \\t\\n\\r\\f]').repeat(1) >> str('.') >> match('[^. \\t\\n\\r\\f]').repeat(1) }
rule(:quote) { str('"') >> match('[^"]').repeat(1) >> str('"') }
rule(:address) { match('[0-9a-fA-F:\.]').repeat(1) }
rule(:prefix ) { address >> (str('/') >> match('[0-9]').repeat(1)).maybe }
root(:set)
rule(:set) { (str('set') | str('deactivate')) >> space >> configuration.as(:config) >> comment.maybe }
rule(:comment) { space.maybe >> (hash_comment | slash_asterisk) }
rule(:hash_comment) { str('#') >> any.maybe }
rule(:slash_asterisk) { str('/*') >> match('(?!\\*\\/).').repeat(0) >> str('*/') }
EOS
end
# Emits the closing lines of the generated parser source, matching the
# `class Parser` and `module Junoser` opened in #rule_header.
def rule_footer
<<-EOS
end
end
EOS
end
end
end
Fix parsing of `set protocols mpls path <path-name> <address>` commands
module Junoser
class Ruler
OFFSET = ' '
# @param [#read] input IO-like source of the intermediate rule text;
# it is consumed lazily by #rule.
def initialize(input)
@rule = input
end
# Assemble the complete generated parser source: static header, the
# converted rule body, then the closing footer.
def to_rule
  [rule_header, rule, rule_footer].join
end
# Read the raw intermediate grammar, apply the reserved-element rewrites,
# then convert each line into ruby parser-combinator form, indented by the
# standard offset.
def rule
  text = process_reserved_element(@rule.read)
  text.split("\n").map { |line| format(process_line(line)) }.join("\n")
end
private
# Convert a single line of the intermediate grammar into ruby parser
# combinators (str()/a()/b() wrappers). Lines opening or closing a rule
# block ("... do" / "end") pass through untouched. The gsub! calls are
# order-sensitive; mutates and returns +str+.
def process_line(str)
return str if str =~ /^(.* do|end)$/
str.gsub!(/("[^"]+")/) { "str(#$1)" } # "foo" -> str("foo")
str.gsub!(/^(\s*)arg(\.as\(:\S+\))? \($/) { "#{$1}b(arg#$2," } # arg ( -> b(arg,
str.gsub!(/^(\s*)(str\(\S+\)) ([^ \t\n\r\f\(|,]+)(\.as\(:\S+\))?(,?)$/) { "#{$1}a(#$2, #$3)#$4#$5" } # str("foo") bar -> a(str("foo"), bar)
str.gsub!(/^(\s*)(str\(\S+\)) \((.*)\)(,?)$/) { "#{$1}a(#$2, #$3)#$4" } # str("foo") (a | b) -> a(str("foo"), a | b)
str.gsub!(/^(\s*)(str\(\S+\)) \($/) { "#{$1}b(#$2," } # str("foo") ( -> b(str("foo"),
str.gsub!(/^(\s*)(\(.*\))(\.as\(:\S\))? \($/) { "#{$1}b(#$2#$3," } # (a | b) ( -> b((a | b),
str.gsub!(/^(\s*)(str\(\S+\)) ([^ \t\n\r\f\(|,]+) \($/) { "#{$1}b(a(#$2, #$3)," } # str("foo") bar ( -> b(a(str("foo"), bar),
str.gsub!(/^(\s*)(str\(\S+\)) \((.*)\) \($/) { "#{$1}a(#$2, #$3," } # str("foo") (a | b) ( -> a(str("foo"), a | b,
str
end
# Rewrite reserved/special-cased elements of the intermediate grammar text
# before the generic line-by-line conversion in #process_line runs. The
# gsub! calls are order-sensitive. Mutates +str+ in place and returns it.
def process_reserved_element(str)
# Variables like "$foo" become a generic argument.
str.gsub! /"\$\S+"/, 'arg'
%w[as-number confederation-as metric-value limit-threshold filename filter-name class-name classifier-name link-subscription per-traffic-class-bandwidth].each do |key|
str.gsub! %["#{key}" arg], 'arg'
end
str.gsub! '"equal-literal"', '"="'
str.gsub! '"plus-literal"', '"+"'
str.gsub! '"minus-literal"', '"-"'
str.gsub!(/\((.*) \| "name"\)/) { "(#$1 | arg)" }
str.gsub! '"vlan" ("id-name" | "all")', '"vlan" ("all" | arg)'
str.gsub!(/("ssh-\S+") arg/) { "#$1 (quote | arg)" }
str.gsub! '"description" arg', '"description" (quote | arg)'
str.gsub! '"as-path-prepend" arg', '"as-path-prepend" (quote | arg)'
str.gsub! '"path-list" arg (', 'b(ipaddr,'
str.gsub!(/(s\(\s*)"address" \(\s*arg\s*\)/) { "#{$1}arg" }
str.gsub!(/^(\s*"idle-timeout" \(\s*c\(\s*c\(\s*"forever",\s*)"timeout" arg/) { "#{$1}arg" }
# Collapse wrapper labels whose content rule already carries the meaning.
str = omit_label(str, 'contents', 'syslog_object')
str = omit_label(str, 'interface', 'cos_interfaces_type')
str = omit_label(str, 'interface', 'ir_interfaces_type')
str = omit_label(str, 'interface', 'interfaces_type')
str = omit_label(str, 'client-address-list', 'client_address_object')
str = omit_label(str, 'prefix-list-item', 'prefix_list_items')
str = omit_label(str, 'instance', 'juniper_routing_instance')
str = omit_label(str, 'vlan', 'vlan_type')
# Longest-prefix keywords must come first so the shorter one cannot
# shadow them in the generated choice.
str.gsub!(/"icmp"(.*)"icmp6"/) { %["icmpv6"#$1"icmp"] }
str.gsub!(/"http"(.*)"https"/) { %["https"#$1"http"] }
str.gsub!(/"snmp"(.*)"snmptrap"/) { %["snmptrap"#$1"snmp"] }
str.gsub!(/"cspf"(.*)"cspf-link"/) { %["cspf-link"#$1"cspf"] }
str.gsub!(/"route-filter" (\(\s*control_route_filter_type\s*\))/) { %["route-filter" arg #{$1}.as(:oneline)] }
# Fix: the replacement used to emit the misspelled keyword
# "source-adress-filter", which the generated parser would then require
# verbatim, rejecting correctly spelled configs.
str.gsub!(/"source-address-filter" (\(\s*control_source_address_filter_type\s*\))/) { %["source-address-filter" arg #{$1}.as(:oneline)] }
str.gsub!(/("next-hop" \(\s*c\(\s*c\(\s*[^)]*)"address" \(\s*ipaddr\s*\)/) { "#{$1}ipaddr" }
%w[metric metric2 metric3 metric4 tag tag2 preference preference2 color color2 local-preference].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*c\(\s*c\(\s*)"#{key}" arg/) { "#{$1}arg" }
end
str.gsub!(/^(\s*"vrf-target" \(\s*)c\(\s*"community" arg,/) { "#{$1}ca(" }
str.gsub!(/^(\s*)"priority" \(\s*c\(\s*"setup-priority" arg,\s*"reservation-priority" arg\s*\)\s*\)/) { %[#{$1}a("priority", a(arg, arg)).as(:oneline)] }
%w[teardown hold-time stub].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*)c\(/) { "#{$1}sc(" }
end
%w[file confederation].each do |key|
str.gsub!(/^(\s*"#{key}" \(\s*)c\(\s*arg,/) { "#{$1}sca(" }
end
str.gsub!(/^(\s*)"inline-services"/) do
format(['"inline-services" (',
'  "bandwidth" ("1g" | "10g")',
')'], $1)
end
str.gsub!(/^(\s*)"ieee-802.3ad" \(\s*c\(\s*"lacp" \(\s*c\(/) do
format(['"802.3ad" (',
'  ca(',
'    "lacp" (',
'      c(',
'        "force-up",'], $1)
end
str.gsub!(/^(\s*)"as-path" \(\s*c\(\s*"path" arg,/) do
format(['"as-path" (',
'  ca('], $1)
end
str.gsub!(/^(\s*)"as-path" arg \(\s*c\(\s*"path" arg\s*\)/) do
format(['"as-path" arg (',
'  c(',
'    quote,',
'    arg',
'  )'], $1)
end
str.gsub!(/^(\s*)"ribgroup-name" arg$/) do
format(['arg (',
'  arg',
')'], $1)
end
str.gsub!(/^rule\(:regular_expression\) do\s*((?!end).)*\s*end/) do
format(['rule(:regular_expression) do',
'  (quote | arg).as(:arg)',
'end'])
end
str.gsub!(/^rule\(:login_user_object\) do\s*arg\.as\(:arg\) \(\s*c\(\s*"full-name" arg,/) do
format(['rule(:login_user_object) do',
'  arg.as(:arg) (',
'    sc(',
'      "full-name" (quote | arg),'])
end
str.gsub!(/(rule\(:juniper_policy_options\) do\s*)c\(/) { "#{$1}c(" }
str.gsub!(/(rule\(:control_route_filter_type\) do\s*)s\(\s*arg,/) { "#{$1}b(" }
str.gsub!(/(rule\(:control_source_address_filter_type\) do\s*)s\(\s*arg,/) { "#{$1}b(" }
str.gsub!(/^(rule\(:trace_file_type\) do\s*)c\(\s*arg,/) { "#{$1}sca(" }
str.gsub!(/^(rule\(:archive_object\) do\s*)c\(/) { "#{$1}sc(" }
str.gsub!(/^(\s*)c\(\s*arg,$/) { "#{$1}ca(" }
str
end
# Strip a labelled wrapper around +content+, keeping the leading whitespace.
# e.g. %["contents" ( syslog_object )] becomes %[syslog_object].
# Returns a new string; +str+ is not mutated.
def omit_label(str, label, content)
  pattern = /(\s*)"#{label}" \(\s*#{content}\s*\)/
  str.gsub(pattern) { |_| Regexp.last_match(1) + content }
end
# Indent a String (or each element of an Array, joined with newlines) by
# +offset+. Blank entries stay blank so no trailing whitespace is produced.
# Any other type yields nil, mirroring the case fall-through of the original.
def format(str, offset = OFFSET)
  indent = lambda { |line| line.empty? ? '' : offset + line.to_s }
  if str.is_a?(String)
    indent.call(str)
  elsif str.is_a?(Array)
    str.map(&indent).join("\n")
  end
end
# Emits the static preamble of the generated parser source: the
# Junoser::Parser class with its rule-combinator helpers (b/a/c/ca/s/sc/sca)
# and the primitive token rules. NOTE: no comments may be added inside the
# heredoc below -- its text is written verbatim into the generated file.
def rule_header
<<-EOS
require 'parslet'
module Junoser
class Parser < Parslet::Parser
# block with children maybe
def b(object, *children)
children.inject(object) {|rule, child| rule.as(:label) >> (space >> child.as(:child) | eos) }
end
# with an argument, and children maybe
def a(object, arg, *children)
b(object.as(:statement) >> space >> arg.as(:argument), *children)
end
# choice
def c(*objects)
objects.inject {|rule, object| rule | object }
end
def ca(*objects)
objects.inject {|rule, object| rule | object } | arg
end
# sequence
def s(*objects)
# TODO: eval "minOccurs" attribute of choice element
objects.inject {|rule, object| rule >> (space >> object).maybe }
end
# sequential choice
def sc(*objects)
(c(*objects) >> space.maybe).repeat(0)
end
def sca(*objects)
(c(*objects, arg) >> space.maybe).repeat(0)
end
rule(:arg) { match('\\S').repeat(1) }
rule(:space) { match('\\s').repeat(1) }
rule(:any) { match('.').repeat(1) }
rule(:eos) { match('$') }
rule(:dotted) { match('[^. \\t\\n\\r\\f]').repeat(1) >> str('.') >> match('[^. \\t\\n\\r\\f]').repeat(1) }
rule(:quote) { str('"') >> match('[^"]').repeat(1) >> str('"') }
rule(:address) { match('[0-9a-fA-F:\.]').repeat(1) }
rule(:prefix ) { address >> (str('/') >> match('[0-9]').repeat(1)).maybe }
root(:set)
rule(:set) { (str('set') | str('deactivate')) >> space >> configuration.as(:config) >> comment.maybe }
rule(:comment) { space.maybe >> (hash_comment | slash_asterisk) }
rule(:hash_comment) { str('#') >> any.maybe }
rule(:slash_asterisk) { str('/*') >> match('(?!\\*\\/).').repeat(0) >> str('*/') }
EOS
end
# Emits the closing lines of the generated parser source, matching the
# `class Parser` and `module Junoser` opened in #rule_header.
def rule_footer
<<-EOS
end
end
EOS
end
end
end
|
# Adds `<association>_attributes=` mass-assignment helpers for has_many
# associations declared with the :attributes option (legacy Rails APIs:
# alias_method_chain / class_inheritable_accessor).
module AttributeFu
module Associations #:nodoc:
def self.included(base) #:nodoc:
base.class_eval do
extend ClassMethods
class << self; alias_method_chain :has_many, :association_option; end
class_inheritable_accessor :managed_association_attributes
write_inheritable_attribute :managed_association_attributes, {}
after_update :save_managed_associations
end
end
# Routes assignments of the form `<association>_attributes=` to
# #has_many_attributes for associations registered via has_many :attributes.
# Matches the association key either verbatim or pluralized.
def method_missing(method_name, *args) #:nodoc:
if method_name.to_s =~ /.+?\_attributes=/
association_name = method_name.to_s.gsub '_attributes=', ''
association = managed_association_attributes.keys.detect { |element| element == association_name.to_sym } || managed_association_attributes.keys.detect { |element| element == association_name.pluralize.to_sym }
unless association.nil?
has_many_attributes association, args.first
return
end
end
super
end
private
# Applies an attributes hash to an association: builds records listed under
# :new (ordered by their numeric keys), updates existing children by id,
# queues children absent from the hash for removal on save, and drops
# children matching the :discard_if callback.
def has_many_attributes(association_id, attributes) #:nodoc:
association = send(association_id)
attributes = {} unless attributes.is_a? Hash
attributes.symbolize_keys!
if attributes.has_key?(:new)
new_attrs = attributes.delete(:new)
# Negative keys are offset by the collection length so they sort after
# the non-negative ones while keeping their relative order.
new_attrs = new_attrs.sort do |a,b|
value = lambda { |i| i < 0 ? i.abs + new_attrs.length : i }
value.call(a.first.to_i) <=> value.call(b.first.to_i)
end
# NOTE(review): the block parameter shadows the outer new_attrs; it works
# but is worth renaming.
new_attrs.each { |i, new_attrs| association.build new_attrs }
end
attributes.stringify_keys!
instance_variable_set removal_variable_name(association_id), association.reject { |object| object.new_record? || attributes.has_key?(object.id.to_s) }.map(&:id)
attributes.each do |id, object_attrs|
object = association.detect { |associated| associated.id.to_s == id }
object.attributes = object_attrs unless object.nil?
end
# discard blank attributes if discard_if proc exists
unless (discard = managed_association_attributes[association_id][:discard_if]).nil?
association.reject! { |object| object.new_record? && discard.call(object) }
association.delete(*association.select { |object| discard.call(object) })
end
end
# Saves every managed child and deletes the ones queued for removal by
# #has_many_attributes; runs from the after_update callback.
def save_managed_associations #:nodoc:
managed_association_attributes.keys.each do |association_id|
association = send(association_id)
association.each(&:save)
unless (objects_to_remove = instance_variable_get removal_variable_name(association_id)).nil?
objects_to_remove.each { |remove_id| association.delete association.detect { |obj| obj.id.to_s == remove_id.to_s } }
instance_variable_set removal_variable_name(association_id), nil
end
end
end
# Name of the ivar holding ids queued for removal, eg. @tasks_to_remove.
def removal_variable_name(association_id) #:nodoc:
"@#{association_id.to_s.pluralize}_to_remove"
end
module ClassMethods
# Behaves identically to the regular has_many, except adds the option <tt>:attributes</tt>, which, if true, creates
# a method called association_id_attributes (i.e. task_attributes, or comment_attributes) for setting the attributes
# of a collection of associated models.
#
# It also adds the option <tt>:discard_if</tt>, which accepts a proc or a symbol. If the proc
# evaluates to true, the child model will be discarded. A symbol is converted with to_proc,
# so it is sent as a message to the child model instance; if it returns true, the child
# model will be discarded.
#
# e.g.
#
#   :discard_if => proc { |comment| comment.title.blank? }
# or
#   :discard_if => :blank?   # where blank? is defined on Comment
#
# The format is as follows:
#
#  @project.task_attributes = {
#    @project.tasks.first.id => {:title => "A new title for an existing task"},
#    :new => {
#      "0" => {:title => "A new task"}
#    }
#  }
#
# Any existing tasks that are not present in the attributes hash will be removed from the association when the (parent) model
# is saved.
#
def has_many_with_association_option(association_id, options = {}, &extension)
unless (config = options.delete(:attributes)).nil?
managed_association_attributes[association_id] = {}
if options.has_key?(:discard_if)
discard_if = options.delete(:discard_if)
# Symbols become procs so both forms can be call'd uniformly later.
discard_if = discard_if.to_proc if discard_if.is_a?(Symbol)
managed_association_attributes[association_id][:discard_if] = discard_if
end
end
has_many_without_association_option(association_id, options, &extension)
end
end
end # Associations
end # AttributeFu
Update the has_many_with_association_option documentation to reflect that :discard_if also accepts a symbol
git-svn-id: d855b23fe57f630cf8ea5bf309b9e2fd69bac385@69 80b79608-713f-0410-8737-d8c0d0c1b50c
# Adds `<association>_attributes=` mass-assignment helpers for has_many
# associations declared with the :attributes option (legacy Rails APIs:
# alias_method_chain / class_inheritable_accessor).
module AttributeFu
module Associations #:nodoc:
def self.included(base) #:nodoc:
base.class_eval do
extend ClassMethods
class << self; alias_method_chain :has_many, :association_option; end
class_inheritable_accessor :managed_association_attributes
write_inheritable_attribute :managed_association_attributes, {}
after_update :save_managed_associations
end
end
# Routes assignments of the form `<association>_attributes=` to
# #has_many_attributes for associations registered via has_many :attributes.
# Matches the association key either verbatim or pluralized.
def method_missing(method_name, *args) #:nodoc:
if method_name.to_s =~ /.+?\_attributes=/
association_name = method_name.to_s.gsub '_attributes=', ''
association = managed_association_attributes.keys.detect { |element| element == association_name.to_sym } || managed_association_attributes.keys.detect { |element| element == association_name.pluralize.to_sym }
unless association.nil?
has_many_attributes association, args.first
return
end
end
super
end
private
# Applies an attributes hash to an association: builds records listed under
# :new (ordered by their numeric keys), updates existing children by id,
# queues children absent from the hash for removal on save, and drops
# children matching the :discard_if callback.
def has_many_attributes(association_id, attributes) #:nodoc:
association = send(association_id)
attributes = {} unless attributes.is_a? Hash
attributes.symbolize_keys!
if attributes.has_key?(:new)
new_attrs = attributes.delete(:new)
# Negative keys are offset by the collection length so they sort after
# the non-negative ones while keeping their relative order.
new_attrs = new_attrs.sort do |a,b|
value = lambda { |i| i < 0 ? i.abs + new_attrs.length : i }
value.call(a.first.to_i) <=> value.call(b.first.to_i)
end
# NOTE(review): the block parameter shadows the outer new_attrs; it works
# but is worth renaming.
new_attrs.each { |i, new_attrs| association.build new_attrs }
end
attributes.stringify_keys!
instance_variable_set removal_variable_name(association_id), association.reject { |object| object.new_record? || attributes.has_key?(object.id.to_s) }.map(&:id)
attributes.each do |id, object_attrs|
object = association.detect { |associated| associated.id.to_s == id }
object.attributes = object_attrs unless object.nil?
end
# discard blank attributes if discard_if proc exists
unless (discard = managed_association_attributes[association_id][:discard_if]).nil?
association.reject! { |object| object.new_record? && discard.call(object) }
association.delete(*association.select { |object| discard.call(object) })
end
end
# Saves every managed child and deletes the ones queued for removal by
# #has_many_attributes; runs from the after_update callback.
def save_managed_associations #:nodoc:
managed_association_attributes.keys.each do |association_id|
association = send(association_id)
association.each(&:save)
unless (objects_to_remove = instance_variable_get removal_variable_name(association_id)).nil?
objects_to_remove.each { |remove_id| association.delete association.detect { |obj| obj.id.to_s == remove_id.to_s } }
instance_variable_set removal_variable_name(association_id), nil
end
end
end
# Name of the ivar holding ids queued for removal, eg. @tasks_to_remove.
def removal_variable_name(association_id) #:nodoc:
"@#{association_id.to_s.pluralize}_to_remove"
end
module ClassMethods
# Behaves identically to the regular has_many, except adds the option <tt>:attributes</tt>, which, if true, creates
# a method called association_id_attributes (i.e. task_attributes, or comment_attributes) for setting the attributes
# of a collection of associated models.
#
# It also adds the option <tt>:discard_if</tt>, which accepts a proc or a symbol. If the proc
# evaluates to true, the child model will be discarded. The symbol is sent as a message to
# the child model instance; if it returns true, the child model will be discarded.
#
# e.g.
#
#   :discard_if => proc { |comment| comment.title.blank? }
# or
#   :discard_if => :blank?   # where blank? is defined on Comment
#
#
# The format is as follows:
#
#  @project.task_attributes = {
#    @project.tasks.first.id => {:title => "A new title for an existing task"},
#    :new => {
#      "0" => {:title => "A new task"}
#    }
#  }
#
# Any existing tasks that are not present in the attributes hash will be removed from the association when the (parent) model
# is saved.
#
def has_many_with_association_option(association_id, options = {}, &extension)
unless (config = options.delete(:attributes)).nil?
managed_association_attributes[association_id] = {}
if options.has_key?(:discard_if)
discard_if = options.delete(:discard_if)
# Symbols become procs so both forms can be call'd uniformly later.
discard_if = discard_if.to_proc if discard_if.is_a?(Symbol)
managed_association_attributes[association_id][:discard_if] = discard_if
end
end
has_many_without_association_option(association_id, options, &extension)
end
end
end # Associations
end # AttributeFu
|
Add another new file: top-level scan runner script
require 'pp'
# Make the project's ./lib directory (relative to this file) requirable.
$LOAD_PATH.unshift File.expand_path('./lib', File.dirname(__FILE__))
require 'active_record'
require 'models'
require 'scan/snmp'
require 'scan/passive'

# Previous sqlite3 configuration, kept for reference:
#ActiveRecord::Base.establish_connection(
#  :adapter => 'sqlite3',
#  :database => './jarvis.sqlite'
#)

# Connect to the local PostgreSQL "jarvis" database.
ActiveRecord::Base.establish_connection(
  adapter: 'postgresql',
  database: 'jarvis',
  username: 'postgres'
)

# Run the SNMP sweep first, then the passive scan.
Scan::SNMP.new.perform
Scan::Passive.new.perform
|
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
# Copyright 2013-present Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'between_meals/cmd'
require 'tempfile'
module BetweenMeals
  class Repo
    # Mercurial implementation of BetweenMeals::Repo.
    class Hg < BetweenMeals::Repo
      # Thin wrappers around `hg` command-line invocations.
      class Cmd < BetweenMeals::Cmd
        # Show the changeset for +rev+.
        def rev(rev)
          cmd("log -r #{rev}")
        end

        # Render one log entry for +rev+ using an hg template expression.
        def log(template, rev = '.')
          cmd("log -r #{rev} -l 1 -T '{#{template}}'")
        end

        def clone(url, repo_path)
          cmd("clone #{url} #{repo_path}")
        end

        def pull
          cmd('pull --rebase')
        end

        def manifest
          cmd('manifest')
        end

        def username
          cmd('config ui.username')
        end

        # Rewrite the current commit's message to +msg+ via a logfile.
        def amend(msg)
          f = Tempfile.new('between_meals.hg.amend')
          begin
            f.write(msg)
            # Flush before handing the path to hg: without this the logfile
            # can still be empty, and hg then falls back to an interactive
            # editor session in the background, which hangs.
            f.flush
            cmd("commit --amend -l #{f.path}")
          ensure
            f.close
            f.unlink
          end
        end

        # Status between two revs, from one rev, or of the working copy.
        def status(start_ref = nil, end_ref = nil)
          if start_ref && end_ref
            cmd("status --rev #{start_ref} --rev #{end_ref}")
          elsif start_ref
            cmd("status --rev #{start_ref}")
          else
            cmd('status')
          end
        end
      end
    end
  end
end
Fix hg amend: flush the message file before invoking hg.
The logfile cannot be empty; otherwise hg kicks off an interactive vi session in the background, which just hangs.
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
# Copyright 2013-present Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'between_meals/cmd'
require 'tempfile'
module BetweenMeals
class Repo
# Mercurial implementation of BetweenMeals::Repo.
class Hg < BetweenMeals::Repo
# Thin wrappers around `hg` command-line invocations.
class Cmd < BetweenMeals::Cmd
# Show the changeset for +rev+.
def rev(rev)
cmd("log -r #{rev}")
end
# Render one log entry for +rev+ using an hg template expression.
def log(template, rev = '.')
cmd("log -r #{rev} -l 1 -T '{#{template}}'")
end
def clone(url, repo_path)
cmd("clone #{url} #{repo_path}")
end
def pull
cmd('pull --rebase')
end
def manifest
cmd('manifest')
end
def username
cmd('config ui.username')
end
# Rewrite the current commit's message to +msg+ via a logfile.
def amend(msg)
f = Tempfile.new('between_meals.hg.amend')
begin
f.write(msg)
# Flush so hg never sees an empty logfile (which would make it fall
# back to an interactive editor session and hang).
f.flush
cmd("commit --amend -l #{f.path}")
ensure
f.close
f.unlink
end
end
# Status between two revs, from one rev, or of the working copy.
def status(start_ref = nil, end_ref = nil)
if start_ref && end_ref
cmd("status --rev #{start_ref} --rev #{end_ref}")
elsif start_ref
cmd("status --rev #{start_ref}")
else
cmd('status')
end
end
end
end
end
end
|
require "blinkbox/common_messaging/version"
require "bunny"
require "uri"
require "active_support/core_ext/hash/keys"
require "active_support/core_ext/hash/deep_merge"
require "active_support/core_ext/string/inflections"
require "ruby_units"
require "forwardable"
require "json-schema"
require "securerandom"
require "logger"
require "blinkbox/common_messaging/header_detectors"
module Blinkbox
# A group of methods and classes which enable the delivery of messages through the
# blinkbox Books ecosystem via AMQP.
#
# `CommonMessaging.configure!` should be used to set up connection details first, then
# every subsequent call to `CommonMessaging::Queue.new` will create a `Bunny::Queue` object
# using the connection details that were present at the time.
module CommonMessaging
# The default RabbitMQ connection details, in the format that Bunny needs them.
# NOTE(review): treat this hash as immutable -- configure! must copy it
# rather than deep_merge! into it, otherwise per-call settings leak into
# the shared defaults.
DEFAULT_CONFIG = {
bunny: {
host: "localhost",
port: 5672,
user: "guest",
pass: "guest",
vhost: "/",
log_level: Logger::WARN,
automatically_recover: true,
threaded: true,
continuation_timeout: 4000
},
retry_interval: {
initial: Unit("5 seconds"),
max: Unit("5 seconds")
},
logger: Logger.new(nil)
}
# This method only stores connection details for calls to `CommonMessaging::Queue.new`.
# Any queues already created will not be affected by subsequent calls to this method.
#
# This method converts the given options from the blinkbox Books common config format
# to the format required for Bunny so that calls like the following are possible:
#
# @example Using with CommonConfig
# require "blinkbox/common_config"
# require "blinkbox/common_messaging"
#
# config = Blinkbox::CommonConfig.new
# Blinkbox::CommonMessaging.configure!(config.tree(:rabbitmq))
#
# @param [Hash] config The configuration options needed for an MQ connection.
# @option config [String] :url The URL to the RabbitMQ server, eg. amqp://user:pass@host.name:1234/virtual_host
# @option config [Unit] :initialRetryInterval The interval at which re-connection attempts should be made when a RabbitMQ failure first occurs.
# @option config [Unit] :maxRetryInterval The maximum interval at which RabbitMQ reconnection attempts should back off to.
# @param [#debug, #info, #warn, #error, #fatal] logger The logger instance which should be used by Bunny
def self.configure!(config, logger = nil)
  # Start from a fresh copy (nested hashes duplicated one level deep) so the
  # deep_merge! calls below never mutate the shared DEFAULT_CONFIG constant;
  # previously `@@config = DEFAULT_CONFIG` aliased it and settings leaked
  # across reconfigurations.
  @@config = DEFAULT_CONFIG.each_with_object({}) do |(key, value), copy|
    copy[key] = value.is_a?(Hash) ? value.dup : value
  end
  unless config[:url].nil?
    uri = URI.parse(config[:url])
    # NOTE(review): uri.path keeps its leading "/" (eg. "/vhost") -- confirm
    # this matches the broker's vhost naming.
    @@config.deep_merge!(
      bunny: {
        host: uri.host,
        port: uri.port,
        user: uri.user,
        pass: uri.password,
        vhost: uri.path
      }
    )
  end
  %i{initialRetryInterval maxRetryInterval}.each do |unit_key|
    if config[unit_key]
      # Accept either a Unit or anything Unit() can parse (eg. "10 seconds").
      config[unit_key] = Unit(config[unit_key]) unless config[unit_key].is_a?(Unit)
      @@config.deep_merge!(
        retry_interval: {
          unit_key.to_s.sub('RetryInterval', '').to_sym => config[unit_key]
        }
      )
    end
  end
  self.logger = logger unless logger.nil?
end
# Returns the current config being used (as used by Bunny)
#
# @return [Hash]
def self.config
  # An explicit defined? check only covers the intended "not configured yet"
  # case; the previous `rescue` modifier silently swallowed every
  # StandardError raised while evaluating @@config.
  defined?(@@config) ? @@config : DEFAULT_CONFIG
end
# Sets the logger delivered to Bunny when new connections are made
#
# @param [] logger The object to which log messages should be sent.
# Sets the logger delivered to Bunny when new connections are made.
#
# @param [#debug, #info, #warn, #error, #fatal, #level, #level=] logger The object to which log messages should be sent.
# @raise [ArgumentError] if +logger+ is missing any part of that interface.
def self.logger=(logger)
  interface = %i{debug info warn error fatal level= level}
  missing = interface.reject { |m| logger.respond_to?(m) }
  missing.each do |m|
    raise ArgumentError, "The logger did not respond to '#{m}'"
  end
  @@config[:logger] = logger
end
# Returns (and starts if necessary) the connection to the RabbitMQ server as specified by the current
# config. Will keep only one connection per configuration at any time and will return or create a new connection
# as necessary. Channels are created with publisher confirmations.
#
# Application code should not need to use this method.
#
# @return [Bunny::Session]
def self.connection
# One Bunny session is cached per distinct config hash; repeat calls with
# the same config reuse the session and just call start again.
@@connections ||= {}
@@connections[config] ||= Bunny.new(config[:bunny])
@@connections[config].start
@@connections[config]
end
# Blocks until all the open connections have been closed, calling the block with any message_ids which haven't been delivered
#
# @param [Boolean] block_until_confirms Force the method to block until all messages have been acked or nacked.
# @yield [message_id] Calls the given block for any message that was undeliverable (if block_until_confirms was `true`)
# @yieldparam [String] message_id The message_id of the message which could not be delivered
def self.close_connections(block_until_confirms: true)
# Iterates every cached session (key is the config hash, unused here).
@@connections.each do |k, c|
# NOTE(review): wait_for_confirms / nacked_set are channel-level APIs in
# Bunny -- confirm they are available on these cached session objects.
if block_until_confirms && !c.wait_for_confirms
c.nacked_set.each do |message_id|
yield message_id if block_given?
end
end
c.close
end
end
# A proxy class for generating queues and binding them to exchanges using Bunny. In the
# format expected from blinkbox Books services.
class Queue
  extend Forwardable
  def_delegators :@queue, :status

  # Create a queue object for subscribing to messages with.
  #
  # NB. There is no way to know what bindings have already been made for a queue, so all code
  # subscribing to a queue should cope with receiving messages it's not expecting.
  #
  # @param [String] queue_name The name of the queue which should be used and (if necessary) created.
  # @param [String] exchange The name of the Exchange to bind to. The default value should be avoided for production uses.
  # @param [String] dlx The name of the Dead Letter Exchange to send nacked messages to.
  # @param [Array<Hash>] bindings An array of hashes, each one detailing the parameters for a new binding.
  # @raise [Bunny::NotFound] If the exchange does not exist.
  # @return [Blinkbox::CommonMessaging::Queue] A blinkbox managed queue object.
  def initialize(queue_name, exchange: "amq.headers", dlx: "#{exchange}.DLX", bindings: [])
    connection = CommonMessaging.connection
    @logger = CommonMessaging.config[:logger]
    # We create one channel per queue because it means that any issues are isolated
    # and we can start a new channel and resume efforts in a segregated manner.
    @channel = connection.create_channel
    @queue = @channel.queue(
      queue_name,
      durable: true,
      auto_delete: false,
      exclusive: false,
      arguments: {
        "x-dead-letter-exchange" => dlx
      }
    )
    @exchange = @channel.headers(
      exchange,
      durable: true,
      auto_delete: false,
      passive: true
    )
    Kernel.warn "No bindings were given, the queue is unlikely to receive any messages" if bindings.empty?
    bindings.each do |binding|
      @queue.bind(@exchange, arguments: binding)
    end
  end

  # Defines a new block for handling exceptions which occur when processing an incoming message. Cases where this might occur include:
  #
  # * A message which doesn't have a recognised content-type (ie. one which has been 'init'ed)
  # * An invalid JSON message
  # * A valid JSON message which doesn't pass schema validation
  #
  # @example Sending excepted messages to a log, then nack them
  #   log = Logger.new(STDOUT)
  #   queue = Blinkbox::CommonMessaging::Queue.new("My.Queue")
  #   queue.on_exception do |e, delivery_info, metadata, payload|
  #     log.error e
  #     channel.reject(delivery_info[:delivery_tag], false)
  #   end
  #
  # @yield [exception, channel, delivery_info, metadata, payload] Yields for each exception which occurs.
  # @yieldparam [Exception] exception The exception which was raised.
  # @yieldparam [Bunny::Channel] channel The channel this queue is using (useful for nacking).
  # @yieldparam [Hash] delivery_info The RabbitMQ delivery info for the message (useful for nacking).
  # @yieldparam [Hash] metadata The metadata delivered from the RabbitMQ server (parameters and headers).
  # @yieldparam [String] payload The message that was received
  def on_exception(&block)
    raise ArgumentError, "Please specify a block to call when an exception is raised" unless block_given?
    @on_exception = block
  end

  # Emits the metadata and objectified payload for every message which appears on the queue. Any message with a content-type
  # not 'init'ed will be rejected (without retry) automatically.
  #
  # * Returning `true` or `:ack` from the block will acknowledge and remove the message from the queue
  # * Returning `false` or `:reject` from the block will send the message to the DLQ
  # * Returning `:retry` will put the message back on the queue to be tried again later.
  #
  # @example Subscribing to messages
  #   queue = Blinkbox::CommonMessaging::Queue.new("catch-all", exchange_name: "Marvin", [{}])
  #   queue.subscribe(block:true) do |metadata, obj|
  #     puts "Message received."
  #     puts "Headers: #{metadata[:headers].to_json}"
  #     puts "Body: #{obj.to_json}"
  #   end
  #
  # @param [Hash] options Options sent to Bunny's subscribe method
  # @option options [Boolean] :block Should this method block while being executed (true, default) or spawn a new thread? (false)
  # @yield [metadata, payload_object] A block to execute for each message which is received on this queue.
  # @yieldparam metadata [Hash] The properties and headers (in [:headers]) delivered with the message.
  # @yieldparam payload_object [Blinkbox::CommonMessaging::JsonSchemaPowered] An object representing the validated JSON payload.
  # @yieldreturn [Boolean, :ack, :reject, :retry]
  def subscribe(options = {})
    raise ArgumentError, "Please give a block to run when a message is received" unless block_given?
    @queue.subscribe(
      # fetch (not `|| true`) so callers can pass `block: false` to process
      # on a background thread; `options[:block] || true` always blocked.
      block: options.fetch(:block, true),
      manual_ack: true
    ) do |delivery_info, metadata, payload|
      begin
        klass = Blinkbox::CommonMessaging.class_from_content_type(metadata[:headers]['content-type'])
        object = klass.new(JSON.parse(payload))
        response = yield metadata, object
        case response
        when :ack, true
          @channel.ack(delivery_info[:delivery_tag])
        when :reject, false
          @channel.reject(delivery_info[:delivery_tag], false)
        when :retry
          @channel.reject(delivery_info[:delivery_tag], true)
        else
          fail "Unknown response from subscribe block: #{response}"
        end
      rescue Exception => e
        # Deliberately rescues Exception (not just StandardError) so the
        # configured handler sees every failure mode before deciding.
        (@on_exception || method(:default_on_exception)).call(e, @channel, delivery_info, metadata, payload)
      end
    end
  end

  private

  # The default handler for exceptions which occur when processing a message:
  # log it and dead-letter the message (no requeue).
  def default_on_exception(exception, channel, delivery_info, metadata, payload)
    @logger.error exception
    channel.reject(delivery_info[:delivery_tag], false)
  end
end
class Exchange
extend Forwardable
def_delegators :@exchange, :on_return
# A wrapped class for Bunny::Exchange. Wrapped so we can take care of message validation and header
# conventions in the blinkbox Books format.
#
# @param [String] exchange_name The name of the Exchange to connect to.
# @param [String] facility The name of the app or service (we've adopted the GELF naming term across ruby)
# @param [String] facility_version The version of the app or service which sent the message.
# @raise [Bunny::NotFound] If the exchange does not exist.
def initialize(exchange_name, facility: File.basename($0, '.rb'), facility_version: "0.0.0-unknown")
@app_id = "#{facility}:v#{facility_version}"
connection = CommonMessaging.connection
channel = connection.create_channel
# Publisher confirms let #publish block until the broker acks the message.
channel.confirm_select
@exchange = channel.headers(
exchange_name,
durable: true,
auto_delete: false,
passive: true
)
end
# Publishes a message to the exchange with blinkbox Books default message headers and properties.
#
# Worth noting that because of a quirk of the RabbitMQ Headers Exchange you cannot route on properties
# so, in order to facilitate routing on content-type, that key is written to the headers by default as
# well as to the properties.
#
# @param [Blinkbox::CommonMessaging::JsonSchemaPowered] data The information which will be sent as the payload of the message. An instance of any class generated by Blinkbox::CommonMessaging.init_from_schema_at.
# @param [Hash] headers A hash of string keys and string values which will be sent as headers with the message. Used for matching.
# @param [Array<String>] message_id_chain Optional. The message_id_chain of the message which was received in order to prompt this one.
# @param [Boolean] confirm Will block this method until the MQ server has confirmed the message has been persisted and routed.
# @raise [UndeliverableMessageError] If confirm is true and the broker nacks the message.
# @return [String] The message_id of the message which was delivered.
def publish(data, headers: {}, message_id_chain: [], confirm: true)
raise ArgumentError, "All published messages must be validated. Please see Blinkbox::CommonMessaging.init_from_schema_at for details." unless data.class.included_modules.include?(JsonSchemaPowered)
raise ArgumentError, "message_id_chain must be an array of strings" unless message_id_chain.is_a?(Array)
message_id = generate_message_id
# The new id is appended to a copy of the chain; the chain's first entry
# doubles as the correlation id for the whole conversation.
message_id_chain = message_id_chain.dup << message_id
correlation_id = message_id_chain.first
hd = Blinkbox::CommonMessaging::HeaderDetectors.new(data)
@exchange.publish(
data.to_json,
persistent: true,
content_type: data.content_type,
correlation_id: correlation_id,
message_id: message_id,
app_id: @app_id,
timestamp: Time.now.to_i,
headers: hd.modified_headers({
"content-type" => data.content_type,
"message_id_chain" => message_id_chain
}.merge(headers))
)
if confirm && !@exchange.channel.wait_for_confirms
message_id = @exchange.channel.nacked_set.first
raise UndeliverableMessageError, "Message #{message_id} was returned as undeliverable by RabbitMQ."
end
message_id
end
private
def generate_message_id
SecureRandom.hex(8) # 8 random bytes => a 16 character hex string
end
end
# Mixin for the schema-generated message classes: wraps the payload Hash in
# @data and forwards behaviour to it so instances can be treated like the hash.
module JsonSchemaPowered
  extend Forwardable
  # NOTE(review): ":responds_to?" looks like a typo of ":respond_to?"; it is
  # kept as-is for compatibility, and respond_to_missing? below makes the
  # standard respond_to? reflect the methods forwarded by method_missing.
  def_delegators :@data, :responds_to?, :to_json, :[]

  # Forward any unknown method straight to the wrapped data hash.
  def method_missing(m, *args, &block)
    @data.send(m, *args, &block)
  end

  # Fix: method_missing without respond_to_missing? makes respond_to? deny
  # methods the object actually handles.
  def respond_to_missing?(m, include_private = false)
    @data.respond_to?(m, include_private) || super
  end

  # @return [Hash] the raw message payload.
  def to_hash
    @data
  end

  # Human-readable summary built from the message's "classification" entries;
  # falls back to the raw JSON if the payload has no usable classification.
  def to_s
    classification_string = @data["classification"].map do |cl|
      "#{cl["realm"]}:#{cl["id"]}"
    end.join(", ")
    "<#{self.class.name.split("::").last}: #{classification_string}>"
  rescue
    @data.to_json
  end
end
class UndeliverableMessageError < RuntimeError; end
# Generates ruby classes representing blinkbox Books messages from the schema files at the
# given path.
#
# @example Initialising CommonMessaging for sending
# Blinkbox::CommonMessaging.init_from_schema_at("ingestion.book.metadata.v2.schema.json")
# msg = Blinkbox::CommonMessaging::IngestionBookMetadataV2.new(title: "A title")
# exchange.publish(msg)
#
# @example Using the root path
# Blinkbox::CommonMessaging.init_from_schema_at("./schema/ingestion/book/metadata/v2.schema.json")
# # => [Blinkbox::CommonMessaging::SchemaIngestionBookMetadataV2]
#
# Blinkbox::CommonMessaging.init_from_schema_at("./schema/ingestion/book/metadata/v2.schema.json", "./schema")
# # => [Blinkbox::CommonMessaging::IngestionBookMetadataV2]
#
# @param [String] path The path to a (or a folder of) json-schema file(s) in the blinkbox Books format.
# @param [String] root The root path from which namespaces will be calculated.
# @return Array of class names generated
def self.init_from_schema_at(path, root = path)
fail "The path #{path} does not exist" unless File.exist?(path)
# Directories are walked recursively; each schema file generates one class.
return Dir[File.join(path, "**/*.schema.json")].map { |file| init_from_schema_at(file, root) }.flatten if File.directory?(path)
# When root points at the schema file itself, namespace from its directory.
root = File.dirname(root) if root =~ /\.schema\.json$/
# "a/b/v1.schema.json" (relative to root) => schema name "a.b.v1"
schema_name = path.sub(%r{^(?:\./)?#{root}/?(.+)\.schema\.json$}, "\\1").tr("/",".")
class_name = class_name_from_schema_name(schema_name)
# We will re-declare these classes if required, rather than raise an error.
remove_const(class_name) if constants.include?(class_name.to_sym)
const_set(class_name, Class.new {
include JsonSchemaPowered
def initialize(data = {})
@data = data.stringify_keys
# Validates against the schema file; insert_defaults fills in schema defaults.
JSON::Validator.validate!(self.class.const_get("SCHEMA_FILE"), @data, insert_defaults: true)
end
def content_type
self.class.const_get("CONTENT_TYPE")
end
})
klass = const_get(class_name)
klass.const_set('CONTENT_TYPE', "application/vnd.blinkbox.books.#{schema_name}+json")
klass.const_set('SCHEMA_FILE', path)
# NOTE(review): returns the single generated class here, and an Array only in
# the directory case above — the @return doc only mentions the Array case.
klass
end
# Finds the message class which init_from_schema_at generated for the given
# content type.
#
# @param [String] content_type eg. "application/vnd.blinkbox.books.ingestion.book.metadata.v2+json"
# @return [Class] the matching generated class.
def self.class_from_content_type(content_type)
  fail "No content type was given" if content_type.nil? || content_type.empty?
  schema_name = content_type.sub(%r{^application/vnd\.blinkbox\.books\.(.+)\+json$}, '\1')
  begin
    const_get(class_name_from_schema_name(schema_name))
  rescue
    raise "The schema for the #{content_type} content type has not been loaded"
  end
end
# Converts a schema name (eg. "ingestion.book.metadata.v2") into the name of
# the generated Ruby class (eg. "IngestionBookMetadataV2").
# NOTE(review): String#camelcase is an ActiveSupport inflection (required at
# the top of this file), not core Ruby.
def self.class_name_from_schema_name(schema_name)
schema_name.tr("./", "_").camelcase
end
end
end
Improve variable names
require "blinkbox/common_messaging/version"
require "bunny"
require "uri"
require "active_support/core_ext/hash/keys"
require "active_support/core_ext/hash/deep_merge"
require "active_support/core_ext/string/inflections"
require "ruby_units"
require "forwardable"
require "json-schema"
require "securerandom"
require "logger"
require "blinkbox/common_messaging/header_detectors"
module Blinkbox
# A group of methods and classes which enable the delivery of messages through the
# blinkbox Books ecosystem via AMQP.
#
# `CommonMessaging.configure!` should be used to set up connection details first, then
# every subsequent call to `CommonMessaging::Queue.new` will create a `Bunny::Queue` object
# using the connection details that were present at the time.
module CommonMessaging
# The default RabbitMQ connection details, in the format that Bunny needs them.
# NOTE(review): this hash is not frozen; configure! must take care never to
# mutate it in place or the defaults are corrupted for later callers.
DEFAULT_CONFIG = {
bunny: {
host: "localhost",
port: 5672,
user: "guest",
pass: "guest",
vhost: "/",
log_level: Logger::WARN,
automatically_recover: true,
threaded: true,
# Bunny continuation timeout, in milliseconds.
continuation_timeout: 4000
},
# How long to wait between reconnection attempts (ruby_units quantities).
retry_interval: {
initial: Unit("5 seconds"),
max: Unit("5 seconds")
},
# Null logger by default; replace via CommonMessaging.logger=.
logger: Logger.new(nil)
}
# This method only stores connection details for calls to `CommonMessaging::Queue.new`.
# Any queues already created will not be affected by subsequent calls to this method.
#
# This method converts the given options from the blinkbox Books common config format
# to the format required for Bunny so that calls like the following are possible:
#
# @example Using with CommonConfig
# require "blinkbox/common_config"
# require "blinkbox/common_messaging"
#
# config = Blinkbox::CommonConfig.new
# Blinkbox::CommonMessaging.configure!(config.tree(:rabbitmq))
#
# @param [Hash] config The configuration options needed for an MQ connection.
# @option config [String] :url The URL to the RabbitMQ server, eg. amqp://user:pass@host.name:1234/virtual_host
# @option config [Unit] :initialRetryInterval The interval at which re-connection attempts should be made when a RabbitMQ failure first occurs.
# @option config [Unit] :maxRetryInterval The maximum interval at which RabbitMQ reconnection attempts should back off to.
# @param [#debug, #info, #warn, #error, #fatal] logger The logger instance which should be used by Bunny
def self.configure!(config, logger = nil)
  # Fix: start from a copy so repeated calls (and the deep_merge!/[]= below,
  # including logger=) never mutate the DEFAULT_CONFIG constant itself.
  # ActiveSupport's deep_merge! builds new nested hashes rather than mutating
  # the existing ones, so a shallow dup is sufficient here.
  @@config = DEFAULT_CONFIG.dup
  unless config[:url].nil?
    uri = URI.parse(config[:url])
    @@config.deep_merge!(
      bunny: {
        host: uri.host,
        port: uri.port,
        user: uri.user,
        pass: uri.password,
        vhost: uri.path
      }
    )
  end
  # Accept the blinkbox common-config camelCase keys, coercing to Unit as needed.
  %i{initialRetryInterval maxRetryInterval}.each do |unit_key|
    if config[unit_key]
      config[unit_key] = Unit(config[unit_key]) unless config[unit_key].is_a?(Unit)
      @@config.deep_merge!(
        retry_interval: {
          # "initialRetryInterval" => :initial, "maxRetryInterval" => :max
          unit_key.to_s.sub('RetryInterval', '').to_sym => config[unit_key]
        }
      )
    end
  end
  self.logger = logger unless logger.nil?
end
# Returns the current config being used (as used by Bunny)
#
# @return [Hash]
def self.config
  # Explicit defined? check replaces the old "@@config rescue DEFAULT_CONFIG",
  # which used an exception (NameError on the unset class variable) for
  # ordinary control flow and would also have masked unrelated errors.
  defined?(@@config) ? @@config : DEFAULT_CONFIG
end
# Sets the logger delivered to Bunny when new connections are made
#
# @param [] logger The object to which log messages should be sent.
def self.logger=(logger)
  # Duck-type check: the logger must support the full Logger-ish interface.
  required_methods = %i{debug info warn error fatal level= level}
  missing = required_methods.reject { |m| logger.respond_to?(m) }
  unless missing.empty?
    raise ArgumentError, "The logger did not respond to '#{missing.first}'"
  end
  @@config[:logger] = logger
end
# Returns (and starts if necessary) the connection to the RabbitMQ server as specified by the current
# config. Will keep only one connection per configuration at any time and will return or create a new connection
# as necessary. Channels are created with publisher confirmations.
#
# Application code should not need to use this method.
#
# @return [Bunny::Session]
def self.connection
  @@connections ||= {}
  # One cached Bunny session per distinct configuration; start is a no-op on
  # an already-started session.
  session = (@@connections[config] ||= Bunny.new(config[:bunny]))
  session.start
  session
end
# Blocks until all the open connections have been closed, calling the block with any message_ids which haven't been delivered
#
# @param [Boolean] block_until_confirms Force the method to block until all messages have been acked or nacked.
# @yield [message_id] Calls the given block for any message that was undeliverable (if block_until_confirms was `true`)
# @yieldparam [String] message_id The message_id of the message which could not be delivered
def self.close_connections(block_until_confirms: true)
  @@connections.each_value do |connection|
    if block_until_confirms && !connection.wait_for_confirms
      # Report every message the broker nacked before we close.
      connection.nacked_set.each do |message_id|
        yield message_id if block_given?
      end
    end
    connection.close
  end
end
# A proxy class for generating queues and binding them to exchanges using Bunny. In the
# format expected from blinkbox Books services.
class Queue
extend Forwardable
def_delegators :@queue, :status
# Create a queue object for subscribing to messages with.
#
# NB. There is no way to know what bindings have already been made for a queue, so all code
# subscribing to a queue should cope with receiving messages it's not expecting.
#
# @param [String] queue_name The name of the queue which should be used and (if necessary) created.
# @param [String] exchange The name of the Exchange to bind to. The default value should be avoided for production uses.
# @param [String] dlx The name of the Dead Letter Exchange to send nacked messages to.
# @param [Array,Hash] bindings An array of hashes, each on detailing the parameters for a new binding.
# @raise [Bunny::NotFound] If the exchange does not exist.
# @return [Bunny::Queue] A blinkbox managed Bunny Queue object
def initialize(queue_name, exchange: "amq.headers", dlx: "#{exchange}.DLX", bindings: [])
connection = CommonMessaging.connection
@logger = CommonMessaging.config[:logger]
# We create one channel per queue because it means that any issues are isolated
# and we can start a new channel and resume efforts in a segregated manner.
@channel = connection.create_channel
@queue = @channel.queue(
queue_name,
durable: true,
auto_delete: false,
exclusive: false,
arguments: {
# Rejected (non-requeued) messages are routed to the dead letter exchange.
"x-dead-letter-exchange" => dlx
}
)
# passive: true means the exchange must already exist (Bunny::NotFound otherwise).
@exchange = @channel.headers(
exchange,
durable: true,
auto_delete: false,
passive: true
)
# NOTE(review): this warns whenever bindings is empty, not only when the
# argument was omitted — the message wording assumes the latter.
Kernel.warn "No bindings were given, the queue is unlikely to receive any messages" if bindings.empty?
bindings.each do |binding|
@queue.bind(@exchange, arguments: binding)
end
end
# Defines a new block for handling exceptions which occur when processing an incoming message. Cases where this might occur include:
#
# * A message which doesn't have a recognised content-type (ie. one which has been 'init'ed)
# * An invalid JSON message
# * A valid JSON message which doesn't pass schema validation
#
# @example Sending excepted messages to a log, then nack them
# log = Logger.new(STDOUT)
# queue = Blinkbox::CommonMessaging::Queue.new("My.Queue")
# queue.on_exception do |e, delivery_info, metadata, payload|
# log.error e
# channel.reject(delivery_info[:delivery_tag], false)
# end
#
# @yield [exception, channel, delivery_info, metadata, payload] Yields for each exception which occurs.
# @yieldparam [Exception] exception The exception which was raised.
# @yieldparam [Bunny::Connection] channel The channel this queue is using (useful for nacking).
# @yieldparam [Hash] delivery_info The RabbitMQ delivery info for the message (useful for nacking).
# @yieldparam [Hash] metadata The metadata delivered from the RabbitMQ server (parameters and headers).
# @yieldparam [String] payload The message that was received
def on_exception(&block)
  # Store the handler so #subscribe can use it instead of the default one.
  raise ArgumentError, "Please specify a block to call when an exception is raised" if block.nil?
  @on_exception = block
end
# Emits the metadata and objectified payload for every message which appears on the queue. Any message with a content-type
# not 'init'ed will be rejected (without retry) automatically.
#
# * Returning `true` or `:ack` from the block will acknowledge and remove the message from the queue
# * Returning `false` or `:reject` from the block will send the message to the DLQ
# * Returning `:retry` will put the message back on the queue to be tried again later.
#
# @example Subscribing to messages
# queue = Blinkbox::CommonMessaging::Queue.new("catch-all", exchange: "Marvin", bindings: [{}])
# queue.subscribe(block:true) do |metadata, obj|
# puts "Messge received."
# puts "Headers: #{metadata[:headers].to_json}"
# puts "Body: #{obj.to_json}"
# end
#
# @param [Hash] options Options sent to Bunny's subscribe method
# @option options [Boolean] :block Should this method block while being executed (true, default) or spawn a new thread? (false)
# @yield [metadata, payload_object] A block to execute for each message which is received on this queue.
# @yieldparam metadata [Hash] The properties and headers (in [:headers]) delivered with the message.
# @yieldparam payload_object [Blinkbox::CommonMessaging::JsonSchemaPowered] An object representing the validated JSON payload.
# @yieldreturn [Boolean, :ack, :reject, :retry]
def subscribe(options = {})
  raise ArgumentError, "Please give a block to run when a message is received" unless block_given?
  @queue.subscribe(
    # Fix: use fetch so an explicit `block: false` is honoured — the previous
    # `options[:block] || true` always evaluated to true.
    block: options.fetch(:block, true),
    manual_ack: true
  ) do |delivery_info, metadata, payload|
    begin
      # Reconstruct the typed message object from the declared content-type.
      klass = Blinkbox::CommonMessaging.class_from_content_type(metadata[:headers]['content-type'])
      object = klass.new(JSON.parse(payload))
      response = yield metadata, object
      case response
      when :ack, true
        @channel.ack(delivery_info[:delivery_tag])
      when :reject, false
        # requeue = false: the message is dead-lettered.
        @channel.reject(delivery_info[:delivery_tag], false)
      when :retry
        # requeue = true: the message goes back on the queue for a later attempt.
        @channel.reject(delivery_info[:delivery_tag], true)
      else
        fail "Unknown response from subscribe block: #{response}"
      end
    rescue Exception => e
      # Deliberately rescues Exception (not just StandardError) so that any
      # failure while handling a message reaches the registered handler rather
      # than killing the consumer thread.
      (@on_exception || method(:default_on_exception)).call(e, @channel, delivery_info, metadata, payload)
    end
  end
end
private
# The default handler for exceptions which occur when processing a message.
def default_on_exception(exception, channel, delivery_info, metadata, payload)
# Log, then reject without requeue so the broker routes the message to the
# dead letter exchange configured on the queue.
# (metadata and payload are accepted to match the handler signature but unused.)
@logger.error exception
channel.reject(delivery_info[:delivery_tag], false)
end
end
class Exchange
  extend Forwardable
  def_delegators :@exchange, :on_return

  # Wraps Bunny::Exchange so that message validation and the blinkbox Books
  # header conventions are applied on every publish.
  #
  # @param [String] exchange_name The name of the Exchange to connect to.
  # @param [String] facility The name of the app or service (we've adopted the GELF naming term across ruby)
  # @param [String] facility_version The version of the app or service which sent the message.
  # @raise [Bunny::NotFound] If the exchange does not exist.
  def initialize(exchange_name, facility: File.basename($0, '.rb'), facility_version: "0.0.0-unknown")
    @app_id = "#{facility}:v#{facility_version}"
    channel = CommonMessaging.connection.create_channel
    # Publisher confirms let #publish block until the broker acknowledges.
    channel.confirm_select
    # passive: true asserts the exchange already exists.
    @exchange = channel.headers(
      exchange_name,
      durable: true,
      auto_delete: false,
      passive: true
    )
  end

  # Publishes a message with the blinkbox Books default headers and properties.
  # The content-type is written to the headers as well as the properties
  # because a RabbitMQ headers exchange cannot route on properties.
  #
  # @param [Blinkbox::CommonMessaging::JsonSchemaPowered] data The payload object; must come from a class generated by Blinkbox::CommonMessaging.init_from_schema_at.
  # @param [Hash] headers String keys and values sent as message headers (used for matching).
  # @param [Array<String>] message_id_chain Optional. The chain from the message which prompted this one.
  # @param [Boolean] confirm Block until the MQ server confirms the message was persisted and routed.
  # @return [String] The message_id of the message which was delivered.
  def publish(data, headers: {}, message_id_chain: [], confirm: true)
    unless data.class.included_modules.include?(JsonSchemaPowered)
      raise ArgumentError, "All published messages must be validated. Please see Blinkbox::CommonMessaging.init_from_schema_at for details."
    end
    raise ArgumentError, "message_id_chain must be an array of strings" unless message_id_chain.is_a?(Array)

    message_id = generate_message_id
    full_chain = message_id_chain.dup << message_id
    # Caller-supplied headers take precedence over the defaults.
    default_headers = {
      "content-type" => data.content_type,
      "message_id_chain" => full_chain
    }
    detectors = Blinkbox::CommonMessaging::HeaderDetectors.new(data)

    @exchange.publish(
      data.to_json,
      persistent: true,
      content_type: data.content_type,
      correlation_id: full_chain.first,
      message_id: message_id,
      app_id: @app_id,
      timestamp: Time.now.to_i,
      headers: detectors.modified_headers(default_headers.merge(headers))
    )

    if confirm && !@exchange.channel.wait_for_confirms
      failed_id = @exchange.channel.nacked_set.first
      raise UndeliverableMessageError, "Message #{failed_id} was returned as undeliverable by RabbitMQ."
    end
    message_id
  end

  private

  # 8 random bytes, hex encoded => a 16 character message id.
  def generate_message_id
    SecureRandom.hex(8)
  end
end
# Mixin for the schema-generated message classes: wraps the payload Hash in
# @data and forwards behaviour to it so instances can be treated like the hash.
module JsonSchemaPowered
  extend Forwardable
  # NOTE(review): ":responds_to?" looks like a typo of ":respond_to?"; it is
  # kept as-is for compatibility, and respond_to_missing? below makes the
  # standard respond_to? reflect the methods forwarded by method_missing.
  def_delegators :@data, :responds_to?, :to_json, :[]

  # Forward any unknown method straight to the wrapped data hash.
  def method_missing(m, *args, &block)
    @data.send(m, *args, &block)
  end

  # Fix: method_missing without respond_to_missing? makes respond_to? deny
  # methods the object actually handles.
  def respond_to_missing?(m, include_private = false)
    @data.respond_to?(m, include_private) || super
  end

  # @return [Hash] the raw message payload.
  def to_hash
    @data
  end

  # Human-readable summary built from the message's "classification" entries;
  # falls back to the raw JSON if the payload has no usable classification.
  def to_s
    classification_string = @data["classification"].map do |cl|
      "#{cl["realm"]}:#{cl["id"]}"
    end.join(", ")
    "<#{self.class.name.split("::").last}: #{classification_string}>"
  rescue
    @data.to_json
  end
end
class UndeliverableMessageError < RuntimeError; end
# Generates ruby classes representing blinkbox Books messages from the schema files at the
# given path.
#
# @example Initialising CommonMessaging for sending
# Blinkbox::CommonMessaging.init_from_schema_at("ingestion.book.metadata.v2.schema.json")
# msg = Blinkbox::CommonMessaging::IngestionBookMetadataV2.new(title: "A title")
# exchange.publish(msg)
#
# @example Using the root path
# Blinkbox::CommonMessaging.init_from_schema_at("./schema/ingestion/book/metadata/v2.schema.json")
# # => [Blinkbox::CommonMessaging::SchemaIngestionBookMetadataV2]
#
# Blinkbox::CommonMessaging.init_from_schema_at("./schema/ingestion/book/metadata/v2.schema.json", "./schema")
# # => [Blinkbox::CommonMessaging::IngestionBookMetadataV2]
#
# @param [String] path The path to a (or a folder of) json-schema file(s) in the blinkbox Books format.
# @param [String] root The root path from which namespaces will be calculated.
# @return Array of class names generated
def self.init_from_schema_at(path, root = path)
fail "The path #{path} does not exist" unless File.exist?(path)
# Directories are walked recursively; each schema file generates one class.
return Dir[File.join(path, "**/*.schema.json")].map { |file| init_from_schema_at(file, root) }.flatten if File.directory?(path)
# When root points at the schema file itself, namespace from its directory.
root = File.dirname(root) if root =~ /\.schema\.json$/
# "a/b/v1.schema.json" (relative to root) => schema name "a.b.v1"
schema_name = path.sub(%r{^(?:\./)?#{root}/?(.+)\.schema\.json$}, "\\1").tr("/",".")
class_name = class_name_from_schema_name(schema_name)
# We will re-declare these classes if required, rather than raise an error.
remove_const(class_name) if constants.include?(class_name.to_sym)
const_set(class_name, Class.new {
include JsonSchemaPowered
def initialize(data = {})
@data = data.stringify_keys
# Validates against the schema file; insert_defaults fills in schema defaults.
JSON::Validator.validate!(self.class.const_get("SCHEMA_FILE"), @data, insert_defaults: true)
end
def content_type
self.class.const_get("CONTENT_TYPE")
end
})
klass = const_get(class_name)
klass.const_set('CONTENT_TYPE', "application/vnd.blinkbox.books.#{schema_name}+json")
klass.const_set('SCHEMA_FILE', path)
# NOTE(review): returns the single generated class here, and an Array only in
# the directory case above — the @return doc only mentions the Array case.
klass
end
# Finds the message class which init_from_schema_at generated for the given
# content type.
#
# @param [String] content_type eg. "application/vnd.blinkbox.books.ingestion.book.metadata.v2+json"
# @return [Class] the matching generated class.
def self.class_from_content_type(content_type)
  fail "No content type was given" if content_type.nil? || content_type.empty?
  schema_name = content_type.sub(%r{^application/vnd\.blinkbox\.books\.(.+)\+json$}, '\1')
  begin
    const_get(class_name_from_schema_name(schema_name))
  rescue
    raise "The schema for the #{content_type} content type has not been loaded"
  end
end
# Converts a schema name (eg. "ingestion.book.metadata.v2") into the name of
# the generated Ruby class (eg. "IngestionBookMetadataV2").
# NOTE(review): String#camelcase is an ActiveSupport inflection (required at
# the top of this file), not core Ruby.
def self.class_name_from_schema_name(schema_name)
schema_name.tr("./", "_").camelcase
end
end
end
|
require 'fog/compute/models/aws/server'
require 'fog/compute/models/aws/image'
module Fog
module AWS
class Compute::Server
# Prefer the name stored in our breeze-data tag, falling back to the EC2 "Name" tag.
def name
breeze_data['name'] || tags['Name']
end
# Best human-readable identifier available for this server.
def display_name
name || public_ip_address || "#{state} #{flavor_id} #{id}"
end
# NOTE(review): both predicates reload the server first, so each check costs an API call.
def running? ; current_state == 'running' ; end
def stopped? ; current_state == 'stopped' ; end
# Get or set meta data that is saved in a tag.
# With an argument: serialises the hash as "k:v;k:v" into the breeze-data tag
# and persists it (entries whose value is nil are dropped). Without an
# argument: parses the tag back into a Hash.
# NOTE(review): keys/values containing ':' or ';' would not round-trip through
# this encoding — confirm callers only store simple tokens.
def breeze_data(new_values=nil)
if new_values
tags['breeze-data'] = new_values.map{ |k,v| v.nil? ? v : "#{k}:#{v}" }.compact.join(';')
# thor("server:tag:create #{id} breeze-data '#{tags['breeze-data']}'")
Breeze::Server::Tag.new.create(id, 'breeze-data', tags['breeze-data'])
else
Hash[tags['breeze-data'].to_s.split(';').map{ |s| s.split(':') }]
end
end
# Mark this server as kept around only so a deploy can be rolled back to it.
def spare_for_rollback!
breeze_state('spare_for_rollback')
end
def spare_for_rollback?
breeze_state == 'spare_for_rollback'
end
# Get or set the state tag.
def breeze_state(new_state=nil)
if new_state
breeze_data(breeze_data.merge('state' => new_state))
else
breeze_data['state']
end
end
private
# Refresh from the API so state checks never use stale data.
def current_state
reload
state
end
end
class Compute::Image
  # Best human-readable identifier: the AMI name, falling back to its location.
  def display_name
    # Fix: `||` instead of `or` — `or` is a control-flow keyword with low
    # precedence and should not be used in value expressions.
    name || location
  end

  # Summary of the image's type, CPU architecture and root device type.
  def full_type
    "#{type}, #{architecture}, #{root_device_type}"
  end
end
end
end
Show the state in display_name for servers that are not running
require 'fog/compute/models/aws/server'
require 'fog/compute/models/aws/image'
module Fog
module AWS
class Compute::Server
# Prefer the name stored in our breeze-data tag, falling back to the EC2 "Name" tag.
def name
breeze_data['name'] || tags['Name']
end
# Best human-readable identifier available for this server.
def display_name
# Prefix non-running servers with their state, eg. "stopped:web-1".
return "#{state}:#{name}" if name and state != 'running'
name || public_ip_address || "#{state} #{flavor_id} #{id}"
end
# NOTE(review): both predicates reload the server first, so each check costs an API call.
def running? ; current_state == 'running' ; end
def stopped? ; current_state == 'stopped' ; end
# Get or set meta data that is saved in a tag.
# With an argument: serialises the hash as "k:v;k:v" into the breeze-data tag
# and persists it (entries whose value is nil are dropped). Without an
# argument: parses the tag back into a Hash.
# NOTE(review): keys/values containing ':' or ';' would not round-trip through
# this encoding — confirm callers only store simple tokens.
def breeze_data(new_values=nil)
if new_values
tags['breeze-data'] = new_values.map{ |k,v| v.nil? ? v : "#{k}:#{v}" }.compact.join(';')
# thor("server:tag:create #{id} breeze-data '#{tags['breeze-data']}'")
Breeze::Server::Tag.new.create(id, 'breeze-data', tags['breeze-data'])
else
Hash[tags['breeze-data'].to_s.split(';').map{ |s| s.split(':') }]
end
end
# Mark this server as kept around only so a deploy can be rolled back to it.
def spare_for_rollback!
breeze_state('spare_for_rollback')
end
def spare_for_rollback?
breeze_state == 'spare_for_rollback'
end
# Get or set the state tag.
def breeze_state(new_state=nil)
if new_state
breeze_data(breeze_data.merge('state' => new_state))
else
breeze_data['state']
end
end
private
# Refresh from the API so state checks never use stale data.
def current_state
reload
state
end
end
class Compute::Image
  # Best human-readable identifier: the AMI name, falling back to its location.
  def display_name
    # Fix: `||` instead of `or` — `or` is a control-flow keyword with low
    # precedence and should not be used in value expressions.
    name || location
  end

  # Summary of the image's type, CPU architecture and root device type.
  def full_type
    "#{type}, #{architecture}, #{root_device_type}"
  end
end
end
end
|
# frozen_string_literal: true
module BrowseEverything
# Gem version, referenced by the gemspec.
VERSION = '0.16.1'
end
Updating the version to 1.0.0.RC1
# frozen_string_literal: true
module BrowseEverything
# Gem version, referenced by the gemspec. Pre-release versions use RubyGems'
# lowercase prerelease suffix convention ("1.0.0.rc1").
VERSION = '1.0.0.rc1'
end
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def findbugs_provided
%w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
end
def ee_provided
%w(javax:javaee-api:jar:7.0) + self.findbugs_provided
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:4.1.1.171.1)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.6.0'
end
def mockito
%w(org.mockito:mockito-all:jar:1.10.19)
end
def objenesis
%w(org.objenesis:objenesis:jar:2.5.1)
end
def powermock_version
'1.6.6'
end
def powermock_javaagent
"org.powermock:powermock-module-javaagent:jar:#{powermock_version}"
end
def powermock
%W(
org.powermock:powermock-core:jar:#{powermock_version}
org.powermock:powermock-reflect:jar:#{powermock_version}
org.powermock:powermock-module-testng-common:jar:#{powermock_version}
org.powermock:powermock-module-testng:jar:#{powermock_version}
org.powermock:powermock-api-mockito:jar:#{powermock_version}
org.powermock:powermock-api-mockito-common:jar:#{powermock_version}
org.powermock:powermock-api-support:jar:#{powermock_version}
org.javassist:javassist:jar:3.21.0-GA
org.powermock:powermock-module-testng-agent:jar:#{powermock_version}
#{powermock_javaagent}
) + self.objenesis
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.5.4)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.5.4)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.5.4)
end
def jackson_gwt_support
self.jackson_core + self.jackson_databind + self.jackson_annotations
end
def jsinterop
%w(com.google.jsinterop:jsinterop-annotations:jar:1.0.1 com.google.jsinterop:jsinterop-annotations:jar:sources:1.0.1)
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.8.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.8.0)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.8.0'
end
def javax_inject
%w(javax.inject:javax.inject:jar:1)
end
def gwt_gin
%w(com.google.gwt.inject:gin:jar:2.1.2) + self.javax_inject + self.guice + self.gwt_user
end
def gwt_property_source
%w(org.realityforge.gwt.property-source:gwt-property-source:jar:0.2)
end
def gwt_webpoller
%w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.9.4)
end
def gwt_datatypes
%w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.8)
end
def gwt_ga
%w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
end
def gwt_mmvp
%w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.5)
end
def gwt_lognice
%w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.4)
end
def gwt_appcache_client
%w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.9 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.9)
end
def gwt_appcache_server
%w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.9)
end
# The appcache code required to exist on gwt path during compilation
def gwt_appcache
self.gwt_appcache_client + self.gwt_appcache_server
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.7)
end
def simple_session_filter
%w(org.realityforge.ssf:simple-session-filter:jar:0.7)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.5 org.antlr:antlr4-runtime:jar:4.3 org.antlr:antlr4-annotations:jar:4.3) + self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.9)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
end
def proxy_servlet
self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
self.commons_logging + self.commons_codec
end
def failsafe
%w(net.jodah:failsafe:jar:1.0.3)
end
def keycloak_gwt
%w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.1)
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.3)
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def replicant_version
'0.5.79'
end
def replicant_shared
%W(org.realityforge.replicant:replicant-shared:jar:#{replicant_version})
end
def replicant_client_common
%W(org.realityforge.replicant:replicant-client-common:jar:#{replicant_version}) + self.replicant_shared + self.gwt_webpoller + self.gwt_datatypes
end
def replicant_client_qa_support
%W(org.realityforge.replicant:replicant-client-qa-support:jar:#{replicant_version}) + self.guiceyloops_gwt
end
def replicant_ee_client
%W(org.realityforge.replicant:replicant-client-ee:jar:#{replicant_version}) + self.replicant_client_common
end
def replicant_gwt_client
%W(org.realityforge.replicant:replicant-client-gwt:jar:#{replicant_version}) + self.replicant_client_common + self.gwt_property_source
end
def replicant_server
%W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.replicant_shared + self.simple_session_filter + self.gwt_rpc + self.field_filter
end
def gwt_rpc
self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
end
def awaitility
%w(org.awaitility:awaitility:jar:2.0.0)
end
def testng_version
'6.11'
end
def testng
%W(org.testng:testng:jar:#{testng_version})
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.guiceyloops_gwt + self.glassfish_embedded
end
def guiceyloops_lib
'org.realityforge.guiceyloops:guiceyloops:jar:0.87'
end
def guiceyloops_gwt
[guiceyloops_lib] + self.mockito + self.guice + self.testng
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.6)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.6)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
# JDBC driver artifacts appropriate for the configured database flavour;
# empty when no supported flavour is active.
def db_drivers
  if BuildrPlus::Db.mssql?
    self.jtds
  elsif BuildrPlus::Db.pgsql?
    drivers = self.postgresql
    drivers += self.postgis if BuildrPlus::FeatureManager.activated?(:geolatte)
    drivers
  else
    []
  end
end
end
end
Upgrade the version of gwt-datatypes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Dependency catalog for the :libs feature. Each method returns Buildr-style
# artifact coordinates (group:artifact:type[:classifier]:version), either as a
# bare string or an array of strings; aggregate methods concatenate the results
# of other entries so consumers receive a library together with its companions.
BuildrPlus::FeatureManager.feature(:libs) do |f|
  f.enhance(:Config) do
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # --- Geo/spatial libraries ---
    # GeoTools 9.4 stack plus jsr-275/vecmath/jai support jars, used alongside geolatte.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    # --- Compile-time ("provided") annotation jars ---
    def findbugs_provided
      %w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
    end
    def ee_provided
      %w(javax:javaee-api:jar:7.0) + self.findbugs_provided
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:4.1.1.171.1)
    end
    # Bare string (not an array), unlike most entries in this catalog.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.6.0'
    end
    # --- Test/mocking libraries ---
    def mockito
      %w(org.mockito:mockito-all:jar:1.10.19)
    end
    def objenesis
      %w(org.objenesis:objenesis:jar:2.5.1)
    end
    # Shared version for every powermock artifact below.
    def powermock_version
      '1.6.6'
    end
    def powermock_javaagent
      "org.powermock:powermock-module-javaagent:jar:#{powermock_version}"
    end
    def powermock
      %W(
        org.powermock:powermock-core:jar:#{powermock_version}
        org.powermock:powermock-reflect:jar:#{powermock_version}
        org.powermock:powermock-module-testng-common:jar:#{powermock_version}
        org.powermock:powermock-module-testng:jar:#{powermock_version}
        org.powermock:powermock-api-mockito:jar:#{powermock_version}
        org.powermock:powermock-api-mockito-common:jar:#{powermock_version}
        org.powermock:powermock-api-support:jar:#{powermock_version}
        org.javassist:javassist:jar:3.21.0-GA
        org.powermock:powermock-module-testng-agent:jar:#{powermock_version}
        #{powermock_javaagent}
      ) + self.objenesis
    end
    # --- Jackson JSON stack (2.5.4) ---
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.5.4)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.5.4)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.5.4)
    end
    def jackson_gwt_support
      self.jackson_core + self.jackson_databind + self.jackson_annotations
    end
    # --- GWT (2.8.0) and GWT extensions ---
    # Includes the sources classifier jar, needed by the GWT compiler.
    def jsinterop
      %w(com.google.jsinterop:jsinterop-annotations:jar:1.0.1 com.google.jsinterop:jsinterop-annotations:jar:sources:1.0.1)
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.8.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.8.0)
    end
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.8.0'
    end
    def javax_inject
      %w(javax.inject:javax.inject:jar:1)
    end
    def gwt_gin
      %w(com.google.gwt.inject:gin:jar:2.1.2) + self.javax_inject + self.guice + self.gwt_user
    end
    def gwt_property_source
      %w(org.realityforge.gwt.property-source:gwt-property-source:jar:0.2)
    end
    def gwt_webpoller
      %w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.9.4)
    end
    def gwt_datatypes
      %w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.9)
    end
    def gwt_ga
      %w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
    end
    def gwt_mmvp
      %w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.5)
    end
    def gwt_lognice
      %w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.4)
    end
    def gwt_appcache_client
      %w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.9 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.9)
    end
    def gwt_appcache_server
      %w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.9)
    end
    # The appcache code required to exist on gwt path during compilation
    def gwt_appcache
      self.gwt_appcache_client + self.gwt_appcache_server
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.7)
    end
    # --- Servlet filters / REST support ---
    def simple_session_filter
      %w(org.realityforge.ssf:simple-session-filter:jar:0.7)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.5 org.antlr:antlr4-runtime:jar:4.3 org.antlr:antlr4-annotations:jar:4.3) + self.field_filter
    end
    # --- Commons / HTTP / crypto ---
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.9)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
        self.commons_logging + self.commons_codec
    end
    def failsafe
      %w(net.jodah:failsafe:jar:1.0.3)
    end
    # --- Keycloak (2.0.0.Final) ---
    def keycloak_gwt
      %w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.1)
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.3)
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    # --- Replicant client/server artifacts, all pinned to replicant_version ---
    def replicant_version
      '0.5.79'
    end
    def replicant_shared
      %W(org.realityforge.replicant:replicant-shared:jar:#{replicant_version})
    end
    def replicant_client_common
      %W(org.realityforge.replicant:replicant-client-common:jar:#{replicant_version}) + self.replicant_shared + self.gwt_webpoller + self.gwt_datatypes
    end
    def replicant_client_qa_support
      %W(org.realityforge.replicant:replicant-client-qa-support:jar:#{replicant_version}) + self.guiceyloops_gwt
    end
    def replicant_ee_client
      %W(org.realityforge.replicant:replicant-client-ee:jar:#{replicant_version}) + self.replicant_client_common
    end
    def replicant_gwt_client
      %W(org.realityforge.replicant:replicant-client-gwt:jar:#{replicant_version}) + self.replicant_client_common + self.gwt_property_source
    end
    def replicant_server
      %W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.replicant_shared + self.simple_session_filter + self.gwt_rpc + self.field_filter
    end
    def gwt_rpc
      self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
    end
    # --- Dependency injection / test frameworks ---
    def guice
      %w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
    end
    def awaitility
      %w(org.awaitility:awaitility:jar:2.0.0)
    end
    def testng_version
      '6.11'
    end
    def testng
      %W(org.testng:testng:jar:#{testng_version})
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.guiceyloops_gwt + self.glassfish_embedded
    end
    def guiceyloops_lib
      'org.realityforge.guiceyloops:guiceyloops:jar:0.87'
    end
    def guiceyloops_gwt
      [guiceyloops_lib] + self.mockito + self.guice + self.testng
    end
    # The artifact group gains a '.pg' suffix when the build targets PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.6)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.6)
    end
    # --- Logging / mail / JDBC drivers ---
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
    end
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # Selects driver artifacts based on the configured database; empty otherwise.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# ---- revision separator ----
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Dependency catalog for the :libs feature. Each method returns Buildr-style
# artifact coordinates (group:artifact:type[:classifier]:version), either as a
# bare string or an array of strings; aggregate methods concatenate the results
# of other entries so consumers receive a library together with its companions.
#
# Fixes in this revision:
# * openhtmltopdf no longer lists openhtmltopdf-core/-svg-support twice.
# * react4j now explicitly includes jsinterop_base, which react4j requires.
BuildrPlus::FeatureManager.feature(:libs) do |f|
  f.enhance(:Config) do
    # Bare string with an :all classifier (not an array), unlike most entries.
    def giggle
      'org.realityforge.giggle:giggle-compiler:jar:all:0.13'
    end
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # --- Geo/spatial libraries ---
    # GeoTools 9.4 stack plus jsr-275/vecmath/jai support jars, used alongside geolatte.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    # --- Compile-time ("provided") annotation jars ---
    def jetbrains_annotations
      %w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.6.0)
    end
    def javax_annotations
      %w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
    end
    def spotbugs_provided
      %w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
    end
    def ee_provided
      %w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:5.2020.3)
    end
    # Bare string (not an array), unlike most entries in this catalog.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.7.4'
    end
    def mockito
      %w(org.mockito:mockito-all:jar:1.10.19)
    end
    # --- Jackson JSON stack (2.10.4 core, 2.9.9 datatype modules) ---
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.10.4)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.10.4)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.10.4) + self.jackson_core + self.jackson_annotations
    end
    def jackson_datatype_jdk8
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
    end
    def jackson_datatype_jsr310
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
    end
    def braincheck
      %w(org.realityforge.braincheck:braincheck-core:jar:1.31.0)
    end
    # --- GWT (2.9.0) and GWT extensions ---
    def jsinterop
      %w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
    end
    def jsinterop_base
      %w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.9.0)
    end
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.9.0'
    end
    def javax_inject
      %w(javax.inject:javax.inject:jar:1)
    end
    # Adds the sources classifier jar required on the GWT compile path.
    def javax_inject_gwt
      %w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
    end
    def gwt_serviceworker
      %w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def timeservice
      %w(org.realityforge.timeservice:timeservice:jar:0.02)
    end
    # --- GraphQL stack ---
    def graphql_java
      %w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
    end
    def graphql_java_dataloader
      %w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
    end
    def graphql_java_servlet
      %w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
        self.graphql_java +
        self.jackson_annotations +
        self.jackson_core +
        self.jackson_databind +
        self.jackson_datatype_jdk8 +
        self.guava # Expected 24.1.1-jre
    end
    def graphql_java_scalars
      %w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
    end
    def antlr4_runtime
      %w(org.antlr:antlr4-runtime:jar:4.7.2)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
        self.antlr4_runtime +
        self.field_filter
    end
    # --- Commons / HTTP / crypto ---
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.11)
    end
    def commons_io
      %w(commons-io:commons-io:jar:1.3.1)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.65 org.bouncycastle:bcpkix-jdk15on:jar:1.65)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.3.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5.12 org.apache.httpcomponents:httpcore:jar:4.4.13) +
        self.commons_logging + self.commons_codec
    end
    def failsafe
      %w(net.jodah:failsafe:jar:1.0.3)
    end
    # --- Keycloak (legacy 2.0.0.Final and v11 variants) ---
    def keycloak_gwt
      %w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.12) + self.akasha
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.5)
    end
    def keycloak_authfilter
      %w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.04)
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.4.1.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def keycloak_core_v11
      %w(
        org.keycloak:keycloak-core:jar:11.0.0
        org.keycloak:keycloak-common:jar:11.0.0
        com.sun.activation:jakarta.activation:jar:1.2.1
      ) + self.bouncycastle
    end
    def keycloak_v11
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:11.0.0
        org.keycloak:keycloak-adapter-spi:jar:11.0.0
        org.keycloak:keycloak-adapter-core:jar:11.0.0
        org.keycloak:keycloak-servlet-adapter-spi:jar:11.0.0
      ) + self.keycloak_core_v11 + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def simple_keycloak_service
      %w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.2)
    end
    def guava
      %w(com.google.guava:guava:jar:27.1-jre)
    end
    # --- Arez reactive state management, pinned to arez_version ---
    def arez_version
      '0.198'
    end
    def arez
      %W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
    end
    def arez_processor
      %W(org.realityforge.arez:arez-processor:jar:#{arez_version})
    end
    def arez_spytools
      %w(org.realityforge.arez.spytools:arez-spytools:jar:0.129)
    end
    def arez_testng
      %W(org.realityforge.arez:arez-extras-testng:jar:#{arez_version})
    end
    def arez_dom
      %W(org.realityforge.arez:arez-extras-dom:jar:#{arez_version})
    end
    def arez_persist_version
      '0.29'
    end
    def arez_persist_core
      %W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
    end
    def arez_persist_processor
      %W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
    end
    def grim_annotations
      %w(org.realityforge.grim:grim-annotations:jar:0.04)
    end
    def router_fu_version
      '0.34'
    end
    def router_fu
      %W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
    end
    def router_fu_processor
      %W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
    end
    def sting_version
      '0.20'
    end
    def sting_core
      %W(org.realityforge.sting:sting-core:jar:#{sting_version})
    end
    def sting_processor
      %W(org.realityforge.sting:sting-processor:jar:#{sting_version})
    end
    def zemeckis_core
      %w(org.realityforge.zemeckis:zemeckis-core:jar:0.12) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
    end
    def akasha
      %w(org.realityforge.akasha:akasha-gwt:jar:0.16)
    end
    # --- React4j, pinned to react4j_version ---
    def react4j_version
      '0.183'
    end
    def react4j
      # jsinterop_base is required when react4j is used, so include it
      # explicitly rather than relying on it arriving transitively.
      %W(
        org.realityforge.react4j:react4j-core:jar:#{react4j_version}
        org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
      ) + self.akasha + self.zemeckis_core + self.jsinterop_base
    end
    def react4j_processor
      %W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
    end
    # --- Replicant client/server artifacts, pinned to replicant_version ---
    def replicant_version
      '6.108'
    end
    def replicant_client
      %W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) +
        self.akasha +
        self.zemeckis_core
    end
    def replicant_server
      %W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
    end
    def gwt_rpc
      self.jackson_databind + self.gwt_servlet
    end
    # --- Dependency injection / test frameworks ---
    def guice
      %w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
    end
    def awaitility
      %w(org.awaitility:awaitility:jar:2.0.0)
    end
    def testng_version
      '6.14.3'
    end
    def testng
      %W(org.testng:testng:jar:#{testng_version} com.beust:jcommander:jar:1.72)
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.108) + self.guice + self.glassfish_embedded
    end
    # The artifact group gains a '.pg' suffix when the build targets PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
    end
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
    end
    def json_schema_validator
      %w(
        com.networknt:json-schema-validator:jar:1.0.43
        org.apache.commons:commons-lang3:jar:3.5
        org.jruby.joni:joni:jar:2.1.31
        org.jruby.jcodings:jcodings:jar:1.0.46
      ) + self.jackson_databind + self.slf4j
    end
    # --- PDF / XML graphics / templating ---
    def pdfbox
      %w(
        org.apache.pdfbox:pdfbox:jar:2.0.21
        org.apache.pdfbox:fontbox:jar:2.0.21
        org.apache.pdfbox:xmpbox:jar:2.0.21
      ) + self.commons_logging + self.bouncycastle
    end
    def openhtmltopdf
      # openhtmltopdf-core and -svg-support were previously listed twice; the
      # duplicate coordinates have been removed.
      %w(
        com.openhtmltopdf:openhtmltopdf-pdfbox:jar:1.0.4
        com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
        com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
        de.rototor.pdfbox:graphics2d:jar:0.26
      ) + BuildrPlus::Libs.xmlgraphics + BuildrPlus::Libs.pdfbox
    end
    def xmlgraphics
      %w(
        org.apache.xmlgraphics:batik-anim:jar:1.12
        org.apache.xmlgraphics:batik-awt-util:jar:1.12
        org.apache.xmlgraphics:batik-bridge:jar:1.12
        org.apache.xmlgraphics:batik-codec:jar:1.12
        org.apache.xmlgraphics:batik-constants:jar:1.12
        org.apache.xmlgraphics:batik-css:jar:1.12
        org.apache.xmlgraphics:batik-dom:jar:1.12
        org.apache.xmlgraphics:batik-ext:jar:1.12
        org.apache.xmlgraphics:batik-gvt:jar:1.12
        org.apache.xmlgraphics:batik-i18n:jar:1.12
        org.apache.xmlgraphics:batik-parser:jar:1.12
        org.apache.xmlgraphics:batik-script:jar:1.12
        org.apache.xmlgraphics:batik-svg-dom:jar:1.12
        org.apache.xmlgraphics:batik-svggen:jar:1.12
        org.apache.xmlgraphics:batik-transcoder:jar:1.12
        org.apache.xmlgraphics:batik-util:jar:1.12
        org.apache.xmlgraphics:batik-xml:jar:1.12
        org.apache.xmlgraphics:xmlgraphics-commons:jar:2.4
      ) + self.commons_io + self.xml_apis_ext
    end
    def xml_apis_ext
      %w(xml-apis:xml-apis-ext:jar:1.3.04)
    end
    def thymeleaf
      %w(
        org.thymeleaf:thymeleaf:jar:3.0.11.RELEASE
        ognl:ognl:jar:3.1.12
        org.javassist:javassist:jar:3.20.0-GA
        org.attoparser:attoparser:jar:2.0.5.RELEASE
        org.unbescape:unbescape:jar:1.1.6.RELEASE
        org.thymeleaf.extras:thymeleaf-extras-java8time:jar:3.0.4.RELEASE
      ) + self.commons_logging
    end
    # --- Mail / JDBC drivers ---
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # Selects driver artifacts based on the configured database; empty otherwise.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# jsinterop_base is required when react4j is used
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Dependency catalog for the :libs feature. Each method returns Buildr-style
# artifact coordinates (group:artifact:type[:classifier]:version), either as a
# bare string or an array of strings; aggregate methods concatenate the results
# of other entries so consumers receive a library together with its companions.
#
# Fix in this revision: openhtmltopdf no longer lists the
# openhtmltopdf-core/-svg-support artifacts twice.
BuildrPlus::FeatureManager.feature(:libs) do |f|
  f.enhance(:Config) do
    # Bare string with an :all classifier (not an array), unlike most entries.
    def giggle
      'org.realityforge.giggle:giggle-compiler:jar:all:0.13'
    end
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # --- Geo/spatial libraries ---
    # GeoTools 9.4 stack plus jsr-275/vecmath/jai support jars, used alongside geolatte.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    # --- Compile-time ("provided") annotation jars ---
    def jetbrains_annotations
      %w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.6.0)
    end
    def javax_annotations
      %w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
    end
    def spotbugs_provided
      %w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
    end
    def ee_provided
      %w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:5.2020.3)
    end
    # Bare string (not an array), unlike most entries in this catalog.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.7.4'
    end
    def mockito
      %w(org.mockito:mockito-all:jar:1.10.19)
    end
    # --- Jackson JSON stack (2.10.4 core, 2.9.9 datatype modules) ---
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.10.4)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.10.4)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.10.4) + self.jackson_core + self.jackson_annotations
    end
    def jackson_datatype_jdk8
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
    end
    def jackson_datatype_jsr310
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
    end
    def braincheck
      %w(org.realityforge.braincheck:braincheck-core:jar:1.31.0)
    end
    # --- GWT (2.9.0) and GWT extensions ---
    def jsinterop
      %w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
    end
    def jsinterop_base
      %w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.9.0)
    end
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.9.0'
    end
    def javax_inject
      %w(javax.inject:javax.inject:jar:1)
    end
    # Adds the sources classifier jar required on the GWT compile path.
    def javax_inject_gwt
      %w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
    end
    def gwt_serviceworker
      %w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def timeservice
      %w(org.realityforge.timeservice:timeservice:jar:0.02)
    end
    # --- GraphQL stack ---
    def graphql_java
      %w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
    end
    def graphql_java_dataloader
      %w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
    end
    def graphql_java_servlet
      %w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
        self.graphql_java +
        self.jackson_annotations +
        self.jackson_core +
        self.jackson_databind +
        self.jackson_datatype_jdk8 +
        self.guava # Expected 24.1.1-jre
    end
    def graphql_java_scalars
      %w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
    end
    def antlr4_runtime
      %w(org.antlr:antlr4-runtime:jar:4.7.2)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
        self.antlr4_runtime +
        self.field_filter
    end
    # --- Commons / HTTP / crypto ---
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.11)
    end
    def commons_io
      %w(commons-io:commons-io:jar:1.3.1)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.65 org.bouncycastle:bcpkix-jdk15on:jar:1.65)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.3.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5.12 org.apache.httpcomponents:httpcore:jar:4.4.13) +
        self.commons_logging + self.commons_codec
    end
    def failsafe
      %w(net.jodah:failsafe:jar:1.0.3)
    end
    # --- Keycloak (legacy 2.0.0.Final and v11 variants) ---
    def keycloak_gwt
      %w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.12) + self.akasha
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.5)
    end
    def keycloak_authfilter
      %w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.04)
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.4.1.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def keycloak_core_v11
      %w(
        org.keycloak:keycloak-core:jar:11.0.0
        org.keycloak:keycloak-common:jar:11.0.0
        com.sun.activation:jakarta.activation:jar:1.2.1
      ) + self.bouncycastle
    end
    def keycloak_v11
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:11.0.0
        org.keycloak:keycloak-adapter-spi:jar:11.0.0
        org.keycloak:keycloak-adapter-core:jar:11.0.0
        org.keycloak:keycloak-servlet-adapter-spi:jar:11.0.0
      ) + self.keycloak_core_v11 + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def simple_keycloak_service
      %w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.2)
    end
    def guava
      %w(com.google.guava:guava:jar:27.1-jre)
    end
    # --- Arez reactive state management, pinned to arez_version ---
    def arez_version
      '0.198'
    end
    def arez
      %W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
    end
    def arez_processor
      %W(org.realityforge.arez:arez-processor:jar:#{arez_version})
    end
    def arez_spytools
      %w(org.realityforge.arez.spytools:arez-spytools:jar:0.129)
    end
    def arez_testng
      %W(org.realityforge.arez:arez-extras-testng:jar:#{arez_version})
    end
    def arez_dom
      %W(org.realityforge.arez:arez-extras-dom:jar:#{arez_version})
    end
    def arez_persist_version
      '0.29'
    end
    def arez_persist_core
      %W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
    end
    def arez_persist_processor
      %W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
    end
    def grim_annotations
      %w(org.realityforge.grim:grim-annotations:jar:0.04)
    end
    def router_fu_version
      '0.34'
    end
    def router_fu
      %W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
    end
    def router_fu_processor
      %W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
    end
    def sting_version
      '0.20'
    end
    def sting_core
      %W(org.realityforge.sting:sting-core:jar:#{sting_version})
    end
    def sting_processor
      %W(org.realityforge.sting:sting-processor:jar:#{sting_version})
    end
    def zemeckis_core
      %w(org.realityforge.zemeckis:zemeckis-core:jar:0.12) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
    end
    def akasha
      %w(org.realityforge.akasha:akasha-gwt:jar:0.16)
    end
    # --- React4j, pinned to react4j_version ---
    def react4j_version
      '0.183'
    end
    def react4j
      # jsinterop_base is required when react4j is used.
      %W(
        org.realityforge.react4j:react4j-core:jar:#{react4j_version}
        org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
      ) + self.akasha + self.zemeckis_core + self.jsinterop_base
    end
    def react4j_processor
      %W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
    end
    # --- Replicant client/server artifacts, pinned to replicant_version ---
    def replicant_version
      '6.108'
    end
    def replicant_client
      %W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) +
        self.akasha +
        self.zemeckis_core
    end
    def replicant_server
      %W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
    end
    def gwt_rpc
      self.jackson_databind + self.gwt_servlet
    end
    # --- Dependency injection / test frameworks ---
    def guice
      %w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
    end
    def awaitility
      %w(org.awaitility:awaitility:jar:2.0.0)
    end
    def testng_version
      '6.14.3'
    end
    def testng
      %W(org.testng:testng:jar:#{testng_version} com.beust:jcommander:jar:1.72)
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.108) + self.guice + self.glassfish_embedded
    end
    # The artifact group gains a '.pg' suffix when the build targets PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
    end
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
    end
    def json_schema_validator
      %w(
        com.networknt:json-schema-validator:jar:1.0.43
        org.apache.commons:commons-lang3:jar:3.5
        org.jruby.joni:joni:jar:2.1.31
        org.jruby.jcodings:jcodings:jar:1.0.46
      ) + self.jackson_databind + self.slf4j
    end
    # --- PDF / XML graphics / templating ---
    def pdfbox
      %w(
        org.apache.pdfbox:pdfbox:jar:2.0.21
        org.apache.pdfbox:fontbox:jar:2.0.21
        org.apache.pdfbox:xmpbox:jar:2.0.21
      ) + self.commons_logging + self.bouncycastle
    end
    def openhtmltopdf
      # openhtmltopdf-core and -svg-support were previously listed twice; the
      # duplicate coordinates have been removed.
      %w(
        com.openhtmltopdf:openhtmltopdf-pdfbox:jar:1.0.4
        com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
        com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
        de.rototor.pdfbox:graphics2d:jar:0.26
      ) + BuildrPlus::Libs.xmlgraphics + BuildrPlus::Libs.pdfbox
    end
    def xmlgraphics
      %w(
        org.apache.xmlgraphics:batik-anim:jar:1.12
        org.apache.xmlgraphics:batik-awt-util:jar:1.12
        org.apache.xmlgraphics:batik-bridge:jar:1.12
        org.apache.xmlgraphics:batik-codec:jar:1.12
        org.apache.xmlgraphics:batik-constants:jar:1.12
        org.apache.xmlgraphics:batik-css:jar:1.12
        org.apache.xmlgraphics:batik-dom:jar:1.12
        org.apache.xmlgraphics:batik-ext:jar:1.12
        org.apache.xmlgraphics:batik-gvt:jar:1.12
        org.apache.xmlgraphics:batik-i18n:jar:1.12
        org.apache.xmlgraphics:batik-parser:jar:1.12
        org.apache.xmlgraphics:batik-script:jar:1.12
        org.apache.xmlgraphics:batik-svg-dom:jar:1.12
        org.apache.xmlgraphics:batik-svggen:jar:1.12
        org.apache.xmlgraphics:batik-transcoder:jar:1.12
        org.apache.xmlgraphics:batik-util:jar:1.12
        org.apache.xmlgraphics:batik-xml:jar:1.12
        org.apache.xmlgraphics:xmlgraphics-commons:jar:2.4
      ) + self.commons_io + self.xml_apis_ext
    end
    def xml_apis_ext
      %w(xml-apis:xml-apis-ext:jar:1.3.04)
    end
    def thymeleaf
      %w(
        org.thymeleaf:thymeleaf:jar:3.0.11.RELEASE
        ognl:ognl:jar:3.1.12
        org.javassist:javassist:jar:3.20.0-GA
        org.attoparser:attoparser:jar:2.0.5.RELEASE
        org.unbescape:unbescape:jar:1.1.6.RELEASE
        org.thymeleaf.extras:thymeleaf-extras-java8time:jar:3.0.4.RELEASE
      ) + self.commons_logging
    end
    # --- Mail / JDBC drivers ---
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # Selects driver artifacts based on the configured database; empty otherwise.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def giggle
'org.realityforge.giggle:giggle-compiler:jar:all:0.13'
end
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def jetbrains_annotations
%w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.6.0)
end
def javax_annotations
%w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
end
def spotbugs_provided
%w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
end
def ee_provided
%w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:5.2020.3)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.7.4'
end
def mockito
%w(org.mockito:mockito-all:jar:1.10.19)
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.10.4)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.10.4)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.10.4) + self.jackson_core + self.jackson_annotations
end
def jackson_datatype_jdk8
%w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
end
def jackson_datatype_jsr310
%w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
end
def braincheck
%w(org.realityforge.braincheck:braincheck-core:jar:1.31.0)
end
def jsinterop
%w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
end
def jsinterop_base
%w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.9.0)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.9.0'
end
def javax_inject
%w(javax.inject:javax.inject:jar:1)
end
def javax_inject_gwt
%w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
end
def gwt_serviceworker
%w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def timeservice
%w(org.realityforge.timeservice:timeservice:jar:0.02)
end
def graphql_java
%w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
end
def graphql_java_dataloader
%w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
end
def graphql_java_servlet
%w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
self.graphql_java +
self.jackson_annotations +
self.jackson_core +
self.jackson_databind +
self.jackson_datatype_jdk8 +
self.guava # Expected 24.1.1-jre
end
def graphql_java_scalars
%w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
end
def antlr4_runtime
%w(org.antlr:antlr4-runtime:jar:4.7.2)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
self.antlr4_runtime +
self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.11)
end
def commons_io
%w(commons-io:commons-io:jar:1.3.1)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.65 org.bouncycastle:bcpkix-jdk15on:jar:1.65)
end
def proxy_servlet
self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.3.0)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5.12 org.apache.httpcomponents:httpcore:jar:4.4.13) +
self.commons_logging + self.commons_codec
end
def failsafe
%w(net.jodah:failsafe:jar:1.0.3)
end
def keycloak_gwt
%w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.12) + self.akasha
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.5)
end
def keycloak_authfilter
%w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.04)
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.4.1.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def keycloak_core_v11
%w(
org.keycloak:keycloak-core:jar:11.0.0
org.keycloak:keycloak-common:jar:11.0.0
com.sun.activation:jakarta.activation:jar:1.2.1
) + self.bouncycastle
end
def keycloak_v11
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:11.0.0
org.keycloak:keycloak-adapter-spi:jar:11.0.0
org.keycloak:keycloak-adapter-core:jar:11.0.0
org.keycloak:keycloak-servlet-adapter-spi:jar:11.0.0
) + self.keycloak_core_v11 + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def simple_keycloak_service
%w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.2)
end
def guava
%w(com.google.guava:guava:jar:27.1-jre)
end
def arez_version
'0.202'
end
def arez
%W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
end
def arez_processor
%W(org.realityforge.arez:arez-processor:jar:#{arez_version})
end
def arez_spytools
%W(org.realityforge.arez:arez-extras-spytools:jar:#{arez_version})
end
def arez_testng
%W(org.realityforge.arez:arez-extras-testng:jar:#{arez_version})
end
def arez_dom
%W(org.realityforge.arez:arez-extras-dom:jar:#{arez_version})
end
def arez_persist_version
'0.35'
end
def arez_persist_core
%W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
end
def arez_persist_processor
%W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
end
def grim_annotations
%w(org.realityforge.grim:grim-annotations:jar:0.06)
end
def router_fu_version
'0.37'
end
def router_fu
%W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
end
def router_fu_processor
%W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
end
def sting_version
'0.23'
end
def sting_core
%W(org.realityforge.sting:sting-core:jar:#{sting_version})
end
def sting_processor
%W(org.realityforge.sting:sting-processor:jar:#{sting_version})
end
def zemeckis_core
%w(org.realityforge.zemeckis:zemeckis-core:jar:0.13) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
end
def akasha
%w(org.realityforge.akasha:akasha-gwt:jar:0.29)
end
def react4j_version
'0.186'
end
def react4j
%W(
org.realityforge.react4j:react4j-core:jar:#{react4j_version}
org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
) + self.akasha + self.zemeckis_core + self.jsinterop_base
end
def react4j_processor
%W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
end
def replicant_version
'6.114'
end
def replicant_client
%W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) +
self.akasha +
self.zemeckis_core
end
def replicant_server
%W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
end
def gwt_rpc
self.jackson_databind + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
end
def awaitility
%w(org.awaitility:awaitility:jar:2.0.0)
end
def testng_version
'7.4.0'
end
def testng
%w(org.testng:testng:jar:7.4.0 com.beust:jcommander:jar:1.78 org.webjars:jquery:jar:3.5.1)
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.110) + self.guice + self.glassfish_embedded
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
end
def json_schema_validator
%w(
com.networknt:json-schema-validator:jar:1.0.43
org.apache.commons:commons-lang3:jar:3.5
org.jruby.joni:joni:jar:2.1.31
org.jruby.jcodings:jcodings:jar:1.0.46
) + self.jackson_databind + self.slf4j
end
def pdfbox
%w(
org.apache.pdfbox:pdfbox:jar:2.0.21
org.apache.pdfbox:fontbox:jar:2.0.21
org.apache.pdfbox:xmpbox:jar:2.0.21
) + self.commons_logging + self.bouncycastle
end
def openhtmltopdf
%w(
com.openhtmltopdf:openhtmltopdf-pdfbox:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
de.rototor.pdfbox:graphics2d:jar:0.26
com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
) + BuildrPlus::Libs.xmlgraphics + BuildrPlus::Libs.pdfbox
end
def xmlgraphics
%w(
org.apache.xmlgraphics:batik-anim:jar:1.12
org.apache.xmlgraphics:batik-awt-util:jar:1.12
org.apache.xmlgraphics:batik-bridge:jar:1.12
org.apache.xmlgraphics:batik-codec:jar:1.12
org.apache.xmlgraphics:batik-constants:jar:1.12
org.apache.xmlgraphics:batik-css:jar:1.12
org.apache.xmlgraphics:batik-dom:jar:1.12
org.apache.xmlgraphics:batik-ext:jar:1.12
org.apache.xmlgraphics:batik-gvt:jar:1.12
org.apache.xmlgraphics:batik-i18n:jar:1.12
org.apache.xmlgraphics:batik-parser:jar:1.12
org.apache.xmlgraphics:batik-script:jar:1.12
org.apache.xmlgraphics:batik-svg-dom:jar:1.12
org.apache.xmlgraphics:batik-svggen:jar:1.12
org.apache.xmlgraphics:batik-transcoder:jar:1.12
org.apache.xmlgraphics:batik-util:jar:1.12
org.apache.xmlgraphics:batik-xml:jar:1.12
org.apache.xmlgraphics:xmlgraphics-commons:jar:2.4
) + self.commons_io + self.xml_apis_ext
end
def xml_apis_ext
%w(xml-apis:xml-apis-ext:jar:1.3.04)
end
def thymeleaf
%w(
org.thymeleaf:thymeleaf:jar:3.0.11.RELEASE
ognl:ognl:jar:3.1.12
org.javassist:javassist:jar:3.20.0-GA
org.attoparser:attoparser:jar:2.0.5.RELEASE
org.unbescape:unbescape:jar:1.1.6.RELEASE
org.thymeleaf.extras:thymeleaf-extras-java8time:jar:3.0.4.RELEASE
) + self.commons_logging
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
def db_drivers
return self.jtds if BuildrPlus::Db.mssql?
return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
[]
end
end
end
# Bump the version of guiceyloops
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def giggle
'org.realityforge.giggle:giggle-compiler:jar:all:0.13'
end
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def jetbrains_annotations
%w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.6.0)
end
def javax_annotations
%w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
end
def spotbugs_provided
%w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
end
def ee_provided
%w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:5.2020.3)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.7.4'
end
def mockito
%w(org.mockito:mockito-all:jar:1.10.19)
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.10.4)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.10.4)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.10.4) + self.jackson_core + self.jackson_annotations
end
def jackson_datatype_jdk8
%w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
end
def jackson_datatype_jsr310
%w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
end
def braincheck
%w(org.realityforge.braincheck:braincheck-core:jar:1.31.0)
end
def jsinterop
%w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
end
def jsinterop_base
%w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.9.0)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.9.0'
end
def javax_inject
%w(javax.inject:javax.inject:jar:1)
end
def javax_inject_gwt
%w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
end
def gwt_serviceworker
%w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def timeservice
%w(org.realityforge.timeservice:timeservice:jar:0.02)
end
def graphql_java
%w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
end
def graphql_java_dataloader
%w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
end
def graphql_java_servlet
%w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
self.graphql_java +
self.jackson_annotations +
self.jackson_core +
self.jackson_databind +
self.jackson_datatype_jdk8 +
self.guava # Expected 24.1.1-jre
end
def graphql_java_scalars
%w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
end
def antlr4_runtime
%w(org.antlr:antlr4-runtime:jar:4.7.2)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
self.antlr4_runtime +
self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.11)
end
def commons_io
%w(commons-io:commons-io:jar:1.3.1)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.65 org.bouncycastle:bcpkix-jdk15on:jar:1.65)
end
def proxy_servlet
self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.3.0)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5.12 org.apache.httpcomponents:httpcore:jar:4.4.13) +
self.commons_logging + self.commons_codec
end
def failsafe
%w(net.jodah:failsafe:jar:1.0.3)
end
def keycloak_gwt
%w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.12) + self.akasha
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.5)
end
def keycloak_authfilter
%w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.04)
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.4.1.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def keycloak_core_v11
%w(
org.keycloak:keycloak-core:jar:11.0.0
org.keycloak:keycloak-common:jar:11.0.0
com.sun.activation:jakarta.activation:jar:1.2.1
) + self.bouncycastle
end
def keycloak_v11
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:11.0.0
org.keycloak:keycloak-adapter-spi:jar:11.0.0
org.keycloak:keycloak-adapter-core:jar:11.0.0
org.keycloak:keycloak-servlet-adapter-spi:jar:11.0.0
) + self.keycloak_core_v11 + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def simple_keycloak_service
%w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.2)
end
def guava
%w(com.google.guava:guava:jar:27.1-jre)
end
def arez_version
'0.202'
end
def arez
%W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
end
def arez_processor
%W(org.realityforge.arez:arez-processor:jar:#{arez_version})
end
def arez_spytools
%W(org.realityforge.arez:arez-extras-spytools:jar:#{arez_version})
end
def arez_testng
%W(org.realityforge.arez:arez-extras-testng:jar:#{arez_version})
end
def arez_dom
%W(org.realityforge.arez:arez-extras-dom:jar:#{arez_version})
end
def arez_persist_version
'0.35'
end
def arez_persist_core
%W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
end
def arez_persist_processor
%W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
end
def grim_annotations
%w(org.realityforge.grim:grim-annotations:jar:0.06)
end
def router_fu_version
'0.37'
end
def router_fu
%W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
end
def router_fu_processor
%W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
end
def sting_version
'0.23'
end
def sting_core
%W(org.realityforge.sting:sting-core:jar:#{sting_version})
end
def sting_processor
%W(org.realityforge.sting:sting-processor:jar:#{sting_version})
end
def zemeckis_core
%w(org.realityforge.zemeckis:zemeckis-core:jar:0.13) + self.braincheck + self.jetbrains_annotations + self.grim_annotations
end
def akasha
%w(org.realityforge.akasha:akasha-gwt:jar:0.29)
end
def react4j_version
'0.186'
end
def react4j
%W(
org.realityforge.react4j:react4j-core:jar:#{react4j_version}
org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
) + self.akasha + self.zemeckis_core + self.jsinterop_base
end
def react4j_processor
%W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
end
def replicant_version
'6.114'
end
def replicant_client
%W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) +
self.akasha +
self.zemeckis_core
end
def replicant_server
%W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
end
def gwt_rpc
self.jackson_databind + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
end
def awaitility
%w(org.awaitility:awaitility:jar:2.0.0)
end
def testng_version
'7.4.0'
end
def testng
%w(org.testng:testng:jar:7.4.0 com.beust:jcommander:jar:1.78 org.webjars:jquery:jar:3.5.1)
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.111) + self.guice + self.glassfish_embedded
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
end
def json_schema_validator
%w(
com.networknt:json-schema-validator:jar:1.0.43
org.apache.commons:commons-lang3:jar:3.5
org.jruby.joni:joni:jar:2.1.31
org.jruby.jcodings:jcodings:jar:1.0.46
) + self.jackson_databind + self.slf4j
end
def pdfbox
%w(
org.apache.pdfbox:pdfbox:jar:2.0.21
org.apache.pdfbox:fontbox:jar:2.0.21
org.apache.pdfbox:xmpbox:jar:2.0.21
) + self.commons_logging + self.bouncycastle
end
def openhtmltopdf
%w(
com.openhtmltopdf:openhtmltopdf-pdfbox:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
de.rototor.pdfbox:graphics2d:jar:0.26
com.openhtmltopdf:openhtmltopdf-core:jar:1.0.4
com.openhtmltopdf:openhtmltopdf-svg-support:jar:1.0.4
) + BuildrPlus::Libs.xmlgraphics + BuildrPlus::Libs.pdfbox
end
def xmlgraphics
%w(
org.apache.xmlgraphics:batik-anim:jar:1.12
org.apache.xmlgraphics:batik-awt-util:jar:1.12
org.apache.xmlgraphics:batik-bridge:jar:1.12
org.apache.xmlgraphics:batik-codec:jar:1.12
org.apache.xmlgraphics:batik-constants:jar:1.12
org.apache.xmlgraphics:batik-css:jar:1.12
org.apache.xmlgraphics:batik-dom:jar:1.12
org.apache.xmlgraphics:batik-ext:jar:1.12
org.apache.xmlgraphics:batik-gvt:jar:1.12
org.apache.xmlgraphics:batik-i18n:jar:1.12
org.apache.xmlgraphics:batik-parser:jar:1.12
org.apache.xmlgraphics:batik-script:jar:1.12
org.apache.xmlgraphics:batik-svg-dom:jar:1.12
org.apache.xmlgraphics:batik-svggen:jar:1.12
org.apache.xmlgraphics:batik-transcoder:jar:1.12
org.apache.xmlgraphics:batik-util:jar:1.12
org.apache.xmlgraphics:batik-xml:jar:1.12
org.apache.xmlgraphics:xmlgraphics-commons:jar:2.4
) + self.commons_io + self.xml_apis_ext
end
def xml_apis_ext
%w(xml-apis:xml-apis-ext:jar:1.3.04)
end
def thymeleaf
%w(
org.thymeleaf:thymeleaf:jar:3.0.11.RELEASE
ognl:ognl:jar:3.1.12
org.javassist:javassist:jar:3.20.0-GA
org.attoparser:attoparser:jar:2.0.5.RELEASE
org.unbescape:unbescape:jar:1.1.6.RELEASE
org.thymeleaf.extras:thymeleaf-extras-java8time:jar:3.0.4.RELEASE
) + self.commons_logging
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
def db_drivers
return self.jtds if BuildrPlus::Db.mssql?
return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
[]
end
end
end
# |
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def findbugs_provided
%w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
end
def ee_provided
%w(javax:javaee-api:jar:7.0) + self.findbugs_provided
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:4.1.1.162)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.6.0'
end
def mockito
%w(org.mockito:mockito-all:jar:1.9.5)
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.5.4)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.5.4)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.5.4)
end
def jackson_gwt_support
self.jackson_core + self.jackson_databind + self.jackson_annotations
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.7.0)
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.7.0)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.7.0'
end
def gwt_gin
%w(com.google.gwt.inject:gin:jar:2.1.2 javax.inject:javax.inject:jar:1) + self.guice + self.gwt_user
end
def replicant
%w(org.realityforge.replicant:replicant:jar:0.5.56)
end
def gwt_property_source
%w(org.realityforge.gwt.property-source:gwt-property-source:jar:0.2)
end
def gwt_webpoller
%w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.8)
end
def gwt_datatypes
%w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.8)
end
def gwt_ga
%w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
end
def gwt_mmvp
%w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.5)
end
def gwt_lognice
%w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.2)
end
def gwt_appcache_client
%w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.8 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.8)
end
def gwt_appcache_server
%w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.8)
end
# The appcache code required to exist on gwt path during compilation
def gwt_appcache
self.gwt_appcache_client + self.gwt_appcache_server
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.6)
end
def simple_session_filter
%w(org.realityforge.ssf:simple-session-filter:jar:0.6)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.3 org.antlr:antlr4-runtime:jar:4.3 org.antlr:antlr4-annotations:jar:4.3) + self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.9)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
self.commons_logging + self.commons_codec
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.1)
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def replicant_client
self.replicant + self.gwt_property_source + self.gwt_datatypes + self.gwt_webpoller
end
def replicant_server
self.replicant + self.simple_session_filter + self.gwt_rpc + self.field_filter
end
def gwt_rpc
self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
end
def testng
%w(org.testng:testng:jar:6.8)
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.glassfish_embedded + self.guiceyloops_gwt
end
def guiceyloops_gwt
%w(org.realityforge.guiceyloops:guiceyloops:jar:0.76) + self.mockito + self.guice + self.testng
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.4)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.4)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
def db_drivers
return self.jtds if BuildrPlus::Db.mssql?
return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
[]
end
end
end
# Add proxy_servlet library
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
  # Catalog of third-party dependencies shared across projects. Each method
  # returns an array of Buildr artifact specs ("group:id:type:version") --
  # except a few (eclipselink, gwt_dev, greenmail_server) that return a single
  # spec string. Related specs are composed via self.<method> calls.
  f.enhance(:Config) do
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # GeoTools stack (and its CRS/JAI transitive deps) for geolatte support.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    def findbugs_provided
      %w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
    end
    # Compile-time ("provided") Java EE APIs plus the findbugs annotations.
    def ee_provided
      %w(javax:javaee-api:jar:7.0) + self.findbugs_provided
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:4.1.1.162)
    end
    # NOTE: returns a bare spec string rather than an array.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.6.0'
    end
    def mockito
      %w(org.mockito:mockito-all:jar:1.9.5)
    end
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.5.4)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.5.4)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.5.4)
    end
    def jackson_gwt_support
      self.jackson_core + self.jackson_databind + self.jackson_annotations
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.7.0)
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.7.0)
    end
    # NOTE: returns a bare spec string rather than an array.
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.7.0'
    end
    def gwt_gin
      %w(com.google.gwt.inject:gin:jar:2.1.2 javax.inject:javax.inject:jar:1) + self.guice + self.gwt_user
    end
    def replicant
      %w(org.realityforge.replicant:replicant:jar:0.5.56)
    end
    def gwt_property_source
      %w(org.realityforge.gwt.property-source:gwt-property-source:jar:0.2)
    end
    def gwt_webpoller
      %w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.8)
    end
    def gwt_datatypes
      %w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.8)
    end
    def gwt_ga
      %w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
    end
    def gwt_mmvp
      %w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.5)
    end
    def gwt_lognice
      %w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.2)
    end
    def gwt_appcache_client
      %w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.8 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.8)
    end
    def gwt_appcache_server
      %w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.8)
    end
    # The appcache code is required to exist on the gwt path during compilation.
    def gwt_appcache
      self.gwt_appcache_client + self.gwt_appcache_server
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.6)
    end
    def simple_session_filter
      %w(org.realityforge.ssf:simple-session-filter:jar:0.6)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.3 org.antlr:antlr4-runtime:jar:4.3 org.antlr:antlr4-annotations:jar:4.3) + self.field_filter
    end
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.9)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
        self.commons_logging + self.commons_codec
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.1)
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def replicant_client
      self.replicant + self.gwt_property_source + self.gwt_datatypes + self.gwt_webpoller
    end
    def replicant_server
      self.replicant + self.simple_session_filter + self.gwt_rpc + self.field_filter
    end
    def gwt_rpc
      self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
    end
    def guice
      %w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
    end
    def testng
      %w(org.testng:testng:jar:6.8)
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.glassfish_embedded + self.guiceyloops_gwt
    end
    def guiceyloops_gwt
      %w(org.realityforge.guiceyloops:guiceyloops:jar:0.76) + self.mockito + self.guice + self.testng
    end
    # The timers artifacts switch to a ".pg" group when targeting PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.4)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.4)
    end
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
    end
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    # NOTE: returns a bare spec string rather than an array.
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # JDBC drivers matching the configured database; empty when neither MSSQL
    # nor PostgreSQL is configured.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# --------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
  # Catalog of third-party dependencies shared across projects. Each method
  # returns an array of Buildr artifact specs ("group:id:type:version") --
  # except a few (giggle, eclipselink, gwt_dev, keycloak_converger,
  # greenmail_server) that return a single spec string. Related specs are
  # composed via self.<method> calls.
  f.enhance(:Config) do
    # NOTE: returns a bare spec string (with "all" classifier), not an array.
    def giggle
      'org.realityforge.giggle:giggle-compiler:jar:all:0.08'
    end
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # GeoTools stack (and its CRS/JAI transitive deps) for geolatte support.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    def jetbrains_annotations
      %w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.5.0)
    end
    def javax_annotations
      %w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
    end
    def spotbugs_provided
      %w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
    end
    # Compile-time ("provided") Java EE APIs plus static-analysis annotations.
    def ee_provided
      %w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:5.2020.2)
    end
    # NOTE: returns a bare spec string rather than an array.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.7.4'
    end
    def mockito
      %w(org.mockito:mockito-all:jar:1.10.19)
    end
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.9.9)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.9.9)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.9.9)
    end
    def jackson_datatype_jdk8
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
    end
    def jackson_datatype_jsr310
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
    end
    def jackson_gwt_support
      self.jackson_core + self.jackson_databind + self.jackson_annotations
    end
    def braincheck
      %w(org.realityforge.braincheck:braincheck:jar:1.29.0)
    end
    def jsinterop
      %w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
    end
    def jsinterop_base
      %w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
    end
    # Elemental2 artifacts share a single version and (repackaged) group id.
    def elemental2_version
      '2.27'
    end
    def elemental2_group_id
      'org.realityforge.com.google.elemental2'
    end
    def elemental2_core
      %W(#{elemental2_group_id}:elemental2-core:jar:#{elemental2_version}) + self.jsinterop_base
    end
    def elemental2_dom
      %W(#{elemental2_group_id}:elemental2-dom:jar:#{elemental2_version}) + self.elemental2_promise
    end
    def elemental2_promise
      %W(#{elemental2_group_id}:elemental2-promise:jar:#{elemental2_version}) + self.elemental2_core
    end
    def elemental2_webstorage
      %W(#{elemental2_group_id}:elemental2-webstorage:jar:#{elemental2_version}) + self.elemental2_dom
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.9.0)
    end
    # NOTE: returns a bare spec string rather than an array.
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.9.0'
    end
    def javax_inject
      %w(javax.inject:javax.inject:jar:1)
    end
    # GWT compilation needs the sources classifier alongside the binary jar.
    def javax_inject_gwt
      %w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
    end
    def gwt_serviceworker
      %w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def timeservice
      %w(org.realityforge.timeservice:timeservice:jar:0.02)
    end
    def graphql_java
      %w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
    end
    def graphql_java_dataloader
      %w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
    end
    def graphql_java_servlet
      %w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
        self.graphql_java +
        self.jackson_annotations +
        self.jackson_core +
        self.jackson_databind +
        self.jackson_datatype_jdk8 +
        self.guava # upstream expects guava 24.1.1-jre; self.guava supplies a newer version
    end
    def graphql_java_scalars
      %w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
    end
    def antlr4_runtime
      %w(org.antlr:antlr4-runtime:jar:4.7.2)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
        self.antlr4_runtime +
        self.field_filter
    end
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.9)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
        self.commons_logging + self.commons_codec
    end
    def failsafe
      %w(net.jodah:failsafe:jar:1.0.3)
    end
    def keycloak_gwt
      %w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.7) + self.elemental2_webstorage
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.4)
    end
    def keycloak_authfilter
      %w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.02)
    end
    # NOTE: returns a bare spec string rather than an array.
    def keycloak_converger
      'org.realityforge.keycloak.converger:keycloak-converger:jar:1.8'
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def simple_keycloak_service
      %w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.1)
    end
    def guava
      %w(com.google.guava:guava:jar:27.1-jre)
    end
    def arez_version
      '0.184'
    end
    def arez
      %W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck
    end
    def arez_processor
      %W(org.realityforge.arez:arez-processor:jar:#{arez_version})
    end
    def arez_spytools
      %w(org.realityforge.arez.spytools:arez-spytools:jar:0.111)
    end
    def arez_testng
      %w(org.realityforge.arez.testng:arez-testng:jar:0.16)
    end
    def arez_dom
      %w(org.realityforge.arez.dom:arez-dom:jar:0.71)
    end
    def arez_persist_version
      '0.12'
    end
    def arez_persist_core
      %W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
    end
    def arez_persist_processor
      %W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
    end
    def grim_annotations
      %w(org.realityforge.grim:grim-annotations:jar:0.04)
    end
    def router_fu_version
      '0.31'
    end
    def router_fu
      %W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
    end
    def router_fu_processor
      %W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
    end
    def sting_version
      '0.16'
    end
    def sting_core
      %W(org.realityforge.sting:sting-core:jar:#{sting_version})
    end
    def sting_processor
      %W(org.realityforge.sting:sting-processor:jar:#{sting_version})
    end
    def react4j_version
      '0.169'
    end
    def react4j
      %W(
        org.realityforge.react4j:react4j-core:jar:#{react4j_version}
        org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
      ) + self.elemental2_dom
    end
    def react4j_processor
      %W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
    end
    def replicant_version
      '6.94'
    end
    def replicant_client
      %W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) + self.elemental2_webstorage
    end
    def replicant_server
      %W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
    end
    def gwt_rpc
      self.jackson_gwt_support + self.gwt_servlet
    end
    def guice
      %w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
    end
    def awaitility
      %w(org.awaitility:awaitility:jar:2.0.0)
    end
    def testng_version
      '6.11'
    end
    def testng
      %W(org.testng:testng:jar:#{testng_version})
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.106) + self.guice + self.glassfish_embedded
    end
    # The timers artifacts switch to a ".pg" group when targeting PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
    end
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
    end
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    # NOTE: returns a bare spec string rather than an array.
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # JDBC drivers matching the configured database; empty when neither MSSQL
    # nor PostgreSQL is configured.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# Bump react4j version
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
  # Catalog of third-party dependencies shared across projects. Each method
  # returns an array of Buildr artifact specs ("group:id:type:version") --
  # except a few (giggle, eclipselink, gwt_dev, keycloak_converger,
  # greenmail_server) that return a single spec string. Related specs are
  # composed via self.<method> calls.
  f.enhance(:Config) do
    # NOTE: returns a bare spec string (with "all" classifier), not an array.
    def giggle
      'org.realityforge.giggle:giggle-compiler:jar:all:0.08'
    end
    def mustache
      %w(com.github.spullara.mustache.java:compiler:jar:0.9.6) + self.guava
    end
    def javacsv
      %w(net.sourceforge.javacsv:javacsv:jar:2.1)
    end
    # GeoTools stack (and its CRS/JAI transitive deps) for geolatte support.
    def geotools_for_geolatte
      %w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
    end
    def jts
      %w(com.vividsolutions:jts:jar:1.13)
    end
    # Support geo libraries for geolatte
    def geolatte_support
      self.jts + self.slf4j
    end
    def geolatte_geom
      %w(org.geolatte:geolatte-geom:jar:0.13)
    end
    def geolatte_geom_jpa
      %w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
    end
    def jetbrains_annotations
      %w(org.realityforge.org.jetbrains.annotations:org.jetbrains.annotations:jar:1.5.0)
    end
    def javax_annotations
      %w(org.realityforge.javax.annotation:javax.annotation:jar:1.0.1)
    end
    def spotbugs_provided
      %w(com.github.spotbugs:spotbugs-annotations:jar:3.1.5 net.jcip:jcip-annotations:jar:1.0) + self.javax_annotations
    end
    # Compile-time ("provided") Java EE APIs plus static-analysis annotations.
    def ee_provided
      %w(javax:javaee-api:jar:8.0.1) + self.spotbugs_provided + self.jetbrains_annotations
    end
    def glassfish_embedded
      %w(fish.payara.extras:payara-embedded-all:jar:5.2020.2)
    end
    # NOTE: returns a bare spec string rather than an array.
    def eclipselink
      'org.eclipse.persistence:eclipselink:jar:2.7.4'
    end
    def mockito
      %w(org.mockito:mockito-all:jar:1.10.19)
    end
    def jackson_annotations
      %w(com.fasterxml.jackson.core:jackson-annotations:jar:2.9.9)
    end
    def jackson_core
      %w(com.fasterxml.jackson.core:jackson-core:jar:2.9.9)
    end
    def jackson_databind
      %w(com.fasterxml.jackson.core:jackson-databind:jar:2.9.9)
    end
    def jackson_datatype_jdk8
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.9.9)
    end
    def jackson_datatype_jsr310
      %w(com.fasterxml.jackson.datatype:jackson-datatype-jsr310:jar:2.9.9)
    end
    def jackson_gwt_support
      self.jackson_core + self.jackson_databind + self.jackson_annotations
    end
    def braincheck
      %w(org.realityforge.braincheck:braincheck:jar:1.29.0)
    end
    def jsinterop
      %w(com.google.jsinterop:jsinterop-annotations:jar:2.0.0)
    end
    def jsinterop_base
      %w(com.google.jsinterop:base:jar:1.0.0) + self.jsinterop
    end
    # Elemental2 artifacts share a single version and (repackaged) group id.
    def elemental2_version
      '2.27'
    end
    def elemental2_group_id
      'org.realityforge.com.google.elemental2'
    end
    def elemental2_core
      %W(#{elemental2_group_id}:elemental2-core:jar:#{elemental2_version}) + self.jsinterop_base
    end
    def elemental2_dom
      %W(#{elemental2_group_id}:elemental2-dom:jar:#{elemental2_version}) + self.elemental2_promise
    end
    def elemental2_promise
      %W(#{elemental2_group_id}:elemental2-promise:jar:#{elemental2_version}) + self.elemental2_core
    end
    def elemental2_webstorage
      %W(#{elemental2_group_id}:elemental2-webstorage:jar:#{elemental2_version}) + self.elemental2_dom
    end
    def gwt_user
      %w(com.google.gwt:gwt-user:jar:2.9.0 org.w3c.css:sac:jar:1.3) + self.jsinterop
    end
    def gwt_servlet
      %w(com.google.gwt:gwt-servlet:jar:2.9.0)
    end
    # NOTE: returns a bare spec string rather than an array.
    def gwt_dev
      'com.google.gwt:gwt-dev:jar:2.9.0'
    end
    def javax_inject
      %w(javax.inject:javax.inject:jar:1)
    end
    # GWT compilation needs the sources classifier alongside the binary jar.
    def javax_inject_gwt
      %w(javax.inject:javax.inject:jar:sources:1) + self.javax_inject
    end
    def gwt_serviceworker
      %w(org.realityforge.gwt.serviceworker:gwt-serviceworker-linker:jar:0.02)
    end
    def gwt_cache_filter
      %w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.9)
    end
    def field_filter
      %w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
    end
    def timeservice
      %w(org.realityforge.timeservice:timeservice:jar:0.02)
    end
    def graphql_java
      %w(com.graphql-java:graphql-java:jar:13.0) + self.slf4j + self.antlr4_runtime + self.graphql_java_dataloader
    end
    def graphql_java_dataloader
      %w(com.graphql-java:java-dataloader:jar:2.1.1 org.reactivestreams:reactive-streams:jar:1.0.2)
    end
    def graphql_java_servlet
      %w(com.graphql-java-kickstart:graphql-java-servlet:jar:8.0.0) +
        self.graphql_java +
        self.jackson_annotations +
        self.jackson_core +
        self.jackson_databind +
        self.jackson_datatype_jdk8 +
        self.guava # upstream expects guava 24.1.1-jre; self.guava supplies a newer version
    end
    def graphql_java_scalars
      %w(org.realityforge.graphql.scalars:graphql-java-scalars:jar:0.01)
    end
    def antlr4_runtime
      %w(org.antlr:antlr4-runtime:jar:4.7.2)
    end
    def rest_criteria
      %w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
        self.antlr4_runtime +
        self.field_filter
    end
    def commons_logging
      %w(commons-logging:commons-logging:jar:1.2)
    end
    def commons_codec
      %w(commons-codec:commons-codec:jar:1.9)
    end
    def bouncycastle
      %w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
    end
    def proxy_servlet
      self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
    end
    def httpclient
      %w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
        self.commons_logging + self.commons_codec
    end
    def failsafe
      %w(net.jodah:failsafe:jar:1.0.3)
    end
    def keycloak_gwt
      %w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.7) + self.elemental2_webstorage
    end
    def keycloak_domgen_support
      %w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.4)
    end
    def keycloak_authfilter
      %w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:1.02)
    end
    # NOTE: returns a bare spec string rather than an array.
    def keycloak_converger
      'org.realityforge.keycloak.converger:keycloak-converger:jar:1.8'
    end
    def jboss_logging
      %w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
    end
    def keycloak_core
      %w(
        org.keycloak:keycloak-core:jar:2.0.0.Final
        org.keycloak:keycloak-common:jar:2.0.0.Final
      ) + self.bouncycastle
    end
    def keycloak
      %w(
        org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
        org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
        org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
      ) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
    end
    def simple_keycloak_service
      %w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.1)
    end
    def guava
      %w(com.google.guava:guava:jar:27.1-jre)
    end
    def arez_version
      '0.184'
    end
    def arez
      %W(org.realityforge.arez:arez-core:jar:#{arez_version}) + self.braincheck
    end
    def arez_processor
      %W(org.realityforge.arez:arez-processor:jar:#{arez_version})
    end
    def arez_spytools
      %w(org.realityforge.arez.spytools:arez-spytools:jar:0.111)
    end
    def arez_testng
      %w(org.realityforge.arez.testng:arez-testng:jar:0.16)
    end
    def arez_dom
      %w(org.realityforge.arez.dom:arez-dom:jar:0.71)
    end
    def arez_persist_version
      '0.12'
    end
    def arez_persist_core
      %W(org.realityforge.arez.persist:arez-persist-core:jar:#{arez_persist_version})
    end
    def arez_persist_processor
      %W(org.realityforge.arez.persist:arez-persist-processor:jar:#{arez_persist_version})
    end
    def grim_annotations
      %w(org.realityforge.grim:grim-annotations:jar:0.04)
    end
    def router_fu_version
      '0.31'
    end
    def router_fu
      %W(org.realityforge.router.fu:router-fu-core:jar:#{router_fu_version}) + self.braincheck
    end
    def router_fu_processor
      %W(org.realityforge.router.fu:router-fu-processor:jar:#{router_fu_version})
    end
    def sting_version
      '0.16'
    end
    def sting_core
      %W(org.realityforge.sting:sting-core:jar:#{sting_version})
    end
    def sting_processor
      %W(org.realityforge.sting:sting-processor:jar:#{sting_version})
    end
    def react4j_version
      '0.170'
    end
    def react4j
      %W(
        org.realityforge.react4j:react4j-core:jar:#{react4j_version}
        org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
      ) + self.elemental2_dom
    end
    def react4j_processor
      %W(org.realityforge.react4j:react4j-processor:jar:#{react4j_version})
    end
    def replicant_version
      '6.94'
    end
    def replicant_client
      %W(org.realityforge.replicant:replicant-client:jar:#{replicant_version}) + self.elemental2_webstorage
    end
    def replicant_server
      %W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.gwt_rpc
    end
    def gwt_rpc
      self.jackson_gwt_support + self.gwt_servlet
    end
    def guice
      %w(aopalliance:aopalliance:jar:1.0 org.ow2.asm:asm:jar:7.1 au.com.stocksoftware.com.google.inject:guice:jar:4.1.1-stock)
    end
    def awaitility
      %w(org.awaitility:awaitility:jar:2.0.0)
    end
    def testng_version
      '6.11'
    end
    def testng
      %W(org.testng:testng:jar:#{testng_version})
    end
    def jndikit
      %w(org.realityforge.jndikit:jndikit:jar:1.4)
    end
    def guiceyloops
      self.mockito + self.testng + %w(org.realityforge.guiceyloops:guiceyloops:jar:0.106) + self.guice + self.glassfish_embedded
    end
    # The timers artifacts switch to a ".pg" group when targeting PostgreSQL.
    def glassfish_timers_domain
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.7)
    end
    def glassfish_timers_db
      %W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.7)
    end
    def slf4j
      %w(org.slf4j:slf4j-api:jar:1.7.25 org.slf4j:slf4j-jdk14:jar:1.7.25)
    end
    def greenmail
      %w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
    end
    # NOTE: returns a bare spec string rather than an array.
    def greenmail_server
      'com.icegreen:greenmail-webapp:war:1.4.1'
    end
    def jtds
      %w(net.sourceforge.jtds:jtds:jar:1.3.1)
    end
    def postgresql
      %w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
    end
    def postgis
      %w(org.postgis:postgis-jdbc:jar:1.3.3)
    end
    # JDBC drivers matching the configured database; empty when neither MSSQL
    # nor PostgreSQL is configured.
    def db_drivers
      return self.jtds if BuildrPlus::Db.mssql?
      return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
      []
    end
  end
end
# --------------------------------------------------------------------------
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def jetbrains_annotations
%w(org.jetbrains:annotations:jar:15.0)
end
def findbugs_provided
%w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
end
def ee_provided
%w(javax:javaee-api:jar:7.0) + self.findbugs_provided + self.jetbrains_annotations
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:4.1.2.172 fish.payara.api:payara-api:jar:4.1.2.172)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.6.0'
end
def mockito
%w(org.mockito:mockito-all:jar:1.10.19)
end
def objenesis
%w(org.objenesis:objenesis:jar:2.5.1)
end
def powermock_version
'1.6.6'
end
def powermock_javaagent
"org.powermock:powermock-module-javaagent:jar:#{powermock_version}"
end
def powermock
%W(
org.powermock:powermock-core:jar:#{powermock_version}
org.powermock:powermock-reflect:jar:#{powermock_version}
org.powermock:powermock-module-testng-common:jar:#{powermock_version}
org.powermock:powermock-module-testng:jar:#{powermock_version}
org.powermock:powermock-api-mockito:jar:#{powermock_version}
org.powermock:powermock-api-mockito-common:jar:#{powermock_version}
org.powermock:powermock-api-support:jar:#{powermock_version}
org.javassist:javassist:jar:3.21.0-GA
org.powermock:powermock-module-testng-agent:jar:#{powermock_version}
#{powermock_javaagent}
) + self.objenesis
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.8.8)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.8.8)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.8.8)
end
def jackson_datatype_jdk8
%w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.8.8)
end
def jackson_module_kotlin
%w(com.fasterxml.jackson.module:jackson-module-kotlin:jar:2.8.8)
end
def jackson_gwt_support
self.jackson_core + self.jackson_databind + self.jackson_annotations
end
def anodoc
%w(org.realityforge.anodoc:anodoc:jar:1.0.0)
end
def braincheck_version
'1.3.0'
end
def braincheck
%W(org.realityforge.braincheck:braincheck:jar:#{braincheck_version}) + self.anodoc
end
def braincheck_gwt
%W(org.realityforge.braincheck:braincheck:jar:gwt:#{braincheck_version}) + self.anodoc
end
def jsinterop
%w(com.google.jsinterop:jsinterop-annotations:jar:1.0.1 com.google.jsinterop:jsinterop-annotations:jar:sources:1.0.1)
end
def jsinterop_base
%w(com.google.jsinterop:base:jar:1.0.0-beta-1 com.google.jsinterop:base:jar:sources:1.0.0-beta-1) + self.jsinterop
end
def elemental2_core
%w(com.google.elemental2:elemental2-core:jar:1.0.0-beta-1) + self.jsinterop_base
end
def elemental2_dom
%w(com.google.elemental2:elemental2-dom:jar:1.0.0-beta-1) + self.elemental2_core
end
def elemental2_promise
%w(com.google.elemental2:elemental2-promise:jar:1.0.0-beta-1) + self.elemental2_core
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.8.2 org.w3c.css:sac:jar:1.3) + self.jsinterop
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.8.2)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.8.2'
end
def javax_inject
%w(javax.inject:javax.inject:jar:1)
end
def gwt_gin
%w(com.google.gwt.inject:gin:jar:2.1.2) + self.javax_inject + self.guice + self.gwt_user
end
def gwt_gin_extensions
%w(org.realityforge.gwt.gin:gwt-gin-extensions:jar:0.1)
end
def gwt_webpoller
%w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.9.5)
end
def gwt_datatypes
%w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.9)
end
def gwt_ga
%w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
end
def gwt_mmvp
%w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.9)
end
def gwt_lognice
%w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.6)
end
def gwt_appcache_client
%w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.11 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.11)
end
def gwt_appcache_server
%w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.11)
end
# The appcache code required to exist on gwt path during compilation
def gwt_appcache
self.gwt_appcache_client + self.gwt_appcache_server
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.7)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def graphql_java
%w(com.graphql-java:graphql-java:jar:3.0.0) + BuildrPlus::Libs.slf4j + BuildrPlus::Libs.antlr4_runtime
end
def graphql_java_tools
%w(
com.esotericsoftware:reflectasm:jar:1.11.3
com.google.guava:guava:jar:21.0
com.graphql-java:graphql-java-tools:jar:3.2.1
org.jetbrains.kotlin:kotlin-reflect:jar:1.1.1
org.jetbrains.kotlin:kotlin-stdlib:jar:1.1.1
org.jetbrains.kotlin:kotlin-stdlib:jar:1.1.3-2
org.ow2.asm:asm:jar:5.0.4
ru.vyarus:generics-resolver:jar:2.0.1
) + self.graphql_java +
self.jackson_annotations +
self.jackson_core +
self.jackson_databind +
self.jackson_datatype_jdk8 +
self.jackson_module_kotlin +
self.jetbrains_annotations
end
def graphql_java_servlet
%w(
com.graphql-java:graphql-java-servlet:jar:4.0.0
commons-fileupload:commons-fileupload:jar:1.3.3
commons-io:commons-io:jar:2.5
) + self.graphql_java_tools
end
def graphql_domgen_support
%w(org.realityforge.keycloak.domgen:graphql-domgen-support:jar:1.2.0)
end
def antlr4_runtime
%w(org.antlr:antlr4-runtime:jar:4.5.1)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
self.antlr4_runtime +
self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.9)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
end
def proxy_servlet
self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
self.commons_logging + self.commons_codec
end
def failsafe
%w(net.jodah:failsafe:jar:1.0.3)
end
def keycloak_gwt
%w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.2)
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.4)
end
def keycloak_authfilter
%w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:0.2)
end
def keycloak_converger
'org.realityforge.keycloak.converger:keycloak-converger:jar:1.6'
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def simple_keycloak_service
%w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.1)
end
def guava
%w(com.google.guava:guava:jar:21.0)
end
def javapoet
%w(com.squareup:javapoet:jar:1.7.0) + self.guava
end
def arez_version
'0.25'
end
def arez
%W(
org.realityforge.arez:arez-annotations:jar:#{arez_version}
org.realityforge.arez:arez-core:jar:#{arez_version}
org.realityforge.arez:arez-component:jar:#{arez_version}
org.realityforge.arez:arez-processor:jar:#{arez_version}
org.realityforge.arez:arez-extras:jar:#{arez_version}
) + self.braincheck + self.javapoet
end
def arez_gwt
%W(
org.realityforge.arez:arez-annotations:jar:gwt:#{arez_version}
org.realityforge.arez:arez-core:jar:gwt:#{arez_version}
org.realityforge.arez:arez-component:jar:gwt:#{arez_version}
org.realityforge.arez:arez-processor:jar:#{arez_version}
org.realityforge.arez:arez-extras:jar:gwt:#{arez_version}
) + self.braincheck_gwt + self.javapoet
end
def arez_browser_gwt
%W(org.realityforge.arez:arez-browser-extras:jar:gwt:#{arez_version})
end
def react4j_version
'0.15'
end
def react4j
%W(
org.realityforge.react4j:react4j-annotations:jar:#{react4j_version}
org.realityforge.react4j:react4j-core:jar:#{react4j_version}
org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
org.realityforge.react4j:react4j-processor:jar:#{react4j_version}
org.realityforge.react4j:react4j-widget:jar:#{react4j_version}
) + self.elemental2_dom + self.elemental2_promise
end
def react4j_arez
%W(org.realityforge.react4j:react4j-arez:jar:#{react4j_version})
end
def replicant_version
'0.5.94-arez-b5'
end
def replicant_shared
%W(org.realityforge.replicant:replicant-shared:jar:#{replicant_version})
end
def replicant_shared_ee
%W(org.realityforge.replicant:replicant-shared-ee:jar:#{replicant_version})
end
def replicant_client_common
%W(org.realityforge.replicant:replicant-client-common:jar:#{replicant_version}) + self.replicant_shared + self.gwt_webpoller + self.gwt_datatypes
end
def replicant_client_qa_support
%W(org.realityforge.replicant:replicant-client-qa-support:jar:#{replicant_version}) + self.guiceyloops_gwt
end
def replicant_ee_client
%W(org.realityforge.replicant:replicant-client-ee:jar:#{replicant_version}) + self.replicant_client_common + self.replicant_shared_ee
end
def replicant_gwt_client
%W(org.realityforge.replicant:replicant-client-gwt:jar:#{replicant_version}) + self.replicant_client_common
end
def replicant_server
%W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.replicant_shared + self.gwt_rpc + self.replicant_shared_ee
end
def gwt_rpc
self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
end
def awaitility
%w(org.awaitility:awaitility:jar:2.0.0)
end
def testng_version
'6.11'
end
def testng
%W(org.testng:testng:jar:#{testng_version})
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.guiceyloops_gwt + self.glassfish_embedded
end
def guiceyloops_lib
'org.realityforge.guiceyloops:guiceyloops:jar:0.94'
end
def guiceyloops_gwt
[guiceyloops_lib] + self.mockito + self.guice + self.testng
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.6)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.6)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
def db_drivers
return self.jtds if BuildrPlus::Db.mssql?
return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
[]
end
end
end
Add router-fu version
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
BuildrPlus::FeatureManager.feature(:libs) do |f|
f.enhance(:Config) do
def mustache
%w(com.github.spullara.mustache.java:compiler:jar:0.8.15)
end
def javacsv
%w(net.sourceforge.javacsv:javacsv:jar:2.1)
end
def geotools_for_geolatte
%w(org.geotools:gt-main:jar:9.4 org.geotools:gt-metadata:jar:9.4 org.geotools:gt-api:jar:9.4 org.geotools:gt-epsg-wkt:jar:9.4 org.geotools:gt-opengis:jar:9.4 org.geotools:gt-transform:jar:9.4 org.geotools:gt-geometry:jar:9.4 org.geotools:gt-jts-wrapper:jar:9.4 org.geotools:gt-referencing:jar:9.4 net.java.dev.jsr-275:jsr-275:jar:1.0-beta-2 java3d:vecmath:jar:1.3.2 javax.media:jai_core:jar:1.1.3)
end
def jts
%w(com.vividsolutions:jts:jar:1.13)
end
# Support geo libraries for geolatte
def geolatte_support
self.jts + self.slf4j
end
def geolatte_geom
%w(org.geolatte:geolatte-geom:jar:0.13)
end
def geolatte_geom_jpa
%w(org.realityforge.geolatte.jpa:geolatte-geom-jpa:jar:0.2)
end
def jetbrains_annotations
%w(org.jetbrains:annotations:jar:15.0)
end
def findbugs_provided
%w(com.google.code.findbugs:jsr305:jar:3.0.0 com.google.code.findbugs:annotations:jar:3.0.0)
end
def ee_provided
%w(javax:javaee-api:jar:7.0) + self.findbugs_provided + self.jetbrains_annotations
end
def glassfish_embedded
%w(fish.payara.extras:payara-embedded-all:jar:4.1.2.172 fish.payara.api:payara-api:jar:4.1.2.172)
end
def eclipselink
'org.eclipse.persistence:eclipselink:jar:2.6.0'
end
def mockito
%w(org.mockito:mockito-all:jar:1.10.19)
end
def objenesis
%w(org.objenesis:objenesis:jar:2.5.1)
end
def powermock_version
'1.6.6'
end
def powermock_javaagent
"org.powermock:powermock-module-javaagent:jar:#{powermock_version}"
end
def powermock
%W(
org.powermock:powermock-core:jar:#{powermock_version}
org.powermock:powermock-reflect:jar:#{powermock_version}
org.powermock:powermock-module-testng-common:jar:#{powermock_version}
org.powermock:powermock-module-testng:jar:#{powermock_version}
org.powermock:powermock-api-mockito:jar:#{powermock_version}
org.powermock:powermock-api-mockito-common:jar:#{powermock_version}
org.powermock:powermock-api-support:jar:#{powermock_version}
org.javassist:javassist:jar:3.21.0-GA
org.powermock:powermock-module-testng-agent:jar:#{powermock_version}
#{powermock_javaagent}
) + self.objenesis
end
def jackson_annotations
%w(com.fasterxml.jackson.core:jackson-annotations:jar:2.8.8)
end
def jackson_core
%w(com.fasterxml.jackson.core:jackson-core:jar:2.8.8)
end
def jackson_databind
%w(com.fasterxml.jackson.core:jackson-databind:jar:2.8.8)
end
def jackson_datatype_jdk8
%w(com.fasterxml.jackson.datatype:jackson-datatype-jdk8:jar:2.8.8)
end
def jackson_module_kotlin
%w(com.fasterxml.jackson.module:jackson-module-kotlin:jar:2.8.8)
end
def jackson_gwt_support
self.jackson_core + self.jackson_databind + self.jackson_annotations
end
def anodoc
%w(org.realityforge.anodoc:anodoc:jar:1.0.0)
end
def braincheck_version
'1.3.0'
end
def braincheck
%W(org.realityforge.braincheck:braincheck:jar:#{braincheck_version}) + self.anodoc
end
def braincheck_gwt
%W(org.realityforge.braincheck:braincheck:jar:gwt:#{braincheck_version}) + self.anodoc
end
def jsinterop
%w(com.google.jsinterop:jsinterop-annotations:jar:1.0.1 com.google.jsinterop:jsinterop-annotations:jar:sources:1.0.1)
end
def jsinterop_base
%w(com.google.jsinterop:base:jar:1.0.0-beta-1 com.google.jsinterop:base:jar:sources:1.0.0-beta-1) + self.jsinterop
end
def elemental2_core
%w(com.google.elemental2:elemental2-core:jar:1.0.0-beta-1) + self.jsinterop_base
end
def elemental2_dom
%w(com.google.elemental2:elemental2-dom:jar:1.0.0-beta-1) + self.elemental2_core
end
def elemental2_promise
%w(com.google.elemental2:elemental2-promise:jar:1.0.0-beta-1) + self.elemental2_core
end
def gwt_user
%w(com.google.gwt:gwt-user:jar:2.8.2 org.w3c.css:sac:jar:1.3) + self.jsinterop
end
def gwt_servlet
%w(com.google.gwt:gwt-servlet:jar:2.8.2)
end
def gwt_dev
'com.google.gwt:gwt-dev:jar:2.8.2'
end
def javax_inject
%w(javax.inject:javax.inject:jar:1)
end
def gwt_gin
%w(com.google.gwt.inject:gin:jar:2.1.2) + self.javax_inject + self.guice + self.gwt_user
end
def gwt_gin_extensions
%w(org.realityforge.gwt.gin:gwt-gin-extensions:jar:0.1)
end
def gwt_webpoller
%w(org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.9.5)
end
def gwt_datatypes
%w(org.realityforge.gwt.datatypes:gwt-datatypes:jar:0.9)
end
def gwt_ga
%w(org.realityforge.gwt.ga:gwt-ga:jar:0.5)
end
def gwt_mmvp
%w(org.realityforge.gwt.mmvp:gwt-mmvp:jar:0.9)
end
def gwt_lognice
%w(org.realityforge.gwt.lognice:gwt-lognice:jar:0.6)
end
def gwt_appcache_client
%w(org.realityforge.gwt.appcache:gwt-appcache-client:jar:1.0.11 org.realityforge.gwt.appcache:gwt-appcache-linker:jar:1.0.11)
end
def gwt_appcache_server
%w(org.realityforge.gwt.appcache:gwt-appcache-server:jar:1.0.11)
end
# The appcache code required to exist on gwt path during compilation
def gwt_appcache
self.gwt_appcache_client + self.gwt_appcache_server
end
def gwt_cache_filter
%w(org.realityforge.gwt.cache-filter:gwt-cache-filter:jar:0.7)
end
def field_filter
%w(org.realityforge.rest.field_filter:rest-field-filter:jar:0.4)
end
def graphql_java
%w(com.graphql-java:graphql-java:jar:3.0.0) + BuildrPlus::Libs.slf4j + BuildrPlus::Libs.antlr4_runtime
end
def graphql_java_tools
%w(
com.esotericsoftware:reflectasm:jar:1.11.3
com.google.guava:guava:jar:21.0
com.graphql-java:graphql-java-tools:jar:3.2.1
org.jetbrains.kotlin:kotlin-reflect:jar:1.1.1
org.jetbrains.kotlin:kotlin-stdlib:jar:1.1.1
org.jetbrains.kotlin:kotlin-stdlib:jar:1.1.3-2
org.ow2.asm:asm:jar:5.0.4
ru.vyarus:generics-resolver:jar:2.0.1
) + self.graphql_java +
self.jackson_annotations +
self.jackson_core +
self.jackson_databind +
self.jackson_datatype_jdk8 +
self.jackson_module_kotlin +
self.jetbrains_annotations
end
def graphql_java_servlet
%w(
com.graphql-java:graphql-java-servlet:jar:4.0.0
commons-fileupload:commons-fileupload:jar:1.3.3
commons-io:commons-io:jar:2.5
) + self.graphql_java_tools
end
def graphql_domgen_support
%w(org.realityforge.keycloak.domgen:graphql-domgen-support:jar:1.2.0)
end
def antlr4_runtime
%w(org.antlr:antlr4-runtime:jar:4.5.1)
end
def rest_criteria
%w(org.realityforge.rest.criteria:rest-criteria:jar:0.9.6) +
self.antlr4_runtime +
self.field_filter
end
def commons_logging
%w(commons-logging:commons-logging:jar:1.2)
end
def commons_codec
%w(commons-codec:commons-codec:jar:1.9)
end
def bouncycastle
%w(org.bouncycastle:bcprov-jdk15on:jar:1.52 org.bouncycastle:bcpkix-jdk15on:jar:1.52)
end
def proxy_servlet
self.httpclient + %w(org.realityforge.proxy-servlet:proxy-servlet:jar:0.2.0)
end
def httpclient
%w(org.apache.httpcomponents:httpclient:jar:4.5 org.apache.httpcomponents:httpcore:jar:4.4.1) +
self.commons_logging + self.commons_codec
end
def failsafe
%w(net.jodah:failsafe:jar:1.0.3)
end
def keycloak_gwt
%w(org.realityforge.gwt.keycloak:gwt-keycloak:jar:0.2)
end
def keycloak_domgen_support
%w(org.realityforge.keycloak.domgen:keycloak-domgen-support:jar:1.4)
end
def keycloak_authfilter
%w(org.realityforge.keycloak.client.authfilter:keycloak-jaxrs-client-authfilter:jar:0.2)
end
def keycloak_converger
'org.realityforge.keycloak.converger:keycloak-converger:jar:1.6'
end
def jboss_logging
%w(org.jboss.logging:jboss-logging:jar:3.3.0.Final)
end
def keycloak_core
%w(
org.keycloak:keycloak-core:jar:2.0.0.Final
org.keycloak:keycloak-common:jar:2.0.0.Final
) + self.bouncycastle
end
def keycloak
%w(
org.keycloak:keycloak-servlet-filter-adapter:jar:2.0.0.Final
org.keycloak:keycloak-adapter-spi:jar:2.0.0.Final
org.keycloak:keycloak-adapter-core:jar:2.0.0.Final
org.realityforge.org.keycloak:keycloak-servlet-adapter-spi:jar:2.0.0.Final
) + self.keycloak_core + self.keycloak_domgen_support + self.httpclient + self.jboss_logging
end
def simple_keycloak_service
%w(org.realityforge.keycloak.sks:simple-keycloak-service:jar:0.1)
end
def guava
%w(com.google.guava:guava:jar:21.0)
end
def javapoet
%w(com.squareup:javapoet:jar:1.7.0) + self.guava
end
def arez_version
'0.25'
end
def arez
%W(
org.realityforge.arez:arez-annotations:jar:#{arez_version}
org.realityforge.arez:arez-core:jar:#{arez_version}
org.realityforge.arez:arez-component:jar:#{arez_version}
org.realityforge.arez:arez-processor:jar:#{arez_version}
org.realityforge.arez:arez-extras:jar:#{arez_version}
) + self.braincheck + self.javapoet
end
def arez_gwt
%W(
org.realityforge.arez:arez-annotations:jar:gwt:#{arez_version}
org.realityforge.arez:arez-core:jar:gwt:#{arez_version}
org.realityforge.arez:arez-component:jar:gwt:#{arez_version}
org.realityforge.arez:arez-processor:jar:#{arez_version}
org.realityforge.arez:arez-extras:jar:gwt:#{arez_version}
) + self.braincheck_gwt + self.javapoet
end
def arez_browser_gwt
%W(org.realityforge.arez:arez-browser-extras:jar:gwt:#{arez_version})
end
def router_fu
%w(
org.realityforge.router.fu:router-fu-annotations:jar:0.01
org.realityforge.router.fu:router-fu-core:jar:0.01
org.realityforge.router.fu:router-fu-processor:jar:0.01
) + self.braincheck_gwt + self.javapoet
end
def react4j_version
'0.15'
end
def react4j
%W(
org.realityforge.react4j:react4j-annotations:jar:#{react4j_version}
org.realityforge.react4j:react4j-core:jar:#{react4j_version}
org.realityforge.react4j:react4j-dom:jar:#{react4j_version}
org.realityforge.react4j:react4j-processor:jar:#{react4j_version}
org.realityforge.react4j:react4j-widget:jar:#{react4j_version}
) + self.elemental2_dom + self.elemental2_promise
end
def react4j_arez
%W(org.realityforge.react4j:react4j-arez:jar:#{react4j_version})
end
def replicant_version
'0.5.94-arez-b5'
end
def replicant_shared
%W(org.realityforge.replicant:replicant-shared:jar:#{replicant_version})
end
def replicant_shared_ee
%W(org.realityforge.replicant:replicant-shared-ee:jar:#{replicant_version})
end
def replicant_client_common
%W(org.realityforge.replicant:replicant-client-common:jar:#{replicant_version}) + self.replicant_shared + self.gwt_webpoller + self.gwt_datatypes
end
def replicant_client_qa_support
%W(org.realityforge.replicant:replicant-client-qa-support:jar:#{replicant_version}) + self.guiceyloops_gwt
end
def replicant_ee_client
%W(org.realityforge.replicant:replicant-client-ee:jar:#{replicant_version}) + self.replicant_client_common + self.replicant_shared_ee
end
def replicant_gwt_client
%W(org.realityforge.replicant:replicant-client-gwt:jar:#{replicant_version}) + self.replicant_client_common
end
def replicant_server
%W(org.realityforge.replicant:replicant-server:jar:#{replicant_version}) + self.replicant_shared + self.gwt_rpc + self.replicant_shared_ee
end
def gwt_rpc
self.gwt_datatypes + self.jackson_gwt_support + self.gwt_servlet
end
def guice
%w(aopalliance:aopalliance:jar:1.0 com.google.inject:guice:jar:3.0 com.google.inject.extensions:guice-assistedinject:jar:3.0)
end
def awaitility
%w(org.awaitility:awaitility:jar:2.0.0)
end
def testng_version
'6.11'
end
def testng
%W(org.testng:testng:jar:#{testng_version})
end
def jndikit
%w(org.realityforge.jndikit:jndikit:jar:1.4)
end
def guiceyloops
self.guiceyloops_gwt + self.glassfish_embedded
end
def guiceyloops_lib
'org.realityforge.guiceyloops:guiceyloops:jar:0.94'
end
def guiceyloops_gwt
[guiceyloops_lib] + self.mockito + self.guice + self.testng
end
def glassfish_timers_domain
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-domain:json:0.6)
end
def glassfish_timers_db
%W(org.realityforge.glassfish.timers#{BuildrPlus::Db.pgsql? ? '.pg' : ''}:glassfish-timers-db:jar:0.6)
end
def slf4j
%w(org.slf4j:slf4j-api:jar:1.6.6 org.slf4j:slf4j-jdk14:jar:1.6.6)
end
def greenmail
%w(com.icegreen:greenmail:jar:1.4.1) + self.slf4j
end
def greenmail_server
'com.icegreen:greenmail-webapp:war:1.4.1'
end
def jtds
%w(net.sourceforge.jtds:jtds:jar:1.3.1)
end
def postgresql
%w(org.postgresql:postgresql:jar:9.2-1003-jdbc4)
end
def postgis
%w(org.postgis:postgis-jdbc:jar:1.3.3)
end
def db_drivers
return self.jtds if BuildrPlus::Db.mssql?
return self.postgresql + (BuildrPlus::FeatureManager.activated?(:geolatte) ? self.postgis : []) if BuildrPlus::Db.pgsql?
[]
end
end
end
|
namespace :assets do
desc "Compile assets"
task :compile do
next if fetch(:assets_compile).nil?
run_locally do
puts capture(fetch(:assets_compile))
end
end
desc "Push assets to remote"
task :push do
next unless any? :assets_output
on roles(:all) do |host|
fetch(:assets_output).each do |dir|
execute :mkdir, '-p', release_path.join(dir)
run_locally do
ssh = SSH.new(host, fetch(:ssh_options))
execute :rsync, "--rsh=\"ssh #{ssh.args.join(' ')}\"", fetch(:rsync_options), "#{dir}/", "#{ssh.remote}:#{release_path.join(dir)}"
end
end
end
end
before :push, 'compile'
end
limit asset tasks to one ssh session
namespace :assets do
desc "Compile assets"
task :compile do
next if fetch(:assets_compile).nil?
run_locally do
puts capture(fetch(:assets_compile))
end
end
desc "Push assets to remote"
task :push do
next unless any? :assets_output
run_locally do
roles(:all).each do |host|
fetch(:assets_output).each do |dir|
ssh = SSH.new(host, fetch(:ssh_options))
execute :rsync, "--rsh=\"ssh #{ssh.args.join(' ')}\"", fetch(:rsync_options), "#{dir}/", "#{ssh.remote}:#{release_path.join(dir)}"
end
end
end
end
before :push, 'compile'
end
|
namespace :deploy do
desc 'Upload compiled assets'
task :publish_assets do
on roles(:web) do
execute "rm -rf #{release_path}/web/compiled"
upload! "web/compiled", "#{release_path}/web/", recursive: true
end
end
desc 'Launch doctrine migration'
task :migrate do
on roles(:web) do
invoke 'symfony:console', 'doctrine:migrations:migrate', '--no-interaction'
end
end
desc "Put a robots.txt that disallow all indexing."
task :no_robots do
on roles(:web) do
execute "echo 'User-agent: *\\nDisallow: /' > #{release_path}/web/robots.txt"
end
end
end
Fix typo
namespace :deploy do
desc 'Upload compiled assets'
task :publish_assets do
on roles(:web) do
execute "rm -rf #{release_path}/web/compiled"
upload! "web/compiled", "#{release_path}/web/", recursive: true
end
end
desc 'Launch doctrine migration'
task :migrate do
on roles(:web) do
invoke 'symfony:console', 'doctrine:migrations:migrate', '--no-interaction'
end
end
desc "Put a robots.txt that disallow all indexing."
task :no_robots do
on roles(:web) do
execute "echo 'User-agent: *\nDisallow: /' > #{release_path}/web/robots.txt"
end
end
end
|
namespace :deploy do
desc 'Upload compiled assets'
task :publish_assets do
on roles(:web) do
if fetch(:publish_assets)
info "Upload assets on server"
execute "rm -rf #{release_path}/web/compiled"
upload! "web/compiled", "#{release_path}/web/", recursive: true
end
end
end
desc 'Launch doctrine migration'
task :migrate do
on roles(:web) do
info "Migrate database"
invoke 'symfony:console', 'doctrine:migrations:migrate', '--no-interaction'
end
end
desc "Put a robots.txt that disallow all indexing."
task :no_robots do
on roles(:web) do
info "Prevent robots indexation"
execute "printf 'User-agent: *\\nDisallow: /' > #{release_path}/web/robots.txt"
end
end
desc "Secure the project with htpasswd."
task :secure do
on roles(:web) do
info "Secure the web access with a htpasswd"
execute "htpasswd -cb #{release_path}/web/.htpasswd #{fetch(:htpasswd_user)} #{fetch(:htpasswd_pwd)}"
contents = <<-EOS.gsub(/^ {8}/, '')
s~#AUTHORIZATION~AuthUserFile #{release_path}/web/.htpasswd \\
AuthType Basic \\
AuthName "#{fetch(:application)}" \\
Require valid-user \\
Order Allow,Deny \\
Allow from env=NOPASSWD \\
EOS
fetch(:htpasswd_whitelist).each do |ip|
contents = "#{contents}Allow from #{ip} \\\n"
end
contents = "#{contents}Satisfy any~m"
upload! StringIO.new(contents), shared_path.join("auth_basic.sed")
execute "sed -i -f #{shared_path.join("auth_basic.sed")} #{release_path}/web/.htaccess"
end
end
after :starting, 'composer:install_executable'
after :publishing, 'symfony:assets:install'
after :publishing, 'deploy:publish_assets'
after :finishing, 'deploy:migrate'
after :finishing, 'deploy:cleanup'
end
Cleanup when finished
namespace :deploy do
desc 'Upload compiled assets'
task :publish_assets do
on roles(:web) do
if fetch(:publish_assets)
info "Upload assets on server"
execute "rm -rf #{release_path}/web/compiled"
upload! "web/compiled", "#{release_path}/web/", recursive: true
end
end
end
desc 'Launch doctrine migration'
task :migrate do
on roles(:web) do
info "Migrate database"
invoke 'symfony:console', 'doctrine:migrations:migrate', '--no-interaction'
end
end
desc "Put a robots.txt that disallow all indexing."
task :no_robots do
on roles(:web) do
info "Prevent robots indexation"
execute "printf 'User-agent: *\\nDisallow: /' > #{release_path}/web/robots.txt"
end
end
desc "Secure the project with htpasswd."
task :secure do
on roles(:web) do
info "Secure the web access with a htpasswd"
execute "htpasswd -cb #{release_path}/web/.htpasswd #{fetch(:htpasswd_user)} #{fetch(:htpasswd_pwd)}"
contents = <<-EOS.gsub(/^ {8}/, '')
s~#AUTHORIZATION~AuthUserFile #{release_path}/web/.htpasswd \\
AuthType Basic \\
AuthName "#{fetch(:application)}" \\
Require valid-user \\
Order Allow,Deny \\
Allow from env=NOPASSWD \\
EOS
fetch(:htpasswd_whitelist).each do |ip|
contents = "#{contents}Allow from #{ip} \\\n"
end
contents = "#{contents}Satisfy any~m"
upload! StringIO.new(contents), shared_path.join("auth_basic.sed")
execute "sed -i -f #{shared_path.join("auth_basic.sed")} #{release_path}/web/.htaccess"
end
end
after :starting, 'composer:install_executable'
after :publishing, 'symfony:assets:install'
after :publishing, 'deploy:publish_assets'
after :finishing, 'deploy:migrate'
after :finished, 'deploy:cleanup'
end
|
namespace :deploy do
desc 'Makes sure local git is in sync with remote.'
task :check_revision do
unless `git rev-parse HEAD` == `git rev-parse origin/master`
puts 'WARNING: HEAD is not the same as origin/master'
puts 'Run `git push` to sync changes.'
exit
end
end
desc 'Seed the database.'
task :refresh_sitemap do
on roles(:app) do
within "#{current_path}" do
with rails_env: :production do
execute :rake, 'sitemap:refresh'
end
end
end
end
end
Fixed a copy-paste typo in deploy.rake
namespace :deploy do
desc 'Makes sure local git is in sync with remote.'
task :check_revision do
unless `git rev-parse HEAD` == `git rev-parse origin/master`
puts 'WARNING: HEAD is not the same as origin/master'
puts 'Run `git push` to sync changes.'
exit
end
end
desc 'Refresh the sitemap of the site'
task :refresh_sitemap do
on roles(:app) do
within "#{current_path}" do
with rails_env: :production do
execute :rake, 'sitemap:refresh'
end
end
end
end
end
|
include ::Capistrano::Runit
include ::Capistrano::Runit::ResqueHelper
namespace :load do
task :defaults do
set :runit_resque_run_template, nil
set :runit_resque_default_hooks, -> { true }
set :runit_resque_role, -> { :app }
set :runit_resque_workers, -> { {'*' => 1} }
set :runit_resque_interval, "5"
set :runit_resque_environment_task, true
set :runit_resque_kill_signal, -> { 'QUIT' }
set :runit_resque_verbose, -> { true }
set :runit_resque_log_file, { ::File.join(shared_path, 'log', "resque.#{resque_environment}.log") }
end
end
namespace :deploy do
before :starting, :runit_check_resque_hooks do
invoke 'runit:resque:add_default_hooks' if fetch(:runit_resque_default_hooks)
end
end
namespace :runit do
namespace :resque do
# Helpers
def collect_resque_run_command(queue)
array = []
array << env_variables
array << "RAILS_ENV=#{resque_environment}"
array << "INTERVAL=#{fetch(:runit_resque_interval)}"
array << "QUEUE=#{queue}"
array << "VERBOSE=1" if fetch(:runit_resque_verbose)
array << "exec #{SSHKit.config.command_map[:rake]} #{"environment" if fetch(:runit_resque_environment_task)} resque:work"
array << output_redirection
array.compact.join(' ')
end
def resque_runit_stop_commamd
@resque_runit_stop_command ||= case fetch(:runit_resque_kill_signal)
when 'QUIT', 'TERM', 'KILL', 'CONT'
fetch(:runit_resque_kill_signal).downcase
when 'USR1'
'1'
when 'USR2'
'2'
when 'INT'
'interrupt'
end
end
def generate_namespace_for_resque_task(name, queue, count, parent_task)
my_namespace = "runit:resque:#{name}"
parent_task.application.define_task Rake::Task, "#{my_namespace}:setup" do
count.times.each do |i|
setup_service("resque_#{name}_#{i}", collect_resque_run_command(queue))
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:enable" do
count.times.each do |i|
enable_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:disable" do
count.times.each do |i|
disable_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:start" do
count.times.each do |i|
start_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:stop" do
on roles fetch("runit_resque_#{name}_role".to_sym) do
count.times.each do |i|
stop_service("resque_#{name}_#{i}", false)
end
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:quiet" do
on roles fetch("runit_resque_#{name}_role".to_sym) do
count.times.each do |i|
runit_execute_command("resque_#{name}_#{i}", '2')
end
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:restart" do
count.times.each do |i|
restart_service("resque_#{name}_#{i}")
end
end
end
task :add_default_hooks do
after 'deploy:check', 'runit:resque:check'
after 'deploy:starting', 'runit:resque:quiet'
after 'deploy:updated', 'runit:resque:stop'
after 'deploy:reverted', 'runit:resque:stop'
after 'deploy:published', 'runit:resque:start'
end
task :hook do |task|
fetch(:runit_resque_workers).each do |key, count|
name = if key == '*'
'general'
else
key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
end
set "runit_resque_#{name}_role".to_sym, -> { :app }
generate_namespace_for_resque_task(name, key, count, task)
end
end
task :check do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
check_service('resque', name)
end
end
task :stop do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:stop"].invoke
end
end
task :quiet do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:quiet"].invoke
end
end
task :start do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:start"].invoke
end
end
task :restart do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:restart"].invoke
end
end
end
end
Capistrano::DSL.stages.each do |stage|
after stage, 'runit:resque:hook'
end
Fix error
include ::Capistrano::Runit
include ::Capistrano::Runit::ResqueHelper
namespace :load do
task :defaults do
set :runit_resque_run_template, nil
set :runit_resque_default_hooks, -> { true }
set :runit_resque_role, -> { :app }
set :runit_resque_workers, -> { {'*' => 1} }
set :runit_resque_interval, "5"
set :runit_resque_environment_task, true
set :runit_resque_kill_signal, -> { 'QUIT' }
set :runit_resque_verbose, -> { true }
set :runit_resque_log_file, -> { ::File.join(shared_path, 'log', "resque.#{resque_environment}.log") }
end
end
namespace :deploy do
before :starting, :runit_check_resque_hooks do
invoke 'runit:resque:add_default_hooks' if fetch(:runit_resque_default_hooks)
end
end
namespace :runit do
namespace :resque do
# Helpers
def collect_resque_run_command(queue)
array = []
array << env_variables
array << "RAILS_ENV=#{resque_environment}"
array << "INTERVAL=#{fetch(:runit_resque_interval)}"
array << "QUEUE=#{queue}"
array << "VERBOSE=1" if fetch(:runit_resque_verbose)
array << "exec #{SSHKit.config.command_map[:rake]} #{"environment" if fetch(:runit_resque_environment_task)} resque:work"
array << output_redirection
array.compact.join(' ')
end
def resque_runit_stop_commamd
@resque_runit_stop_command ||= case fetch(:runit_resque_kill_signal)
when 'QUIT', 'TERM', 'KILL', 'CONT'
fetch(:runit_resque_kill_signal).downcase
when 'USR1'
'1'
when 'USR2'
'2'
when 'INT'
'interrupt'
end
end
def generate_namespace_for_resque_task(name, queue, count, parent_task)
my_namespace = "runit:resque:#{name}"
parent_task.application.define_task Rake::Task, "#{my_namespace}:setup" do
count.times.each do |i|
setup_service("resque_#{name}_#{i}", collect_resque_run_command(queue))
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:enable" do
count.times.each do |i|
enable_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:disable" do
count.times.each do |i|
disable_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:start" do
count.times.each do |i|
start_service("resque_#{name}_#{i}")
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:stop" do
on roles fetch("runit_resque_#{name}_role".to_sym) do
count.times.each do |i|
stop_service("resque_#{name}_#{i}", false)
end
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:quiet" do
on roles fetch("runit_resque_#{name}_role".to_sym) do
count.times.each do |i|
runit_execute_command("resque_#{name}_#{i}", '2')
end
end
end
parent_task.application.define_task Rake::Task, "#{my_namespace}:restart" do
count.times.each do |i|
restart_service("resque_#{name}_#{i}")
end
end
end
task :add_default_hooks do
after 'deploy:check', 'runit:resque:check'
after 'deploy:starting', 'runit:resque:quiet'
after 'deploy:updated', 'runit:resque:stop'
after 'deploy:reverted', 'runit:resque:stop'
after 'deploy:published', 'runit:resque:start'
end
task :hook do |task|
fetch(:runit_resque_workers).each do |key, count|
name = if key == '*'
'general'
else
key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
end
set "runit_resque_#{name}_role".to_sym, -> { :app }
generate_namespace_for_resque_task(name, key, count, task)
end
end
task :check do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
check_service('resque', name)
end
end
task :stop do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:stop"].invoke
end
end
task :quiet do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:quiet"].invoke
end
end
task :start do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:start"].invoke
end
end
task :restart do
fetch(:runit_resque_workers).each do |key, value|
key = 'general' if key == '*'
name = key.gsub(/\s*[^A-Za-z0-9\.\-]\s*/, '_')
::Rake::Task["runit:resque:#{name}:restart"].invoke
end
end
end
end
Capistrano::DSL.stages.each do |stage|
after stage, 'runit:resque:hook'
end
|
module Capistrano
  module Yeoman
    # Gem version for capistrano-yeoman.
    VERSION = "0.0.1"
  end
end
Increment the gem version after the fix.
module Capistrano
  module Yeoman
    # Gem version for capistrano-yeoman.
    VERSION = "0.0.2"
  end
end
require_relative 'providers/fdw/odbc/generic_odbc'
require_relative 'providers/fdw/odbc/mysql'
require_relative 'providers/fdw/odbc/postgresql'
require_relative 'providers/fdw/odbc/sqlserver'
require_relative 'providers/fdw/odbc/hive'
require_relative 'providers/fdw/pg_fdw'
module Carto
  class Connector
    # Registry of FDW-based connector provider classes. Each provider
    # class exposes .id, .name and .public? (see provider_item below).
    PROVIDERS = [
      GenericOdbcProvider,
      PostgreSQLProvider,
      MySqlProvider,
      SqlServerProvider,
      HiveProvider
    ]

    DEFAULT_PROVIDER = nil # No default provider

    class << self
      # Returns the provider class registered under +provider_id+, or
      # nil when no such provider exists.
      def provider_class(provider_id)
        provider_data provider_id
      end

      # Whether the provider is publicly available (nil if unknown).
      def provider_public?(provider_id)
        provider_item provider_id, :public?
      end

      # Human-readable name of the provider (nil if unknown).
      def provider_name(provider_id)
        provider_item provider_id, :name
      end

      # Ids of all registered providers.
      def provider_ids
        PROVIDERS.map(&:id)
      end

      private

      # Looks up the provider class by id, falling back to
      # DEFAULT_PROVIDER when +provider_id+ is nil; nil when not found.
      def provider_data(provider_id)
        provider_id ||= DEFAULT_PROVIDER
        PROVIDERS.find { |p| p.id == provider_id }
      end

      # Fetches attribute +item+ from the provider class, or nil when
      # the provider is unknown.
      def provider_item(provider_id, item)
        data = provider_data(provider_id)
        data&.send item.to_sym
      end
    end
  end
end
Remove debug messages
require_relative 'providers/fdw/odbc/generic_odbc'
require_relative 'providers/fdw/odbc/mysql'
require_relative 'providers/fdw/odbc/postgresql'
require_relative 'providers/fdw/odbc/sqlserver'
require_relative 'providers/fdw/odbc/hive'
require_relative 'providers/fdw/pg_fdw'
module Carto
  class Connector
    # Available FDW connector provider classes; each responds to
    # .id, .name and .public?.
    PROVIDERS = [
      GenericOdbcProvider,
      PostgreSQLProvider,
      MySqlProvider,
      SqlServerProvider,
      HiveProvider
    ]

    DEFAULT_PROVIDER = nil # No default provider

    class << self
      # Provider class registered under +provider_id+ (nil if unknown).
      def provider_class(provider_id)
        provider_data(provider_id)
      end

      # Whether the provider is public (nil if unknown).
      def provider_public?(provider_id)
        provider_item(provider_id, :public?)
      end

      # Display name of the provider (nil if unknown).
      def provider_name(provider_id)
        provider_item(provider_id, :name)
      end

      # Ids of every registered provider.
      def provider_ids
        PROVIDERS.map { |provider| provider.id }
      end

      private

      # Resolve a provider class by id (DEFAULT_PROVIDER when nil).
      def provider_data(provider_id)
        wanted = provider_id || DEFAULT_PROVIDER
        PROVIDERS.detect { |provider| provider.id == wanted }
      end

      # Read attribute +item+ from the resolved provider, if any.
      def provider_item(provider_id, item)
        provider = provider_data(provider_id)
        provider && provider.send(item.to_sym)
      end
    end
  end
end
|
#
# Author:: Steven Danna (<steve@opscode.com>)
# Copyright:: Copyright 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Opc
  # knife command that creates an organization on a private Chef
  # server and optionally associates an initial user with it.
  class OpcOrgCreate < Chef::Knife
    category "OPSCODE PRIVATE CHEF ORGANIZATION MANAGEMENT"
    banner "knife opc org create ORG_SHORT_NAME ORG_FULL_NAME (options)"

    option :filename,
      :long => '--filename FILENAME',
      :short => '-f FILENAME'

    option :association_user,
      :long => '--association_user USERNAME',
      :short => '-a USERNAME'

    attr_accessor :org_name, :org_full_name

    # Entry point: validates arguments, creates the organization, and
    # emits its private key (to --filename when given, stdout otherwise).
    def run
      @org_name, @org_full_name = @name_args
      unless org_name && org_full_name
        ui.fatal "You must specify an ORG_NAME and an ORG_FULL_NAME"
        show_usage
        exit 1
      end
      @chef_rest = Chef::REST.new(Chef::Config[:chef_server_root])
      result = @chef_rest.post_rest("organizations/", { :name => org_name, :full_name => org_full_name })
      if config[:filename]
        File.open(config[:filename], "w") { |f| f.print(result['private_key']) }
      else
        ui.msg result['private_key']
      end
      associate_user config[:association_user] if config[:association_user]
    end

    # Invites +username+ to the new organization and immediately
    # accepts the invitation on the user's behalf.
    def associate_user(username)
      response = @chef_rest.post_rest "organizations/#{org_name}/association_requests", { :user => username }
      association_id = response["uri"].split("/").last
      @chef_rest.put_rest "users/#{username}/association_requests/#{association_id}", { :response => 'accept' }
    end
  end
end
Add user to admins and billing-admins group after association.
#
# Author:: Steven Danna (<steve@opscode.com>)
# Copyright:: Copyright 2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
module Opc
  # knife command that creates an organization on a private Chef
  # server, emits its private key, and optionally associates a user
  # with full admin and billing-admin rights.
  class OpcOrgCreate < Chef::Knife
    category "OPSCODE PRIVATE CHEF ORGANIZATION MANAGEMENT"
    banner "knife opc org create ORG_SHORT_NAME ORG_FULL_NAME (options)"
    option :filename,
      :long => '--filename FILENAME',
      :short => '-f FILENAME'
    option :association_user,
      :long => '--association_user USERNAME',
      :short => '-a USERNAME'
    attr_accessor :org_name, :org_full_name

    # Validates CLI args, creates the org, and writes the returned
    # private key to --filename (or stdout when no file was given).
    def run
      @org_name, @org_full_name = @name_args
      if !org_name || !org_full_name
        ui.fatal "You must specify an ORG_NAME and an ORG_FULL_NAME"
        show_usage
        exit 1
      end
      org_args = { :name => org_name, :full_name => org_full_name }
      @chef_rest = Chef::REST.new(Chef::Config[:chef_server_root])
      result = @chef_rest.post_rest("organizations/", org_args)
      if config[:filename]
        File.open(config[:filename], "w") do |f|
          f.print(result['private_key'])
        end
      else
        ui.msg result['private_key']
      end
      associate_user config[:association_user] if config[:association_user]
    end

    # Invites +username+, accepts the invite on their behalf, then
    # grants admin and billing-admin group membership.
    def associate_user(username)
      # First, create and accept the organization invite
      request_body = { :user => username }
      response = @chef_rest.post_rest "organizations/#{org_name}/association_requests", request_body
      association_id = response["uri"].split("/").last
      @chef_rest.put_rest "users/#{username}/association_requests/#{association_id}", { :response => 'accept' }
      # Next, add the user to the admin and billing-admin group
      add_user_to_group(username, "admins")
      add_user_to_group(username, "billing-admins")
    end

    # Adds +username+ to +groupname+ by rewriting the group's actor
    # list. Note, this should *really* use the same method used in
    # knife-acl.
    def add_user_to_group(username, groupname)
      group = @chef_rest.get_rest "organizations/#{org_name}/groups/#{groupname}"
      body_hash = {
        :groupname => groupname,
        :actors => {
          # Union (|) instead of concat: avoids mutating the fetched
          # group payload and avoids duplicate entries when re-run.
          "users" => group["actors"] | [username],
          "groups" => group["groups"]
        }
      }
      @chef_rest.put_rest "organizations/#{org_name}/groups/#{groupname}", body_hash
    end
  end
end
|
# encoding: utf-8
module ChinaSMS
  module Service
    # Adapter for the Yunpian (云片) v1 HTTP API.
    module Yunpian
      extend self

      # Account information lives under /user/, not /sms/ (the old
      # /v1/sms/get.json endpoint was wrong).
      GET_URL = "http://yunpian.com/v1/user/get.json"
      SEND_URL = 'http://yunpian.com/v1/sms/send.json'
      TPL_SEND_URL = 'http://yunpian.com/v1/sms/tpl_send.json'

      # Sends +content+ to +phone+. A Hash content is sent through the
      # template endpoint (keys become template variables); a String is
      # sent as plain text. Returns the parsed API reply.
      def to phone, content, options = {}
        options[:tpl_id] ||= 2 # Yunpian's default template id
        options[:apikey] ||= options[:password]
        except! options, :username, :password
        res = if content.is_a? Hash
                message = parse_content content
                options.merge!({ mobile: phone, tpl_value: message })
                Net::HTTP.post_form(URI.parse(TPL_SEND_URL), options)
              else
                except! options, :tpl_id
                message = content
                options.merge!({ mobile: phone, text: message })
                Net::HTTP.post_form(URI.parse(SEND_URL), options)
              end
        result res.body
      end

      # Fetches account information; authenticates with apikey (or the
      # legacy :password option).
      def get options = {}
        options[:apikey] ||= options[:password]
        except! options, :username, :password
        res = Net::HTTP.post_form(URI.parse(GET_URL), options)
        result res.body
      end

      # Parses the API's JSON reply; wraps parse failures in a
      # 502-coded hash instead of raising.
      def result body
        begin
          JSON.parse body
        rescue => e
          {
            code: 502,
            msg: "内容解析错误",
            detail: e.to_s
          }
        end
      end

      private

      # Destructively removes +keys+ from +options+; returns options.
      def except! options = {}, *keys
        keys.each {|key| options.delete(key)}
        options
      end

      # Serializes a template-value hash as "#k#=v" pairs joined by '&'.
      def parse_content content
        content.map { |k, v| "##{k}#=#{v}" }.join('&')
      end
    end
  end
end
Change the Yunpian API's GET_URL to the user-info endpoint (/v1/user/get.json).
# encoding: utf-8
module ChinaSMS
  module Service
    # ChinaSMS driver for the Yunpian (云片) v1 HTTP API.
    module Yunpian
      extend self

      GET_URL = "http://yunpian.com/v1/user/get.json"
      SEND_URL = 'http://yunpian.com/v1/sms/send.json'
      TPL_SEND_URL = 'http://yunpian.com/v1/sms/tpl_send.json'

      # Delivers +content+ (template Hash or plain String) to +phone+
      # and returns the parsed API reply.
      def to phone, content, options = {}
        options[:tpl_id] ||= 2
        options[:apikey] ||= options[:password]
        except! options, :username, :password
        if content.is_a? Hash
          options.merge!(mobile: phone, tpl_value: parse_content(content))
          res = Net::HTTP.post_form(URI.parse(TPL_SEND_URL), options)
        else
          except! options, :tpl_id
          options.merge!(mobile: phone, text: content)
          res = Net::HTTP.post_form(URI.parse(SEND_URL), options)
        end
        result res.body
      end

      # Fetches account information for the configured apikey.
      def get options = {}
        options[:apikey] ||= options[:password]
        except! options, :username, :password
        result Net::HTTP.post_form(URI.parse(GET_URL), options).body
      end

      # Parses the JSON reply; wraps parse failures in a 502 hash.
      def result body
        JSON.parse body
      rescue => e
        { code: 502, msg: "内容解析错误", detail: e.to_s }
      end

      private

      # Destructively strips +keys+ from +options+; returns options.
      def except! options = {}, *keys
        keys.each { |key| options.delete(key) }
        options
      end

      # Serializes template values as "#k#=v" pairs joined by '&'.
      def parse_content content
        content.map { |key, value| "##{key}#=#{value}" }.join('&')
      end
    end
  end
end
|
###############################################################################
# This client is for using Azure Resource Manager. It will be obsoleted soon
# after azure-sdk-for-ruby supports Azure Resource Manager.
###############################################################################
module Bosh::AzureCloud
# Base error for Azure REST failures raised by this client.
class AzureError < Bosh::Clouds::CloudError; end
# Raised when the authentication token is rejected (HTTP 401).
class AzureUnauthorizedError < AzureError; end
# Raised when a resource is missing (HTTP 204/404 in http_get).
class AzureNoFoundError < AzureError; end
class AzureClient2
include Helpers
# ARM API versions: API_VERSION for resource operations,
# API_VERSION_1 for resource-group operations.
API_VERSION = '2015-05-01-preview'
API_VERSION_1 = '2015-01-01'
# HTTP status codes this client branches on.
HTTP_CODE_OK = 200
HTTP_CODE_CREATED = 201
HTTP_CODE_ACCEPTED = 202
HTTP_CODE_NOCONTENT = 204
HTTP_CODE_PARTIALCONTENT = 206
HTTP_CODE_BADREQUEST = 400
HTTP_CODE_UNAUTHORIZED = 401
HTTP_CODE_FORBIDDEN = 403
HTTP_CODE_NOTFOUND = 404
HTTP_CODE_CONFLICT = 409
HTTP_CODE_LENGTHREQUIRED = 411
HTTP_CODE_PRECONDITIONFAILED = 412
# ARM resource providers and resource types addressed by this client.
REST_API_PROVIDER_COMPUTER = 'Microsoft.Compute'
REST_API_COMPUTER_VIRTUAL_MACHINES = 'virtualMachines'
REST_API_PROVIDER_NETWORK = 'Microsoft.Network'
REST_API_NETWORK_PUBLIC_IP_ADDRESSES = 'publicIPAddresses'
REST_API_NETWORK_LOAD_BALANCERS = 'loadBalancers'
REST_API_NETWORK_INTERFACES = 'networkInterfaces'
REST_API_NETWORK_VNETS = 'virtualNetworks'
REST_API_PROVIDER_STORAGE = 'Microsoft.Storage'
REST_API_STORAGE_ACCOUNTS = 'storageAccounts'
# @param [Hash] azure_properties CPI configuration; this client reads
#   'subscription_id', 'resource_group_name', 'environment',
#   'tenant_id', 'client_id' and 'client_secret'.
# @param [Logger] logger destination for debug/info/warn messages.
def initialize(azure_properties, logger)
  @logger = logger
  @azure_properties = azure_properties
end
# Common
# Builds the ARM resource path for a resource inside the configured
# subscription and resource group:
#   /subscriptions/<sub>/resourceGroups/<rg>/providers/<provider>/<type>[/<name>[/<others>]]
def rest_api_url(resource_provider, resource_type, name = nil, others = nil)
  segments = []
  segments << "/subscriptions/#{URI.escape(@azure_properties['subscription_id'])}"
  segments << "/resourceGroups/#{URI.escape(@azure_properties['resource_group_name'])}"
  segments << "/providers/#{resource_provider}"
  segments << "/#{resource_type}"
  segments << "/#{URI.escape(name)}" unless name.nil?
  segments << "/#{URI.escape(others)}" unless others.nil?
  segments.join
end
# Splits an ARM resource id of the form
#   /subscriptions/<sub>/resourceGroups/<rg>/providers/<provider>/<type>/<name>...
# into its components. Raises AzureError when the id does not match.
def parse_name_from_id(id)
  pattern = %r{/subscriptions/([^/]*)/resourceGroups/([^/]*)/providers/([^/]*)/([^/]*)/([^/]*)(.*)}
  match = pattern.match(id)
  raise AzureError, "\"#{id}\" is not a valid URL." if match.nil?
  {
    :subscription_id     => match[1],
    :resource_group_name => match[2],
    :provider_name       => match[3],
    :resource_type       => match[4],
    :resource_name       => match[5]
  }
end
# Issues a DELETE for +url+, logging and re-raising any failure.
def delete_resource_by_id(url)
  http_delete(url, nil, 10)
rescue => e
  @logger.warn("delete_resource_by_id - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# GETs +url+ and returns the parsed resource, or nil when Azure
# reports it as not found. Any other error is logged and re-raised.
def get_resource_by_id(url, params = {})
  http_get(url, params)
rescue AzureNoFoundError
  nil
rescue => e
  @logger.warn("get_resource_by_id - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Resource Groups
# Fetches the configured resource group; returns a symbol-keyed Hash
# (:id, :name, :location, :tags, :provisioning_state) or nil if absent.
def get_resource_group()
  url =  "/subscriptions/#{URI.escape(@azure_properties['subscription_id'])}"
  url += "/resourceGroups/#{URI.escape(@azure_properties['resource_group_name'])}"
  result = get_resource_by_id(url, { 'api-version' => API_VERSION_1 })
  return nil if result.nil?
  {
    :id                 => result['id'],
    :name               => result['name'],
    :location           => result['location'],
    :tags               => result['tags'],
    :provisioning_state => result['properties']['provisioningState']
  }
end
# Compute/Virtual Machines
# Public: Provisions a virtual machine based on the supplied configuration.
#
# ==== Attributes
#
# @param [Hash] vm_params - Parameters for creating the virtual machine.
# @param [Hash] network_interface - Network Interface Instance.
#
# ==== Params
#
# Accepted key/value pairs are:
# * +:name+ - String. Name of virtual machine.
# * +:location+ - String. The location where the virtual machine will be created.
# * +:vm_size+ - String. Specifies the size of the virtual machine instance.
# * +:username+ - String. User name for the virtual machine instance.
# * +:custom_data+ - String. Specifies a base-64 encoded string of custom data.
# * +:image_uri+ - String. The URI of the image.
# * +:os_vhd_uri+ - String. The URI of the OS disk for the virtual machine instance.
# * +:ssh_cert_data+ - String. The content of SSH certificate.
#
# Creates (PUT, with server-side validation) the VM resource: hardware
# profile, Linux OS profile with SSH-only auth, OS disk from image,
# and a single NIC reference. Accepted vm_params keys are documented
# in the comment block above. Errors are logged and re-raised.
def create_virtual_machine(vm_params, network_interface)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, vm_params[:name])
  vm = {
    'name' => vm_params[:name],
    'location' => vm_params[:location],
    'type' => "#{REST_API_PROVIDER_COMPUTER}/#{REST_API_COMPUTER_VIRTUAL_MACHINES}",
    'properties' => {
      'hardwareProfile' => {
        'vmSize' => vm_params[:vm_size]
      },
      'osProfile' => {
        'customData' => vm_params[:custom_data],
        'computername' => vm_params[:name],
        'adminUsername' => vm_params[:username],
        # Password login disabled; only the provided SSH key is valid.
        'linuxConfiguration' => {
          'disablePasswordAuthentication' => 'true',
          'ssh' => {
            'publicKeys' => [
              {
                'path' => "/home/#{vm_params[:username]}/.ssh/authorized_keys",
                'keyData' => vm_params[:ssh_cert_data],
              }
            ]
          },
        },
      },
      'storageProfile' => {
        'osDisk' => {
          'name' => "#{vm_params[:name]}_os_disk",
          'osType' => 'Linux',
          'createOption' => 'FromImage',
          'caching' => 'ReadWrite',
          'image' => {
            'uri' => vm_params[:image_uri]
          },
          'vhd' => {
            'uri' => vm_params[:os_vhd_uri]
          }
        },
      },
      'networkProfile' => {
        'networkInterfaces' => [
          {
            'id' => network_interface[:id]
          }
        ]
      }
    }
  }
  # 'validating' asks ARM to validate the template before creating.
  params = {
    'validating' => 'true'
  }
  http_put(url, vm, 30, params)
rescue => e
  @logger.warn("create_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# POSTs a restart action for the named VM; failures are logged and
# re-raised.
def restart_virtual_machine(name)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name, 'restart')
  http_post(url)
rescue => e
  @logger.warn("restart_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Public: Set tags for a VM
# @param [String] name Name of virtual machine.
# @param [Hash] metadata metadata key/value pairs.
# Replaces the VM's tag set with +tags+ (read-modify-write of the
# whole VM resource).
# @raise [AzureNoFoundError] when the VM does not exist.
def update_tags_of_virtual_machine(name, tags)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  vm = get_resource_by_id(url)
  if vm.nil?
    raise AzureNoFoundError, "update_tags_of_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    vm['tags'] = tags
    http_put(url, vm)
  rescue => e
    @logger.warn("update_tags_of_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Attaches the VHD at +disk_uri+ to VM +name+ at the first free lun.
#
# @return [Hash] the attached-disk description
#   (:name, :lun, :createOption, :caching, :vhd).
# @raise [AzureNoFoundError] when the VM does not exist.
# @raise [AzureError] when no data-disk lun in 0..128 is free
#   (previously a taken lun 0 was silently reused in that case).
def attach_disk_to_virtual_machine(name, disk_name, disk_uri)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  result = get_resource_by_id(url)
  if result.nil?
    raise AzureNoFoundError, "attach_disk_to_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    data_disks = result['properties']['storageProfile']['dataDisks']
    # First lun not used by any currently-attached data disk.
    lun = (0..128).find { |candidate| data_disks.none? { |d| d['lun'] == candidate } }
    raise AzureError, "attach_disk_to_virtual_machine - cannot find a free lun on \"#{name}\"" if lun.nil?
    new_disk = {
      'name' => disk_name,
      'lun' => lun,
      'createOption' => 'Attach',
      'caching' => 'ReadWrite',
      'vhd' => { 'uri' => disk_uri }
    }
    data_disks.push(new_disk)
    @logger.info("attach_disk_to_virtual_machine - attach disk #{disk_name} to #{lun}")
    http_put(url, result)
    # Return a symbol-keyed description of the attached disk.
    {
      :name => disk_name,
      :lun => lun,
      :createOption => 'Attach',
      :caching => 'ReadWrite',
      :vhd => { :uri => disk_uri }
    }
  rescue => e
    @logger.warn("attach_disk_to_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Removes the named data disk from the VM's storage profile.
# @raise [AzureNoFoundError] when the VM is missing.
# @raise [RuntimeError] when the disk is not currently attached.
def detach_disk_from_virtual_machine(name, disk_name)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  result = get_resource_by_id(url)
  if result.nil?
    raise AzureNoFoundError, "detach_disk_from_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    data_disks = result['properties']['storageProfile']['dataDisks']
    attached = data_disks.find { |d| d['name'] == disk_name }
    raise "the given disk #{disk_name} is not attached to the given virtual machine #{name}" if attached.nil?
    data_disks.delete_if { |d| d['name'] == disk_name }
    @logger.info("detach_disk_from_virtual_machine - detach disk #{disk_name} from lun #{attached['lun']}")
    http_put(url, result)
  rescue => e
    @logger.warn("detach_disk_from_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Convenience wrapper: fetch a VM by name in the configured group.
def get_virtual_machine_by_name(name)
  get_virtual_machine(rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name))
end
# Fetches a VM resource from +url+ and maps it to a symbol-keyed Hash
# (:id, :name, :location, :tags, :provisioning_state, :size,
# :os_disk, :data_disks, :network_interface). Returns nil when the VM
# does not exist.
def get_virtual_machine(url)
  vm = nil
  result = get_resource_by_id(url)
  unless result.nil?
    vm = {}
    vm[:id] = result['id']
    vm[:name] = result['name']
    vm[:location] = result['location']
    vm[:tags] = result['tags']
    properties = result['properties']
    vm[:provisioning_state] = properties['provisioningState']
    vm[:size] = properties['hardwareProfile']['vmSize']
    storageProfile = properties['storageProfile']
    vm[:os_disk] = {}
    vm[:os_disk][:name] = storageProfile['osDisk']['name']
    vm[:os_disk][:uri] = storageProfile['osDisk']['vhd']['uri']
    vm[:os_disk][:caching] = storageProfile['osDisk']['caching']
    vm[:data_disks] = []
    storageProfile['dataDisks'].each do |data_disk|
      disk = {}
      disk[:name] = data_disk['name']
      disk[:lun] = data_disk['lun']
      disk[:uri] = data_disk['vhd']['uri']
      disk[:caching] = data_disk['caching']
      vm[:data_disks].push(disk)
    end
    # Only the first NIC is surfaced; its id is resolved into a full
    # network-interface hash.
    interface_id = properties['networkProfile']['networkInterfaces'][0]['id']
    vm[:network_interface] = get_network_interface(interface_id)
  end
  vm
end
# Deletes the named VM; failures propagate from delete_resource_by_id.
def delete_virtual_machine(name)
  @logger.debug("delete_virtual_machine - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name))
end
# Network/Public IP
# Creates a public IP in +location+. Allocation is Static by default,
# Dynamic when +is_static+ is false.
def create_public_ip(name, location, is_static = true)
  url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name)
  allocation = is_static ? 'Static' : 'Dynamic'
  payload = {
    'name' => name,
    'location' => location,
    'properties' => { 'publicIPAllocationMethod' => allocation }
  }
  http_put(url, payload, 10)
rescue => e
  @logger.warn("create_public_ip - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Convenience wrapper: fetch a public IP by name.
def get_public_ip_by_name(name)
  get_public_ip(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name))
end
# Fetches a public IP resource from +url+ and maps it to a
# symbol-keyed Hash; returns nil when the resource does not exist.
def get_public_ip(url)
  ip_address = nil
  result = get_resource_by_id(url)
  unless result.nil?
    ip_address = {}
    ip_address[:id] = result['id']
    ip_address[:name] = result['name']
    ip_address[:location] = result['location']
    properties = result['properties']
    ip_address[:provisioning_state] = properties['provisioningState']
    ip_address[:ip_address] = properties['ipAddress']
    ip_address[:public_ip_allocation_method] = properties['publicIPAllocationMethod']
    # NOTE(review): 'ipConfigurations' is indexed with ['id'] directly;
    # if this API version returns an array here this raises TypeError —
    # confirm the 2015-05-01-preview payload shape.
    ip_address[:ip_configuration_id] = properties['ipConfigurations']['id'] unless properties['ipConfigurations'].nil?
    unless properties['dnsSettings'].nil?
      ip_address[:domain_name_label] = properties['dnsSettings']['domainNameLabel']
      ip_address[:fqdn] = properties['dnsSettings']['fqdn']
    end
  end
  ip_address
end
# Lists every public IP in the resource group as an array of
# symbol-keyed hashes (same shape as get_public_ip — the mapping is
# duplicated here). Returns an empty array when the listing is absent.
def list_public_ips()
  ip_addresses = []
  url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES)
  result = get_resource_by_id(url)
  unless result.nil?
    result['value'].each do |ret|
      ip_address = {}
      ip_address[:id] = ret['id']
      ip_address[:name] = ret['name']
      ip_address[:location] = ret['location']
      properties = ret['properties']
      ip_address[:provisioning_state] = properties['provisioningState']
      ip_address[:ip_address] = properties['ipAddress']
      ip_address[:public_ip_allocation_method] = properties['publicIPAllocationMethod']
      # NOTE(review): same direct ['id'] indexing of 'ipConfigurations'
      # as in get_public_ip — confirm the payload shape.
      ip_address[:ip_configuration_id] = properties['ipConfigurations']['id'] unless properties['ipConfigurations'].nil?
      unless properties['dnsSettings'].nil?
        ip_address[:domain_name_label] = properties['dnsSettings']['domainNameLabel']
        ip_address[:fqdn] = properties['dnsSettings']['fqdn']
      end
      ip_addresses.push(ip_address)
    end
  end
  ip_addresses
end
# Deletes the named public IP; failures propagate.
def delete_public_ip(name)
  @logger.debug("delete_public_ip - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name))
end
# Network/Load Balancer
# Creates a load balancer bound to +public_ip+, with frontend config
# 'LBFE', backend pool 'LBBE', and one inbound NAT rule per endpoint.
# Endpoints are "frontend_port:backend_port" strings.
def create_load_balancer(name, public_ip, tcp_endpoints = [], udp_endpoints = [])
  url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name)
  load_balancer = {
    'name' => name,
    'location' => public_ip[:location],
    'properties' => {
      'frontendIPConfigurations' => [
        'name' => 'LBFE',
        'properties' => {
          #'privateIPAllocationMethod' => 'Dynamic',
          'publicIPAddress' => {
            'id' => public_ip[:id]
          }
        }
      ],
      'backendAddressPools' => [
        'name' => 'LBBE'
      ],
      'inboundNatRules' => [],
    }
  }
  frontend_ip_configuration_id = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name, 'frontendIPConfigurations/LBFE')
  # Shared NAT-rule builder — this construction was duplicated inline
  # for the TCP and UDP endpoint lists.
  build_nat_rule = lambda do |endpoint, protocol|
    frontend_port, backend_port = endpoint.split(':')
    {
      'name' => "NatRule-#{protocol}EndPoints-#{frontend_port}",
      'properties' => {
        'frontendPort' => frontend_port,
        'backendPort' => backend_port,
        'enableFloatingIP' => false,
        'protocol' => protocol,
        'frontendIPConfiguration' => {
          "id" => frontend_ip_configuration_id
        }
      }
    }
  end
  tcp_endpoints.each do |endpoint|
    load_balancer['properties']['inboundNatRules'].push(build_nat_rule.call(endpoint, 'Tcp'))
  end
  udp_endpoints.each do |endpoint|
    load_balancer['properties']['inboundNatRules'].push(build_nat_rule.call(endpoint, 'Udp'))
  end
  http_put(url, load_balancer, 10)
rescue => e
  @logger.warn("create_load_balancer - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Convenience wrapper: fetch a load balancer by name.
def get_load_balancer_by_name(name)
  get_load_balancer(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name))
end
# Fetches a load balancer from +url+ and maps it to a symbol-keyed
# Hash with :frontend_ip_configurations and :backend_address_pools
# arrays. Returns nil when the resource does not exist.
def get_load_balancer(url)
  load_balancer = nil
  result = get_resource_by_id(url)
  unless result.nil?
    load_balancer = {}
    load_balancer[:id] = result['id']
    load_balancer[:name] = result['name']
    load_balancer[:location] = result['location']
    properties = result['properties']
    load_balancer[:provisioning_state] = properties['provisioningState']
    frontend = properties['frontendIPConfigurations']
    load_balancer[:frontend_ip_configurations] = []
    frontend.each do |frontend_ip|
      ip = {}
      ip[:name] = frontend_ip['name']
      ip[:id] = frontend_ip['id']
      ip[:provisioning_state] = frontend_ip['properties']['provisioningState']
      ip[:private_ip_allocation_method] = frontend_ip['properties']['privateIPAllocationMethod']
      # The referenced public IP id is resolved into a full hash.
      ip[:public_ip] = get_public_ip(frontend_ip['properties']['publicIPAddress']['id'])
      ip[:inbound_nat_rules] = frontend_ip['properties']['inboundNatRules']
      load_balancer[:frontend_ip_configurations].push(ip)
    end
    backend = properties['backendAddressPools']
    load_balancer[:backend_address_pools] = []
    backend.each do |backend_ip|
      ip = {}
      ip[:name] = backend_ip['name']
      ip[:id] = backend_ip['id']
      ip[:provisioning_state] = backend_ip['properties']['provisioningState']
      ip[:backend_ip_configurations] = backend_ip['properties']['backendIPConfigurations']
      load_balancer[:backend_address_pools].push(ip)
    end
  end
  load_balancer
end
# Deletes the named load balancer; failures propagate.
def delete_load_balancer(name)
  @logger.debug("delete_load_balancer - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name))
end
# Network/Network Interface
# Public: Create a network interface based on the supplied configuration.
#
# ==== Attributes
#
# @param [Hash] nic_params - Parameters for creating the network interface.
# @param [Hash] subnet - The subnet which the network interface is binded to.
# @param [Hash] load_balancer - The load balancer which the network interface is binded to.
#
# ==== Params
#
# Accepted key/value pairs are:
# * +:name+ - String. Name of network interface.
# * +:location+ - String. The location where the network interface will be created.
# * +:private_ip - String. Private IP address which the network interface will use.
# * +:dns_servers - Array. DNS servers.
# * +:public_ip - Hash. The public IP which the network interface is binded to.
#
# Creates (PUT) a NIC bound to +subnet+, optionally wired into
# +load_balancer+'s first backend pool and its frontend NAT rules.
# Accepted nic_params keys are documented in the comment block above.
def create_network_interface(nic_params, subnet, load_balancer = nil)
  url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, nic_params[:name])
  interface = {
    'name' => nic_params[:name],
    'location' => nic_params[:location],
    'properties' => {
      'ipConfigurations' => [
        {
          'name' => 'ipconfig1',
          'properties' => {
            'privateIPAddress' => nic_params[:private_ip],
            # Static allocation only when an explicit IP was requested.
            'privateIPAllocationMethod' => nic_params[:private_ip].nil? ? 'Dynamic' : 'Static',
            'publicIPAddress' => nic_params[:public_ip].nil? ? nil : { 'id' => nic_params[:public_ip][:id] },
            'subnet' => {
              'id' => subnet[:id]
            }
          }
        }
      ],
      'dnsSettings' => {
        'dnsServers' => nic_params[:dns_servers].nil? ? [] : nic_params[:dns_servers]
      }
    }
  }
  unless load_balancer.nil?
    interface['properties']['ipConfigurations'][0]['properties']['loadBalancerBackendAddressPools'] = [
      {
        'id' => load_balancer[:backend_address_pools][0][:id]
      }
    ]
    interface['properties']['ipConfigurations'][0]['properties']['loadBalancerInboundNatRules'] =
      load_balancer[:frontend_ip_configurations][0][:inbound_nat_rules]
  end
  http_put(url, interface, 10)
rescue => e
  @logger.warn("create_network_interface - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Convenience wrapper: fetch a network interface by name.
def get_network_interface_by_name(name)
  get_network_interface(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, name))
end
# Fetches a NIC from +url+ and maps it to a symbol-keyed Hash;
# resolves any attached public IP and load balancer into nested
# hashes. Returns nil when the resource does not exist.
def get_network_interface(url)
  interface = nil
  result = get_resource_by_id(url)
  unless result.nil?
    interface = {}
    interface[:id] = result['id']
    interface[:name] = result['name']
    interface[:location] = result['location']
    properties = result['properties']
    interface[:provisioning_state] = properties['provisioningState']
    unless properties['dnsSettings']['dnsServers'].nil?
      interface[:dns_settings] = []
      properties['dnsSettings']['dnsServers'].each { |dns| interface[:dns_settings].push(dns) }
    end
    # Only the first ipConfiguration is surfaced.
    ip_configuration = properties['ipConfigurations'][0]
    interface[:ip_configuration_id] = ip_configuration['id']
    ip_configuration_properties = ip_configuration['properties']
    interface[:private_ip] = ip_configuration_properties['privateIPAddress']
    interface[:private_ip_allocation_method] = ip_configuration_properties['privateIPAllocationMethod']
    unless ip_configuration_properties['publicIPAddress'].nil?
      interface[:public_ip] = get_public_ip(ip_configuration_properties['publicIPAddress']['id'])
    end
    unless ip_configuration_properties['loadBalancerBackendAddressPools'].nil?
      names = parse_name_from_id(ip_configuration_properties['loadBalancerBackendAddressPools'][0]['id'])
      interface[:load_balancer] = get_load_balancer_by_name(names[:resource_name])
    end
  end
  interface
end
# Deletes the named network interface; failures propagate.
def delete_network_interface(name)
  @logger.debug("delete_network_interface - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, name))
end
# Network/Subnet
# Convenience wrapper: fetch a subnet by vnet and subnet name.
def get_network_subnet_by_name(vnet_name, subnet_name)
  get_network_subnet(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_VNETS, vnet_name, "subnets/#{subnet_name}"))
end
# Fetches a subnet from +url+; returns {:id, :name,
# :provisioning_state, :address_prefix} or nil when absent.
def get_network_subnet(url)
  result = get_resource_by_id(url)
  return nil if result.nil?
  {
    :id                 => result['id'],
    :name               => result['name'],
    :provisioning_state => result['properties']['provisioningState'],
    :address_prefix     => result['properties']['addressPrefix']
  }
end
# Storage/StorageAccounts
# Convenience wrapper: fetch a storage account by name.
def get_storage_account_by_name(name)
  get_storage_account(rest_api_url(REST_API_PROVIDER_STORAGE, REST_API_STORAGE_ACCOUNTS, name))
end
# Fetches a storage account from +url+; returns {:id, :name,
# :location, :provisioning_state, :account_type, :primary_endpoints}
# or nil when absent.
def get_storage_account(url)
  result = get_resource_by_id(url)
  return nil if result.nil?
  properties = result['properties']
  {
    :id                 => result['id'],
    :name               => result['name'],
    :location           => result['location'],
    :provisioning_state => properties['provisioningState'],
    :account_type       => properties['accountType'],
    :primary_endpoints  => properties['primaryEndpoints']
  }
end
private
# Builds a Net::HTTP client for +uri+ with TLS enabled.
# NOTE(review): no verify_mode/CA configuration is set here — relies
# on Net::HTTP defaults.
def http(uri)
  http = Net::HTTP.new(uri.host, uri.port)
  http.use_ssl = true
  # Uncomment below line for debug
  #http.set_debug_output($stdout)
  http
end
# Returns a valid OAuth2 access token for ARM, requesting a new one
# from Azure AD (client-credentials grant) when none is cached, the
# cached token has expired, or +force_refresh+ is set.
def get_token(force_refresh = false)
  if @token.nil? || (Time.at(@token['expires_on'].to_i) - Time.now) <= 0 || force_refresh
    @logger.info("get_token - trying to get/refresh Azure authentication token")
    params = {}
    params['api-version'] = API_VERSION
    uri = URI(AZURE_ENVIRONMENTS[@azure_properties['environment']]['activeDirectoryEndpointUrl'] + '/' + @azure_properties['tenant_id'] + '/oauth2/token')
    uri.query = URI.encode_www_form(params)
    params = {}
    params['grant_type'] = 'client_credentials'
    params['client_id'] = @azure_properties['client_id']
    params['client_secret'] = @azure_properties['client_secret']
    params['resource'] = AZURE_ENVIRONMENTS[@azure_properties['environment']]['resourceManagerEndpointUrl']
    params['scope'] = 'user_impersonation'
    request = Net::HTTP::Post.new(uri.request_uri)
    request['Content-Type'] = 'application/x-www-form-urlencoded'
    request.body = URI.encode_www_form(params)
    response = http(uri).request(request)
    if response.code.to_i == HTTP_CODE_OK
      @token = JSON(response.body)
      # NOTE(review): this logs the full token response (including
      # access_token) at debug level — consider redacting.
      @logger.debug("get_token - token is\n#{@token}")
    else
      raise AzureError, "get_token - http error: #{response.code}"
    end
  end
  @token['access_token']
end
# Joins the ARM endpoint with +url+ and encodes query +params+,
# defaulting 'api-version' to API_VERSION when not supplied.
def http_url(url, params = {})
  uri = URI(AZURE_ENVIRONMENTS[@azure_properties['environment']]['resourceManagerEndpointUrl'] + url)
  params['api-version'] ||= API_VERSION
  uri.query = URI.encode_www_form(params)
  uri
end
# Sends +request+ with JSON content type and a Bearer token. On a 401
# the token is refreshed once and the request retried; a second 401
# propagates as AzureUnauthorizedError.
def http_get_response(uri, request)
  response = nil
  refresh_token = false
  begin
    request['Content-Type'] = 'application/json'
    request['Authorization'] = 'Bearer ' + get_token(refresh_token)
    response = http(uri).request(request)
    if response.code.to_i == HTTP_CODE_UNAUTHORIZED
      @logger.warn("http_get_response - Azure authentication failed: Token is invalid.")
      raise AzureUnauthorizedError, "http_get_response - http error: Unauthorized"
    end
  rescue AzureUnauthorizedError => e
    # Retry exactly once with a force-refreshed token.
    unless refresh_token
      refresh_token = true
      retry
    end
    raise e
  end
  response
end
# Polls the Azure-AsyncOperation status endpoint until the
# asynchronous operation completes. Honors a Retry-After header when
# present. Returns true on 'Succeeded'; raises AzureError with
# status/request details on any other terminal status.
def check_completion(response, api_version, retry_after = 30)
  @logger.debug("check_completion - response code: #{response.code} response.body: \n#{response.body}")
  retry_after = response['retry-after'].to_i if response.key?('retry-after')
  operation_status_link = response['azure-asyncoperation']
  if operation_status_link.nil? || operation_status_link.empty?
    raise AzureError, "check_completion - operation_status_link cannot be null."
  end
  operation_status_link.gsub!(' ', '%20')
  uri = URI(operation_status_link)
  params = {}
  params['api-version'] = api_version
  request = Net::HTTP::Get.new(uri.request_uri)
  uri.query = URI.encode_www_form(params)
  request.add_field('x-ms-version', api_version)
  loop do
    sleep(retry_after)
    @logger.debug("check_completion - trying to get the status of asynchronous operation: #{uri.to_s}")
    response = http_get_response(uri, request)
    status_code = response.code.to_i
    @logger.debug("check_completion - #{status_code}\n#{response.body}")
    if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_ACCEPTED
      raise AzureError, "check_completion - http error: #{response.code}"
    end
    unless response.body.nil?
      ret = JSON(response.body)
      status = ret['status']
      unless status.nil?
        if status == 'InProgress'
          @logger.debug("check_completion - InProgress...")
        elsif status == 'Succeeded'
          return true
        else
          # BUG FIX: was ret[:status], which is always nil because the
          # JSON hash is string-keyed; report the actual status value.
          error_msg = "status: #{status}\n"
          error_msg += "http code: #{status_code}\n"
          error_msg += "request id: #{response['x-ms-request-id']}\n"
          error_msg += "error:\n#{ret['error']}"
          raise AzureError, error_msg
        end
      end
    end
  end
end
# Issues a GET against the ARM endpoint and parses the JSON body.
# Returns nil when the response body is empty.
# Raises AzureNoFoundError on 204/404, AzureError on any other non-200.
def http_get(url, params = {})
  uri = http_url(url, params)
  @logger.info("http_get - trying to get #{uri.to_s}")
  response = http_get_response(uri, Net::HTTP::Get.new(uri.request_uri))
  status_code = response.code.to_i
  case status_code
  when HTTP_CODE_OK
    # fall through to parse the body
  when HTTP_CODE_NOCONTENT, HTTP_CODE_NOTFOUND
    raise AzureNoFoundError, "http_get - error: #{response.code}"
  else
    message = "http_get - error: #{response.code}"
    message += " message: #{response.body}" unless response.body.nil?
    raise AzureError, message
  end
  response.body.nil? ? nil : JSON(response.body)
end
# Issues a PUT (create/update) and waits for the asynchronous operation
# to complete. +body+ is serialized to JSON when present.
# Raises AzureError unless the immediate response is 200 or 201.
def http_put(url, body = nil, retry_after = 30, params = {})
uri = http_url(url, params)
@logger.info("http_put - trying to put #{uri.to_s}")
request = Net::HTTP::Put.new(uri.request_uri)
unless body.nil?
request_body = body.to_json
request.body = request_body
request['Content-Length'] = request_body.size
@logger.debug("http_put - request body:\n#{request.body}")
end
response = http_get_response(uri, request)
status_code = response.code.to_i
if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_CREATED
error = "http_put - error: #{response.code}"
error += " message: #{response.body}" unless response.body.nil?
raise AzureError, error
end
# Poll with the same api-version the request was made with.
api_version = API_VERSION
api_version = params['api-version'] unless params['api-version'].nil?
check_completion(response, api_version, retry_after)
end
# Issues a DELETE and waits for the asynchronous operation to complete.
# Returns true immediately for 200/204 (synchronous completion); a 202
# is polled via check_completion. Raises AzureError on other codes.
def http_delete(url, body = nil, retry_after = 10, params = {})
  uri = http_url(url, params)
  @logger.info("http_delete - trying to delete #{uri.to_s}")
  request = Net::HTTP::Delete.new(uri.request_uri)
  unless body.nil?
    request_body = body.to_json
    request.body = request_body
    request['Content-Length'] = request_body.size
    # Bug fix: this debug line was tagged "http_put" (copy/paste).
    @logger.debug("http_delete - request body:\n#{request.body}")
  end
  response = http_get_response(uri, request)
  status_code = response.code.to_i
  if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_ACCEPTED && status_code != HTTP_CODE_NOCONTENT
    error = "http_delete - error: #{response.code}"
    error += " message: #{response.body}" unless response.body.nil?
    raise AzureError, error
  end
  return true if status_code == HTTP_CODE_OK || status_code == HTTP_CODE_NOCONTENT
  api_version = API_VERSION
  api_version = params['api-version'] unless params['api-version'].nil?
  check_completion(response, api_version, retry_after)
end
# Issues a POST (used for actions such as VM restart) and waits for the
# asynchronous operation to complete. Raises AzureError unless the
# immediate response is 202.
def http_post(url, body = nil, retry_after = 30, params = {})
  uri = http_url(url, params)
  @logger.info("http_post - trying to post #{uri.to_s}")
  request = Net::HTTP::Post.new(uri.request_uri)
  request['Content-Length'] = 0
  unless body.nil?
    request_body = body.to_json
    request.body = request_body
    request['Content-Length'] = request_body.size
    # Bug fix: this debug line was tagged "http_put" (copy/paste).
    @logger.debug("http_post - request body:\n#{request.body}")
  end
  response = http_get_response(uri, request)
  status_code = response.code.to_i
  if status_code != HTTP_CODE_ACCEPTED
    error = "http_post - error: #{response.code}"
    error += " message: #{response.body}" unless response.body.nil?
    raise AzureError, error
  end
  api_version = API_VERSION
  api_version = params['api-version'] unless params['api-version'].nil?
  check_completion(response, api_version, retry_after)
end
end
end
# Remove \r in azure_client2.rb
###############################################################################
# This client is for using Azure Resource Manager. It will be obsoleted soon
# after azure-sdk-for-ruby supports Azure Resource Manager.
###############################################################################
module Bosh::AzureCloud
# Base error for all Azure REST failures raised by this client.
class AzureError < Bosh::Clouds::CloudError; end
# Raised on HTTP 401; triggers a one-shot token refresh in http_get_response.
class AzureUnauthorizedError < AzureError; end
# Raised when a resource is absent (HTTP 204/404). Name kept for caller
# compatibility ("NotFound" would be the conventional spelling).
class AzureNoFoundError < AzureError; end
class AzureClient2
include Helpers
# api-versions sent with ARM requests. API_VERSION_1 targets the older
# resource-group endpoint (see get_resource_group).
API_VERSION = '2015-05-01-preview'
API_VERSION_1 = '2015-01-01'
# HTTP status codes this client branches on.
HTTP_CODE_OK = 200
HTTP_CODE_CREATED = 201
HTTP_CODE_ACCEPTED = 202
HTTP_CODE_NOCONTENT = 204
HTTP_CODE_PARTIALCONTENT = 206
HTTP_CODE_BADREQUEST = 400
HTTP_CODE_UNAUTHORIZED = 401
HTTP_CODE_FORBIDDEN = 403
HTTP_CODE_NOTFOUND = 404
HTTP_CODE_CONFLICT = 409
HTTP_CODE_LENGTHREQUIRED = 411
HTTP_CODE_PRECONDITIONFAILED = 412
# ARM resource providers and resource types used to build request URLs
# via rest_api_url.
REST_API_PROVIDER_COMPUTER = 'Microsoft.Compute'
REST_API_COMPUTER_VIRTUAL_MACHINES = 'virtualMachines'
REST_API_PROVIDER_NETWORK = 'Microsoft.Network'
REST_API_NETWORK_PUBLIC_IP_ADDRESSES = 'publicIPAddresses'
REST_API_NETWORK_LOAD_BALANCERS = 'loadBalancers'
REST_API_NETWORK_INTERFACES = 'networkInterfaces'
REST_API_NETWORK_VNETS = 'virtualNetworks'
REST_API_PROVIDER_STORAGE = 'Microsoft.Storage'
REST_API_STORAGE_ACCOUNTS = 'storageAccounts'
# @param azure_properties [Hash] CPI settings; keys read by this class
#   include 'environment', 'subscription_id', 'resource_group_name',
#   'tenant_id', 'client_id' and 'client_secret'.
# @param logger [Logger] sink for request/diagnostic logging
def initialize(azure_properties, logger)
@logger = logger
@azure_properties = azure_properties
end
# Common
# Builds the ARM resource path
# /subscriptions/<id>/resourceGroups/<rg>/providers/<provider>/<type>[/<name>][/<others>].
# NOTE(review): URI.escape is deprecated and removed in Ruby 3 — replace
# with ERB::Util.url_encode when upgrading the Ruby runtime.
def rest_api_url(resource_provider, resource_type, name = nil, others = nil)
url = "/subscriptions/#{URI.escape(@azure_properties['subscription_id'])}"
url += "/resourceGroups/#{URI.escape(@azure_properties['resource_group_name'])}"
url += "/providers/#{resource_provider}"
url += "/#{resource_type}"
url += "/#{URI.escape(name)}" unless name.nil?
url += "/#{URI.escape(others)}" unless others.nil?
url
end
# Splits an ARM resource id of the form
# /subscriptions/<s>/resourceGroups/<g>/providers/<p>/<type>/<name>...
# into its components.
#
# @param id [String] fully-qualified ARM resource id
# @return [Hash] keys :subscription_id, :resource_group_name,
#   :provider_name, :resource_type, :resource_name
# @raise [AzureError] when +id+ does not match the expected shape
def parse_name_from_id(id)
  pattern = %r{/subscriptions/([^/]*)/resourceGroups/([^/]*)/providers/([^/]*)/([^/]*)/([^/]*)(.*)}
  match = pattern.match(id)
  raise AzureError, "\"#{id}\" is not a valid URL." if match.nil?
  {
    :subscription_id => match[1],
    :resource_group_name => match[2],
    :provider_name => match[3],
    :resource_type => match[4],
    :resource_name => match[5],
  }
end
# Deletes an arbitrary resource by its ARM URL, logging and re-raising
# any failure.
def delete_resource_by_id(url)
  http_delete(url, nil, 10)
rescue => e
  @logger.warn("delete_resource_by_id - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end

# Fetches an arbitrary resource by its ARM URL.
# Returns nil when the resource does not exist (AzureNoFoundError);
# logs and re-raises any other failure.
def get_resource_by_id(url, params = {})
  http_get(url, params)
rescue AzureNoFoundError
  nil
rescue => e
  @logger.warn("get_resource_by_id - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Resource Groups
# Fetches the configured resource group (older API_VERSION_1 endpoint).
# @return [Hash, nil] :id, :name, :location, :tags, :provisioning_state,
#   or nil when the resource group does not exist
def get_resource_group()
resource_group = nil
url = "/subscriptions/#{URI.escape(@azure_properties['subscription_id'])}"
url += "/resourceGroups/#{URI.escape(@azure_properties['resource_group_name'])}"
params = { 'api-version' => API_VERSION_1}
result = get_resource_by_id(url, params)
unless result.nil?
resource_group = {}
resource_group[:id] = result['id']
resource_group[:name] = result['name']
resource_group[:location] = result['location']
resource_group[:tags] = result['tags']
resource_group[:provisioning_state] = result['properties']['provisioningState']
end
resource_group
end
# Compute/Virtual Machines
# Public: Provisions a virtual machine based on the supplied configuration.
#
# ==== Attributes
#
# @param [Hash] vm_params - Parameters for creating the virtual machine.
# @param [Hash] network_interface - Network Interface Instance.
#
# ==== Params
#
# Accepted key/value pairs are:
# * +:name+ - String. Name of virtual machine.
# * +:location+ - String. The location where the virtual machine will be created.
# * +:vm_size+ - String. Specifies the size of the virtual machine instance.
# * +:username+ - String. User name for the virtual machine instance.
# * +:custom_data+ - String. Specifies a base-64 encoded string of custom data.
# * +:image_uri+ - String. The URI of the image.
# * +:os_vhd_uri+ - String. The URI of the OS disk for the virtual machine instance.
# * +:ssh_cert_data+ - String. The content of SSH certificate.
#
# Builds the ARM VM payload (Linux, SSH-key auth only, OS disk from
# image) and PUTs it; see the comment block above for vm_params keys.
def create_virtual_machine(vm_params, network_interface)
url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, vm_params[:name])
vm = {
'name' => vm_params[:name],
'location' => vm_params[:location],
'type' => "#{REST_API_PROVIDER_COMPUTER}/#{REST_API_COMPUTER_VIRTUAL_MACHINES}",
'properties' => {
'hardwareProfile' => {
'vmSize' => vm_params[:vm_size]
},
'osProfile' => {
'customData' => vm_params[:custom_data],
'computername' => vm_params[:name],
'adminUsername' => vm_params[:username],
'linuxConfiguration' => {
'disablePasswordAuthentication' => 'true',
'ssh' => {
'publicKeys' => [
{
'path' => "/home/#{vm_params[:username]}/.ssh/authorized_keys",
'keyData' => vm_params[:ssh_cert_data],
}
]
},
},
},
'storageProfile' => {
'osDisk' => {
'name' => "#{vm_params[:name]}_os_disk",
'osType' => 'Linux',
'createOption' => 'FromImage',
'caching' => 'ReadWrite',
'image' => {
'uri' => vm_params[:image_uri]
},
'vhd' => {
'uri' => vm_params[:os_vhd_uri]
}
},
},
'networkProfile' => {
'networkInterfaces' => [
{
'id' => network_interface[:id]
}
]
}
}
}
# 'validating' asks ARM to validate the template as part of the request.
params = {
'validating' => 'true'
}
http_put(url, vm, 30, params)
rescue => e
@logger.warn("create_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
raise e
end
# Restarts the named virtual machine via the ARM 'restart' action,
# logging and re-raising any failure.
def restart_virtual_machine(name)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name, 'restart')
  http_post(url)
rescue => e
  @logger.warn("restart_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
  raise e
end
# Public: Set tags for a VM
# @param [String] name Name of virtual machine.
# @param [Hash] metadata metadata key/value pairs.
# Replaces the tag set on the named VM by re-PUTting its full resource
# description with 'tags' swapped out.
def update_tags_of_virtual_machine(name, tags)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  vm = get_resource_by_id(url)
  if vm.nil?
    raise AzureNoFoundError, "update_tags_of_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    vm['tags'] = tags
    http_put(url, vm)
  rescue => e
    @logger.warn("update_tags_of_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Attaches an existing VHD as a data disk on the named VM, using the
# first free LUN in 0..128.
#
# @param name [String] virtual machine name
# @param disk_name [String] name for the attached data disk
# @param disk_uri [String] URI of the VHD blob to attach
# @return [Hash] the attached disk (:name, :lun, :createOption, :caching, :vhd)
# @raise [AzureNoFoundError] when the VM does not exist
# @raise [AzureError] when no LUN is free
def attach_disk_to_virtual_machine(name, disk_name, disk_uri)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  result = get_resource_by_id(url)
  if result.nil?
    raise AzureNoFoundError, "attach_disk_to_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    data_disks = result['properties']['storageProfile']['dataDisks']
    # Pick the first unoccupied LUN. The original for-loop shadowed the
    # outer `disk` variable in its block and silently fell back to LUN 0
    # when every LUN was taken; raise explicitly instead.
    lun = (0..128).find { |i| data_disks.none? { |d| d['lun'] == i } }
    raise AzureError, "attach_disk_to_virtual_machine - no free LUN on \"#{name}\"" if lun.nil?
    new_disk = {
      'name' => disk_name,
      'lun' => lun,
      'createOption' => 'Attach',
      'caching' => 'ReadWrite',
      'vhd' => { 'uri' => disk_uri }
    }
    data_disks.push(new_disk)
    @logger.info("attach_disk_to_virtual_machine - attach disk #{disk_name} to #{lun}")
    http_put(url, result)
    # Return value mirrors the attached disk with symbol keys.
    {
      :name => disk_name,
      :lun => lun,
      :createOption => 'Attach',
      :caching => 'ReadWrite',
      :vhd => { :uri => disk_uri }
    }
  rescue => e
    @logger.warn("attach_disk_to_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Detaches the named data disk from the named VM by removing it from the
# storage profile and re-PUTting the VM description.
#
# @raise [AzureNoFoundError] when the VM does not exist
# @raise [RuntimeError] when the disk is not attached to the VM
def detach_disk_from_virtual_machine(name, disk_name)
  url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
  result = get_resource_by_id(url)
  if result.nil?
    raise AzureNoFoundError, "detach_disk_from_virtual_machine - cannot find the virtual machine by name \"#{name}\""
  end
  begin
    data_disks = result['properties']['storageProfile']['dataDisks']
    # Use a distinct block parameter: the original blocks shadowed the
    # outer `disk` local they were assigning to / reading from.
    disk = data_disks.find { |d| d['name'] == disk_name }
    raise "the given disk #{disk_name} is not attached to the given virtual machine #{name}" if disk.nil?
    data_disks.delete_if { |d| d['name'] == disk_name }
    @logger.info("detach_disk_from_virtual_machine - detach disk #{disk_name} from lun #{disk['lun']}")
    http_put(url, result)
  rescue => e
    @logger.warn("detach_disk_from_virtual_machine - error: #{e.message}\n#{e.backtrace.join("\n")}")
    raise e
  end
end
# Convenience wrapper: look up a VM by name.
def get_virtual_machine_by_name(name)
url = rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name)
get_virtual_machine(url)
end
# Fetches a VM by ARM URL and flattens it into a symbol-keyed hash
# (id/name/location/tags, size, os_disk, data_disks, network_interface).
# Returns nil when the VM does not exist.
def get_virtual_machine(url)
vm = nil
result = get_resource_by_id(url)
unless result.nil?
vm = {}
vm[:id] = result['id']
vm[:name] = result['name']
vm[:location] = result['location']
vm[:tags] = result['tags']
properties = result['properties']
vm[:provisioning_state] = properties['provisioningState']
vm[:size] = properties['hardwareProfile']['vmSize']
storageProfile = properties['storageProfile']
vm[:os_disk] = {}
vm[:os_disk][:name] = storageProfile['osDisk']['name']
vm[:os_disk][:uri] = storageProfile['osDisk']['vhd']['uri']
vm[:os_disk][:caching] = storageProfile['osDisk']['caching']
vm[:data_disks] = []
storageProfile['dataDisks'].each do |data_disk|
disk = {}
disk[:name] = data_disk['name']
disk[:lun] = data_disk['lun']
disk[:uri] = data_disk['vhd']['uri']
disk[:caching] = data_disk['caching']
vm[:data_disks].push(disk)
end
# Only the first NIC is surfaced; fetched as a nested hash.
interface_id = properties['networkProfile']['networkInterfaces'][0]['id']
vm[:network_interface] = get_network_interface(interface_id)
end
vm
end
# Deletes the named virtual machine (the asynchronous delete is awaited
# by the underlying http_delete/check_completion machinery).
def delete_virtual_machine(name)
  @logger.debug("delete_virtual_machine - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_COMPUTER, REST_API_COMPUTER_VIRTUAL_MACHINES, name))
end
# Network/Public IP
# Creates a public IP with Static (default) or Dynamic allocation.
def create_public_ip(name, location, is_static = true)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name)
public_ip = {
'name' => name,
'location' => location,
'properties' => {
'publicIPAllocationMethod' => is_static ? 'Static' : 'Dynamic'
}
}
http_put(url, public_ip, 10)
rescue => e
@logger.warn("create_public_ip - error: #{e.message}\n#{e.backtrace.join("\n")}")
raise e
end
# Convenience wrapper: look up a public IP by name.
def get_public_ip_by_name(name)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name)
get_public_ip(url)
end
# Fetches a public IP by ARM URL into a symbol-keyed hash; nil if absent.
def get_public_ip(url)
ip_address = nil
result = get_resource_by_id(url)
unless result.nil?
ip_address = {}
ip_address[:id] = result['id']
ip_address[:name] = result['name']
ip_address[:location] = result['location']
properties = result['properties']
ip_address[:provisioning_state] = properties['provisioningState']
ip_address[:ip_address] = properties['ipAddress']
ip_address[:public_ip_allocation_method] = properties['publicIPAllocationMethod']
# NOTE(review): indexes ipConfigurations with ['id']; if the API returns
# an array here this raises TypeError — confirm the payload shape.
ip_address[:ip_configuration_id] = properties['ipConfigurations']['id'] unless properties['ipConfigurations'].nil?
unless properties['dnsSettings'].nil?
ip_address[:domain_name_label] = properties['dnsSettings']['domainNameLabel']
ip_address[:fqdn] = properties['dnsSettings']['fqdn']
end
end
ip_address
end
# Lists all public IPs in the resource group as symbol-keyed hashes.
# Returns [] when the collection endpoint yields nothing.
def list_public_ips()
ip_addresses = []
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES)
result = get_resource_by_id(url)
unless result.nil?
result['value'].each do |ret|
ip_address = {}
ip_address[:id] = ret['id']
ip_address[:name] = ret['name']
ip_address[:location] = ret['location']
properties = ret['properties']
ip_address[:provisioning_state] = properties['provisioningState']
ip_address[:ip_address] = properties['ipAddress']
ip_address[:public_ip_allocation_method] = properties['publicIPAllocationMethod']
# NOTE(review): same ipConfigurations['id'] indexing caveat as get_public_ip.
ip_address[:ip_configuration_id] = properties['ipConfigurations']['id'] unless properties['ipConfigurations'].nil?
unless properties['dnsSettings'].nil?
ip_address[:domain_name_label] = properties['dnsSettings']['domainNameLabel']
ip_address[:fqdn] = properties['dnsSettings']['fqdn']
end
ip_addresses.push(ip_address)
end
end
ip_addresses
end
# Deletes the named public IP.
def delete_public_ip(name)
@logger.debug("delete_public_ip - trying to delete #{name}")
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_PUBLIC_IP_ADDRESSES, name)
delete_resource_by_id(url)
end
# Network/Load Balancer
# Creates a load balancer fronted by +public_ip+ with one frontend (LBFE)
# and one backend pool (LBBE), plus an inbound NAT rule per endpoint.
# Endpoints are "frontendPort:backendPort" strings.
def create_load_balancer(name, public_ip, tcp_endpoints = [], udp_endpoints = [])
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name)
load_balancer = {
'name' => name,
'location' => public_ip[:location],
'properties' => {
'frontendIPConfigurations' => [
'name' => 'LBFE',
'properties' => {
#'privateIPAllocationMethod' => 'Dynamic',
'publicIPAddress' => {
'id' => public_ip[:id]
}
}
],
'backendAddressPools' => [
'name' => 'LBBE'
],
'inboundNatRules' => [],
}
}
frontend_ip_configuration_id = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name, 'frontendIPConfigurations/LBFE')
tcp_endpoints.each do |endpoint|
ports = endpoint.split(':')
inbound_nat_rules = {
'name' => "NatRule-TcpEndPoints-#{ports[0].to_s}",
'properties' => {
'frontendPort' => ports[0],
'backendPort' => ports[1],
'enableFloatingIP' => false,
'protocol' => 'Tcp',
'frontendIPConfiguration' => {
"id" => frontend_ip_configuration_id
}
}
}
load_balancer['properties']['inboundNatRules'].push(inbound_nat_rules)
end
udp_endpoints.each do |endpoint|
ports = endpoint.split(':')
inbound_nat_rules = {
'name' => "NatRule-UdpEndPoints-#{ports[0].to_s}",
'properties' => {
'frontendPort' => ports[0],
'backendPort' => ports[1],
'enableFloatingIP' => false,
'protocol' => 'Udp',
'frontendIPConfiguration' => {
"id" => frontend_ip_configuration_id
}
}
}
load_balancer['properties']['inboundNatRules'].push(inbound_nat_rules)
end
http_put(url, load_balancer, 10)
rescue => e
@logger.warn("create_load_balancer - error: #{e.message}\n#{e.backtrace.join("\n")}")
raise e
end
# Convenience wrapper: look up a load balancer by name.
def get_load_balancer_by_name(name)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name)
get_load_balancer(url)
end
# Fetches a load balancer by ARM URL into a symbol-keyed hash including
# frontend IP configurations (with nested public IP) and backend pools.
# Returns nil when absent.
def get_load_balancer(url)
load_balancer = nil
result = get_resource_by_id(url)
unless result.nil?
load_balancer = {}
load_balancer[:id] = result['id']
load_balancer[:name] = result['name']
load_balancer[:location] = result['location']
properties = result['properties']
load_balancer[:provisioning_state] = properties['provisioningState']
frontend = properties['frontendIPConfigurations']
load_balancer[:frontend_ip_configurations] = []
frontend.each do |frontend_ip|
ip = {}
ip[:name] = frontend_ip['name']
ip[:id] = frontend_ip['id']
ip[:provisioning_state] = frontend_ip['properties']['provisioningState']
ip[:private_ip_allocation_method] = frontend_ip['properties']['privateIPAllocationMethod']
ip[:public_ip] = get_public_ip(frontend_ip['properties']['publicIPAddress']['id'])
ip[:inbound_nat_rules] = frontend_ip['properties']['inboundNatRules']
load_balancer[:frontend_ip_configurations].push(ip)
end
backend = properties['backendAddressPools']
load_balancer[:backend_address_pools] = []
backend.each do |backend_ip|
ip = {}
ip[:name] = backend_ip['name']
ip[:id] = backend_ip['id']
ip[:provisioning_state] = backend_ip['properties']['provisioningState']
ip[:backend_ip_configurations] = backend_ip['properties']['backendIPConfigurations']
load_balancer[:backend_address_pools].push(ip)
end
end
load_balancer
end
# Deletes the named load balancer.
def delete_load_balancer(name)
  @logger.debug("delete_load_balancer - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_LOAD_BALANCERS, name))
end
# Network/Network Interface
# Public: Create a network interface based on the supplied configuration.
#
# ==== Attributes
#
# @param [Hash] nic_params - Parameters for creating the network interface.
# @param [Hash] subnet - The subnet which the network interface is binded to.
# @param [Hash] load_balancer - The load balancer which the network interface is binded to.
#
# ==== Params
#
# Accepted key/value pairs are:
# * +:name+ - String. Name of network interface.
# * +:location+ - String. The location where the network interface will be created.
# * +:private_ip - String. Private IP address which the network interface will use.
# * +:dns_servers - Array. DNS servers.
# * +:public_ip - Hash. The public IP which the network interface is binded to.
#
# Creates a NIC bound to +subnet+ (and optionally to +load_balancer+'s
# first backend pool and NAT rules); see the comment block above for
# nic_params keys.
def create_network_interface(nic_params, subnet, load_balancer = nil)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, nic_params[:name])
interface = {
'name' => nic_params[:name],
'location' => nic_params[:location],
'properties' => {
'ipConfigurations' => [
{
'name' => 'ipconfig1',
'properties' => {
'privateIPAddress' => nic_params[:private_ip],
'privateIPAllocationMethod' => nic_params[:private_ip].nil? ? 'Dynamic' : 'Static',
'publicIPAddress' => nic_params[:public_ip].nil? ? nil : { 'id' => nic_params[:public_ip][:id] },
'subnet' => {
'id' => subnet[:id]
}
}
}
],
'dnsSettings' => {
'dnsServers' => nic_params[:dns_servers].nil? ? [] : nic_params[:dns_servers]
}
}
}
unless load_balancer.nil?
interface['properties']['ipConfigurations'][0]['properties']['loadBalancerBackendAddressPools'] = [
{
'id' => load_balancer[:backend_address_pools][0][:id]
}
]
interface['properties']['ipConfigurations'][0]['properties']['loadBalancerInboundNatRules'] =
load_balancer[:frontend_ip_configurations][0][:inbound_nat_rules]
end
http_put(url, interface, 10)
rescue => e
@logger.warn("create_network_interface - error: #{e.message}\n#{e.backtrace.join("\n")}")
raise e
end
# Convenience wrapper: look up a NIC by name.
def get_network_interface_by_name(name)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, name)
get_network_interface(url)
end
# Fetches a NIC by ARM URL into a symbol-keyed hash, including its first
# ipConfiguration, optional public IP and optional load balancer.
# Returns nil when absent.
def get_network_interface(url)
interface = nil
result = get_resource_by_id(url)
unless result.nil?
interface = {}
interface[:id] = result['id']
interface[:name] = result['name']
interface[:location] = result['location']
properties = result['properties']
interface[:provisioning_state] = properties['provisioningState']
unless properties['dnsSettings']['dnsServers'].nil?
interface[:dns_settings] = []
properties['dnsSettings']['dnsServers'].each { |dns| interface[:dns_settings].push(dns) }
end
# Only the first IP configuration is surfaced.
ip_configuration = properties['ipConfigurations'][0]
interface[:ip_configuration_id] = ip_configuration['id']
ip_configuration_properties = ip_configuration['properties']
interface[:private_ip] = ip_configuration_properties['privateIPAddress']
interface[:private_ip_allocation_method] = ip_configuration_properties['privateIPAllocationMethod']
unless ip_configuration_properties['publicIPAddress'].nil?
interface[:public_ip] = get_public_ip(ip_configuration_properties['publicIPAddress']['id'])
end
unless ip_configuration_properties['loadBalancerBackendAddressPools'].nil?
names = parse_name_from_id(ip_configuration_properties['loadBalancerBackendAddressPools'][0]['id'])
interface[:load_balancer] = get_load_balancer_by_name(names[:resource_name])
end
end
interface
end
# Deletes the named network interface.
def delete_network_interface(name)
  @logger.debug("delete_network_interface - trying to delete #{name}")
  delete_resource_by_id(rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_INTERFACES, name))
end
# Network/Subnet
# Convenience wrapper: look up a subnet inside a virtual network.
def get_network_subnet_by_name(vnet_name, subnet_name)
url = rest_api_url(REST_API_PROVIDER_NETWORK, REST_API_NETWORK_VNETS, vnet_name, "subnets/#{subnet_name}")
get_network_subnet(url)
end
# Fetches a subnet by ARM URL into a symbol-keyed hash; nil if absent.
def get_network_subnet(url)
subnet = nil
result = get_resource_by_id(url)
unless result.nil?
subnet = {}
subnet[:id] = result['id']
subnet[:name] = result['name']
properties = result['properties']
subnet[:provisioning_state] = properties['provisioningState']
subnet[:address_prefix] = properties['addressPrefix']
end
subnet
end
# Storage/StorageAccounts
# Convenience wrapper: look up a storage account by name.
def get_storage_account_by_name(name)
url = rest_api_url(REST_API_PROVIDER_STORAGE, REST_API_STORAGE_ACCOUNTS, name)
get_storage_account(url)
end
# Fetches a storage account by ARM URL into a symbol-keyed hash
# (:account_type, :primary_endpoints, ...); nil if absent.
def get_storage_account(url)
storage_account = nil
result = get_resource_by_id(url)
unless result.nil?
storage_account = {}
storage_account[:id] = result['id']
storage_account[:name] = result['name']
storage_account[:location] = result['location']
properties = result['properties']
storage_account[:provisioning_state] = properties['provisioningState']
storage_account[:account_type] = properties['accountType']
storage_account[:primary_endpoints] = properties['primaryEndpoints']
end
storage_account
end
private
# Returns a fresh Net::HTTP client for +uri+ with TLS enabled.
def http(uri)
  client = Net::HTTP.new(uri.host, uri.port)
  client.use_ssl = true
  # Uncomment for wire-level debugging:
  # client.set_debug_output($stdout)
  client
end
# Returns a cached AAD access token, fetching a new one via the OAuth2
# client-credentials flow when the cache is empty, expired, or
# +force_refresh+ is true.
# @return [String] the bearer access token
# @raise [AzureError] when the token endpoint responds with a non-200
def get_token(force_refresh = false)
if @token.nil? || (Time.at(@token['expires_on'].to_i) - Time.now) <= 0 || force_refresh
@logger.info("get_token - trying to get/refresh Azure authentication token")
params = {}
params['api-version'] = API_VERSION
uri = URI(AZURE_ENVIRONMENTS[@azure_properties['environment']]['activeDirectoryEndpointUrl'] + '/' + @azure_properties['tenant_id'] + '/oauth2/token')
uri.query = URI.encode_www_form(params)
params = {}
params['grant_type'] = 'client_credentials'
params['client_id'] = @azure_properties['client_id']
params['client_secret'] = @azure_properties['client_secret']
params['resource'] = AZURE_ENVIRONMENTS[@azure_properties['environment']]['resourceManagerEndpointUrl']
params['scope'] = 'user_impersonation'
request = Net::HTTP::Post.new(uri.request_uri)
request['Content-Type'] = 'application/x-www-form-urlencoded'
request.body = URI.encode_www_form(params)
response = http(uri).request(request)
if response.code.to_i == HTTP_CODE_OK
@token = JSON(response.body)
# NOTE(review): this logs the full token at debug level — consider
# redacting before enabling debug logging in shared environments.
@logger.debug("get_token - token is\n#{@token}")
else
raise AzureError, "get_token - http error: #{response.code}"
end
end
@token['access_token']
end
# Builds the absolute ARM request URI for the relative path +url+,
# defaulting the 'api-version' query parameter when the caller did not
# supply one.
def http_url(url, params = {})
  endpoint = AZURE_ENVIRONMENTS[@azure_properties['environment']]['resourceManagerEndpointUrl']
  params['api-version'] = API_VERSION if params['api-version'].nil?
  full_uri = URI(endpoint + url)
  full_uri.query = URI.encode_www_form(params)
  full_uri
end
# Sends +request+ to +uri+ with JSON content type and a bearer token.
# On HTTP 401 the request is retried exactly once with a forced token
# refresh; a second 401 propagates as AzureUnauthorizedError.
def http_get_response(uri, request)
response = nil
refresh_token = false
begin
request['Content-Type'] = 'application/json'
request['Authorization'] = 'Bearer ' + get_token(refresh_token)
response = http(uri).request(request)
if response.code.to_i == HTTP_CODE_UNAUTHORIZED
@logger.warn("http_get_response - Azure authentication failed: Token is invalid.")
raise AzureUnauthorizedError, "http_get_response - http error: Unauthorized"
end
rescue AzureUnauthorizedError => e
# First failure: flip the flag and retry with a fresh token.
unless refresh_token
refresh_token = true
retry
end
raise e
end
response
end
# Polls the asynchronous-operation endpoint referenced by the
# 'azure-asyncoperation' response header until the operation finishes.
#
# @param response [Net::HTTPResponse] response that accepted the async op
# @param api_version [String] api-version used when polling
# @param retry_after [Integer] polling interval in seconds; overridden by
#   the 'retry-after' response header when present
# @return [true] when the operation status becomes 'Succeeded'
# @raise [AzureError] when the status link is missing, polling returns an
#   unexpected HTTP code, or the operation ends in a non-success state
def check_completion(response, api_version, retry_after = 30)
  @logger.debug("check_completion - response code: #{response.code} response.body: \n#{response.body}")
  retry_after = response['retry-after'].to_i if response.key?('retry-after')
  operation_status_link = response['azure-asyncoperation']
  if operation_status_link.nil? || operation_status_link.empty?
    raise AzureError, "check_completion - operation_status_link cannot be null."
  end
  operation_status_link.gsub!(' ', '%20')
  uri = URI(operation_status_link)
  params = {}
  params['api-version'] = api_version
  request = Net::HTTP::Get.new(uri.request_uri)
  uri.query = URI.encode_www_form(params)
  request.add_field('x-ms-version', api_version)
  loop do
    sleep(retry_after)
    @logger.debug("check_completion - trying to get the status of asynchronous operation: #{uri.to_s}")
    response = http_get_response(uri, request)
    status_code = response.code.to_i
    @logger.debug("check_completion - #{status_code}\n#{response.body}")
    if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_ACCEPTED
      raise AzureError, "check_completion - http error: #{response.code}"
    end
    next if response.body.nil?
    ret = JSON(response.body)
    status = ret['status']
    next if status.nil?
    if status == 'InProgress'
      @logger.debug("check_completion - InProgress...")
    elsif status == 'Succeeded'
      return true
    else
      # Bug fix: the original interpolated ret[:status], but JSON() yields
      # string keys, so the failure message always showed an empty status.
      error_msg = "status: #{status}\n"
      error_msg += "http code: #{status_code}\n"
      error_msg += "request id: #{response['x-ms-request-id']}\n"
      error_msg += "error:\n#{ret['error']}"
      raise AzureError, error_msg
    end
  end
end
# Issues a GET against the ARM endpoint and parses the JSON body.
# Returns nil when the response body is empty.
# Raises AzureNoFoundError on 204/404, AzureError on any other non-200.
def http_get(url, params = {})
  uri = http_url(url, params)
  @logger.info("http_get - trying to get #{uri.to_s}")
  response = http_get_response(uri, Net::HTTP::Get.new(uri.request_uri))
  status_code = response.code.to_i
  case status_code
  when HTTP_CODE_OK
    # fall through to parse the body
  when HTTP_CODE_NOCONTENT, HTTP_CODE_NOTFOUND
    raise AzureNoFoundError, "http_get - error: #{response.code}"
  else
    message = "http_get - error: #{response.code}"
    message += " message: #{response.body}" unless response.body.nil?
    raise AzureError, message
  end
  response.body.nil? ? nil : JSON(response.body)
end
# Issues a PUT (create/update) and waits for the asynchronous operation
# to complete. +body+ is serialized to JSON when present.
# Raises AzureError unless the immediate response is 200 or 201.
def http_put(url, body = nil, retry_after = 30, params = {})
uri = http_url(url, params)
@logger.info("http_put - trying to put #{uri.to_s}")
request = Net::HTTP::Put.new(uri.request_uri)
unless body.nil?
request_body = body.to_json
request.body = request_body
request['Content-Length'] = request_body.size
@logger.debug("http_put - request body:\n#{request.body}")
end
response = http_get_response(uri, request)
status_code = response.code.to_i
if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_CREATED
error = "http_put - error: #{response.code}"
error += " message: #{response.body}" unless response.body.nil?
raise AzureError, error
end
# Poll with the same api-version the request was made with.
api_version = API_VERSION
api_version = params['api-version'] unless params['api-version'].nil?
check_completion(response, api_version, retry_after)
end
# Issues a DELETE and waits for the asynchronous operation to complete.
# Returns true immediately for 200/204 (synchronous completion); a 202
# is polled via check_completion. Raises AzureError on other codes.
def http_delete(url, body = nil, retry_after = 10, params = {})
  uri = http_url(url, params)
  @logger.info("http_delete - trying to delete #{uri.to_s}")
  request = Net::HTTP::Delete.new(uri.request_uri)
  unless body.nil?
    request_body = body.to_json
    request.body = request_body
    request['Content-Length'] = request_body.size
    # Bug fix: this debug line was tagged "http_put" (copy/paste).
    @logger.debug("http_delete - request body:\n#{request.body}")
  end
  response = http_get_response(uri, request)
  status_code = response.code.to_i
  if status_code != HTTP_CODE_OK && status_code != HTTP_CODE_ACCEPTED && status_code != HTTP_CODE_NOCONTENT
    error = "http_delete - error: #{response.code}"
    error += " message: #{response.body}" unless response.body.nil?
    raise AzureError, error
  end
  return true if status_code == HTTP_CODE_OK || status_code == HTTP_CODE_NOCONTENT
  api_version = API_VERSION
  api_version = params['api-version'] unless params['api-version'].nil?
  check_completion(response, api_version, retry_after)
end
# Issues a POST (used for actions such as VM restart) and waits for the
# asynchronous operation to complete. Raises AzureError unless the
# immediate response is 202.
def http_post(url, body = nil, retry_after = 30, params = {})
  uri = http_url(url, params)
  @logger.info("http_post - trying to post #{uri.to_s}")
  request = Net::HTTP::Post.new(uri.request_uri)
  request['Content-Length'] = 0
  unless body.nil?
    request_body = body.to_json
    request.body = request_body
    request['Content-Length'] = request_body.size
    # Bug fix: this debug line was tagged "http_put" (copy/paste).
    @logger.debug("http_post - request body:\n#{request.body}")
  end
  response = http_get_response(uri, request)
  status_code = response.code.to_i
  if status_code != HTTP_CODE_ACCEPTED
    error = "http_post - error: #{response.code}"
    error += " message: #{response.body}" unless response.body.nil?
    raise AzureError, error
  end
  api_version = API_VERSION
  api_version = params['api-version'] unless params['api-version'].nil?
  check_completion(response, api_version, retry_after)
end
end
end
require "set"
module Collector
class NodeConnection < EM::Connection
attr_reader :connection_id
attr_accessor :activated_time
@@_id = 0
# @param queue [Object] sink the connection pushes decoded node messages
#   into (push_user, push_tweet, ... are called in parse_message)
def initialize(queue)
# Streaming unpacker: partial frames are buffered across receive_data calls.
@unpacker = MessagePack::Unpacker.new(symbolize_keys: true)
# Monotonically increasing id shared across all connections of this process.
@connection_id = (@@_id += 1)
@authenticated = false
@closing = false
@activated_time = nil
@queue = queue
# Outstanding heartbeat payloads awaiting a reply (see "heartbeat" event).
@heartbeats = Set.new
end
# EventMachine hook fired when the TCP connection drops. An expected
# close (@closing set by the "exit" event) is logged quietly; an
# unexpected one also unregisters this node from the NodeManager.
def unbind
  unless @closing
    log(:warn, "Connection was closed unexpectedly.")
    NodeManager.unregister(self)
    return
  end
  log(:info, "Connection was closed.")
end
# EventMachine hook: feeds raw bytes into the MessagePack streaming
# unpacker and dispatches each complete message. Anything that is not a
# Hash carrying :event gets an error reply and the connection is closed.
def receive_data(data)
@unpacker.feed_each(data) do |msg|
unless msg.is_a?(Hash) && msg[:event]
log(:warn, "Unknown message: #{msg}")
# NOTE(review): other call sites use send_message(event: ..., data: ...);
# confirm this positional (:error, text:) form matches send_message's
# signature.
send_message(:error, text: "Unknown message.")
close_connection_after_writing
next
end
parse_message(msg)
end
end
def register_account(account)
send_message(event: :register,
data: { id: account.id,
consumer_key: Settings.consumer.key,
consumer_secret: Settings.consumer.secret,
oauth_token: account.oauth_token,
oauth_token_secret: account.oauth_token_secret,
user_id: account.user_id })
log(:info, "Registered account ##{account.id}/#{account.user_id}")
end
def unregister_account(account)
send_message(event: :unregister,
data: { id: account.id,
user_id: account.user_id })
log(:info, "Unregistered account ##{account.id}/#{account.user_id}")
end
private
def parse_message(msg)
unless @authenticated
if msg[:event] == "auth"
authenticate_node(msg[:data])
else
log(:error, "Unauthenticated client: #{msg}")
send_message(event: :error, data: "You aren't authenticated.")
close_connection_after_writing
end
return
end
case msg[:event]
when "unauthorized"
log(:info, "Received unauthorized: ##{msg[:data][:id]}/#{msg[:data][:user_id]}")
@queue.push_unauthorized(msg)
when "user"
log(:debug, "Received user: #{msg[:identifier]}")
@queue.push_user(msg)
when "tweet"
log(:debug, "Received tweet: #{msg[:identifier]}")
@queue.push_tweet(msg)
when "favorite"
log(:debug, "Receive favorite: #{msg[:identifier]}")
@queue.push_favorite(msg)
when "unfavorite"
log(:debug, "Receive unfavorite: #{msg[:identifier]}")
@queue.push_unfavorite(msg)
when "retweet"
log(:debug, "Receive retweet: #{msg[:identifier]}")
@queue.push_retweet(msg)
when "delete"
log(:debug, "Receive delete: #{msg[:identifier]}")
@queue.push_delete(msg)
when "exit"
log(:info, "Closing this connection...")
@closing = true
NodeManager.unregister(self)
when "heartbeat"
log(:debug, "Heartbeat reply: #{msg[:data]}")
@heartbeats.delete(msg[:data])
else
log(:warn, "Unknown message: #{msg.inspect}")
send_message(event: :error, data: "Unknown message.")
end
end
def authenticate_node(data)
if data.key?(:secret_key) && Settings.collector.secret_key == data[:secret_key]
@authenticated = true
log(:info, "Connection authenticated.")
send_message(event: :auth, data: nil)
NodeManager.register(self)
@heartbeat_timer = EM.add_periodic_timer(10, &method(:heartbeat))
else
log(:warn, "Invalid secret_key: #{secret_key.inspect}")
send_message(event: :error, data: "Invalid secret_key.")
close_connection_after_writing
return
end
end
def send_message(data)
send_data(data.to_msgpack)
end
def heartbeat
if @heartbeats.size > 2 # 30 sec
log(:warn, "Node is dead.")
@heartbeat_timer.cancel
@closing = true
close_connection_after_writing
return
end
id = Time.now.to_i
@heartbeats << id
send_message(event: :heartbeat, data: id)
end
def log(level, message)
Rails.logger.__send__(level, "Node(#{@connection_id})") { message }
end
end
end
collector: unregister connection when dead
require "set"
module Collector
  # EventMachine connection for a single collector node.
  #
  # A node must first authenticate with the shared secret; afterwards it
  # streams events (tweet, favorite, retweet, ...) which are pushed onto the
  # queue supplied at construction.  Liveness is checked with periodic
  # heartbeats once the node is registered with NodeManager.
  class NodeConnection < EM::Connection
    attr_reader :connection_id
    attr_accessor :activated_time

    # Monotonically increasing id shared by all connections.
    @@_id = 0

    def initialize(queue)
      @unpacker = MessagePack::Unpacker.new(symbolize_keys: true)
      @connection_id = (@@_id += 1)
      @authenticated = false
      @closing = false
      @activated_time = nil
      @queue = queue
      # Heartbeat ids sent but not yet acknowledged by the node.
      @heartbeats = Set.new
    end

    def unbind
      if @closing
        log(:info, "Connection was closed.")
      else
        # Unexpected drop: make sure the node is removed from the manager.
        log(:warn, "Connection was closed unexpectedly.")
        NodeManager.unregister(self)
      end
    end

    def receive_data(data)
      @unpacker.feed_each(data) do |msg|
        unless msg.is_a?(Hash) && msg[:event]
          log(:warn, "Unknown message: #{msg}")
          # FIX: send_message takes a single payload hash; the previous
          # send_message(:error, text: ...) raised ArgumentError.
          send_message(event: :error, data: "Unknown message.")
          close_connection_after_writing
          next
        end
        parse_message(msg)
      end
    end

    # Tells the node to start collecting for the given account.
    def register_account(account)
      send_message(event: :register,
                   data: { id: account.id,
                           consumer_key: Settings.consumer.key,
                           consumer_secret: Settings.consumer.secret,
                           oauth_token: account.oauth_token,
                           oauth_token_secret: account.oauth_token_secret,
                           user_id: account.user_id })
      log(:info, "Registered account ##{account.id}/#{account.user_id}")
    end

    # Tells the node to stop collecting for the given account.
    def unregister_account(account)
      send_message(event: :unregister,
                   data: { id: account.id,
                           user_id: account.user_id })
      log(:info, "Unregistered account ##{account.id}/#{account.user_id}")
    end

    private

    # Dispatches a decoded message.  Unauthenticated clients may only send
    # "auth"; anything else terminates the connection.
    def parse_message(msg)
      unless @authenticated
        if msg[:event] == "auth"
          authenticate_node(msg[:data])
        else
          log(:error, "Unauthenticated client: #{msg}")
          send_message(event: :error, data: "You aren't authenticated.")
          close_connection_after_writing
        end
        return
      end
      case msg[:event]
      when "unauthorized"
        log(:info, "Received unauthorized: ##{msg[:data][:id]}/#{msg[:data][:user_id]}")
        @queue.push_unauthorized(msg)
      when "user"
        log(:debug, "Received user: #{msg[:identifier]}")
        @queue.push_user(msg)
      when "tweet"
        log(:debug, "Received tweet: #{msg[:identifier]}")
        @queue.push_tweet(msg)
      when "favorite"
        log(:debug, "Receive favorite: #{msg[:identifier]}")
        @queue.push_favorite(msg)
      when "unfavorite"
        log(:debug, "Receive unfavorite: #{msg[:identifier]}")
        @queue.push_unfavorite(msg)
      when "retweet"
        log(:debug, "Receive retweet: #{msg[:identifier]}")
        @queue.push_retweet(msg)
      when "delete"
        log(:debug, "Receive delete: #{msg[:identifier]}")
        @queue.push_delete(msg)
      when "exit"
        log(:info, "Closing this connection...")
        @closing = true
        NodeManager.unregister(self)
      when "heartbeat"
        log(:debug, "Heartbeat reply: #{msg[:data]}")
        @heartbeats.delete(msg[:data])
      else
        log(:warn, "Unknown message: #{msg.inspect}")
        send_message(event: :error, data: "Unknown message.")
      end
    end

    # Validates the shared secret and, on success, registers the node and
    # starts the 10-second heartbeat timer.
    def authenticate_node(data)
      data = {} unless data.is_a?(Hash) # tolerate malformed auth payloads
      if data.key?(:secret_key) && Settings.collector.secret_key == data[:secret_key]
        @authenticated = true
        log(:info, "Connection authenticated.")
        send_message(event: :auth, data: nil)
        NodeManager.register(self)
        @heartbeat_timer = EM.add_periodic_timer(10, &method(:heartbeat))
      else
        # FIX: `secret_key` was an undefined local variable (NameError);
        # log the value the client actually sent instead.
        log(:warn, "Invalid secret_key: #{data[:secret_key].inspect}")
        send_message(event: :error, data: "Invalid secret_key.")
        close_connection_after_writing
      end
    end

    def send_message(data)
      send_data(data.to_msgpack)
    end

    # Periodic liveness probe: gives up when 3 heartbeats (30s) go
    # unanswered.  @closing suppresses the unregister in #unbind, so the
    # node must be unregistered here before closing.
    def heartbeat
      if @heartbeats.size > 2 # 30 sec
        log(:warn, "Node is dead.")
        NodeManager.unregister(self)
        @heartbeat_timer.cancel
        @closing = true
        close_connection_after_writing
        return
      end
      id = Time.now.to_i
      @heartbeats << id
      send_message(event: :heartbeat, data: id)
    end

    def log(level, message)
      Rails.logger.__send__(level, "Node(#{@connection_id})") { message }
    end
  end
end
|
module CommentExtractor
  # Gem version.  +Version+ is kept as a backwards-compatible alias for
  # callers that still reference the old constant name.
  VERSION = '1.0.0'
  Version = VERSION
end
Bump up version
module CommentExtractor
  # Gem version.  +Version+ is kept as a backwards-compatible alias for
  # callers that still reference the old constant name.
  VERSION = '1.0.1'
  Version = VERSION
end
|
module CopycopterClient
  # Version of the Copycopter server API this client speaks.
  API_VERSION = "2.0"

  # Version of this client library.
  VERSION = "1.0.0.beta8"
end
beta9: better syncing in dev/staging
module CopycopterClient
  # Version of the Copycopter server API this client speaks.
  API_VERSION = "2.0"

  # Version of this client library.
  VERSION = "1.0.0.beta9"
end
|
module CORL
module Plugin
# Source configuration plugin: wraps a CORL project (local directory or
# remote URL) and persists configuration properties through it.  The
# property store itself comes from Mixin::SubConfig; underscore-prefixed
# helpers (_get/_set/_init/_delete) belong to that framework.
class Configuration < Nucleon.plugin_class(:nucleon, :base)
include Mixin::SubConfig
#---
# Plugin instances can be addressed either by name or by directory.
def self.register_ids
[ :name, :directory ]
end
#-----------------------------------------------------------------------------
# Configuration plugin interface
# Builds the backing project from the plugin options (skipped on reload)
# and points the sub configuration at its location.
def normalize(reload)
super
logger.debug("Initializing source sub configuration")
init_subconfig(true) unless reload
logger.info("Setting source configuration project")
@project = CORL.project(extended_config(:project, {
:directory => _delete(:directory, Dir.pwd),
:url => _delete(:url),
:revision => _delete(:revision),
:create => _delete(:create, false),
:pull => true,
:internal_ip => CORL.public_ip, # Needed for seeding Vagrant VMs
:manage_ignore => _delete(:manage_ignore, true),
:new => true
}), _delete(:project_provider, nil)) unless reload
_init(:autoload, true)
_init(:autosave, false)
yield if block_given?
set_location(@project)
end
#---
def remove_plugin
CORL.remove_plugin(@project)
end
#-----------------------------------------------------------------------------
# Checks
# Persistence is delegated entirely to the backing project.
def can_persist?
project.can_persist?
end
#-----------------------------------------------------------------------------
# Property accessors / modifiers
def project
@project
end
#---
def directory
project.directory
end
#---
def cache
project.cache
end
#---
def ignore(files)
project.ignore(files)
end
#---
def autoload(default = false)
_get(:autoload, default)
end
# NOTE(review): `test` is presumably a framework truthiness-coercion
# helper inherited from the Nucleon base plugin — confirm.
def autoload=autoload
_set(:autoload, test(autoload))
end
#---
def autosave(default = false)
_get(:autosave, default)
end
def autosave=autosave
_set(:autosave, test(autosave))
end
#-----------------------------------------------------------------------------
# Points the backing project at a new location, given either another
# project (its directory is used) or a path string/symbol.
def set_location(directory)
if directory && directory.is_a?(CORL::Plugin::Project)
logger.debug("Setting source project directory from other project at #{directory.directory}")
project.set_location(directory.directory)
elsif directory && directory.is_a?(String) || directory.is_a?(Symbol)
logger.debug("Setting source project directory to #{directory}")
project.set_location(directory.to_s)
end
end
#-----------------------------------------------------------------------------
# Mutators below persist immediately when autosave is enabled.
def set(keys, value = '', options = {})
super(keys, value, true)
save(options) if initialized? && autosave
end
#---
def delete(keys, options = {})
super(keys)
save(options) if initialized? && autosave
end
#---
def clear(options = {})
super
save(options) if initialized? && autosave
end
#-----------------------------------------------------------------------------
def remote(name)
project.remote(name)
end
#---
def set_remote(name, location)
project.set_remote(name, location)
end
#-----------------------------------------------------------------------------
# Import / Export
def import(properties, options = {})
super(properties, options)
save(options) if autosave
end
#-----------------------------------------------------------------------------
# Configuration loading / saving
# Template method: the provider supplies the actual parse step as a block
# yielding (method_config, properties); parsed properties are merged into
# the sub configuration.  Returns true/false.
def load(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:load, { :config => method_config })
logger.info("Loading source configuration")
config.clear if method_config.get(:override, false)
properties = Config.new({}, {}, true, false)
success = yield(method_config, properties) if block_given?
if success && ! properties.export.empty?
logger.debug("Source configuration parsed properties: #{properties}")
extension(:load_process, { :properties => properties, :config => method_config })
config.import(properties, method_config)
end
end
else
logger.warn("Loading of source configuration failed")
end
success
end
#---
# Template method: the provider supplies the actual serialization step as
# a block yielding method_config.  Returns true/false.
def save(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:save, { :config => method_config })
logger.info("Saving source configuration")
logger.debug("Source configuration properties: #{config.export}")
success = yield(method_config) if block_given?
end
else
logger.warn("Can not save source configuration")
end
success
end
#---
# Clears the sub configuration and delegates removal of the persisted
# form to the provider block.  Returns true/false.
def remove(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:delete, { :config => method_config })
logger.info("Removing source configuration")
config.clear
success = yield(method_config) if block_given?
end
else
logger.warn("Can not remove source configuration")
end
success
end
#---
# Attaches auxiliary data; the provider block returns the new location
# (nil when attachment is not possible).
def attach(type, name, data, options = {})
method_config = Config.ensure(options)
new_location = nil
if can_persist?
if extension_check(:attach, { :config => method_config })
logger.info("Attaching data to source configuration")
new_location = yield(method_config) if block_given?
end
else
logger.warn("Can not attach data to source configuration")
end
new_location
end
#---
# Removes previously attached data; the provider block returns the list
# of affected locations (empty when nothing was removed).
def delete_attachments(type, ids, options = {})
method_config = Config.ensure(options)
locations = []
if can_persist?
if extension_check(:remove_attachments, { :config => method_config })
logger.info("Removing attached data from source configuration")
locations = yield(method_config) if block_given?
end
else
logger.warn("Can not remove attached data from source configuration")
end
locations
end
end
end
end
Add cache load and save to the base configuration plugin load and save methods.
module CORL
module Plugin
# Source configuration plugin: wraps a CORL project (local directory or
# remote URL) and persists configuration properties through it.  The
# property store itself comes from Mixin::SubConfig; underscore-prefixed
# helpers (_get/_set/_init/_delete) belong to that framework.  This
# revision also keeps the project cache in sync during load/save.
class Configuration < Nucleon.plugin_class(:nucleon, :base)
include Mixin::SubConfig
#---
# Plugin instances can be addressed either by name or by directory.
def self.register_ids
[ :name, :directory ]
end
#-----------------------------------------------------------------------------
# Configuration plugin interface
# Builds the backing project from the plugin options (skipped on reload)
# and points the sub configuration at its location.
def normalize(reload)
super
logger.debug("Initializing source sub configuration")
init_subconfig(true) unless reload
logger.info("Setting source configuration project")
@project = CORL.project(extended_config(:project, {
:directory => _delete(:directory, Dir.pwd),
:url => _delete(:url),
:revision => _delete(:revision),
:create => _delete(:create, false),
:pull => true,
:internal_ip => CORL.public_ip, # Needed for seeding Vagrant VMs
:manage_ignore => _delete(:manage_ignore, true),
:new => true
}), _delete(:project_provider, nil)) unless reload
_init(:autoload, true)
_init(:autosave, false)
yield if block_given?
set_location(@project)
end
#---
def remove_plugin
CORL.remove_plugin(@project)
end
#-----------------------------------------------------------------------------
# Checks
# Persistence is delegated entirely to the backing project.
def can_persist?
project.can_persist?
end
#-----------------------------------------------------------------------------
# Property accessors / modifiers
def project
@project
end
#---
def directory
project.directory
end
#---
def cache
project.cache
end
#---
def ignore(files)
project.ignore(files)
end
#---
def autoload(default = false)
_get(:autoload, default)
end
# NOTE(review): `test` is presumably a framework truthiness-coercion
# helper inherited from the Nucleon base plugin — confirm.
def autoload=autoload
_set(:autoload, test(autoload))
end
#---
def autosave(default = false)
_get(:autosave, default)
end
def autosave=autosave
_set(:autosave, test(autosave))
end
#-----------------------------------------------------------------------------
# Points the backing project at a new location, given either another
# project (its directory is used) or a path string/symbol.
def set_location(directory)
if directory && directory.is_a?(CORL::Plugin::Project)
logger.debug("Setting source project directory from other project at #{directory.directory}")
project.set_location(directory.directory)
elsif directory && directory.is_a?(String) || directory.is_a?(Symbol)
logger.debug("Setting source project directory to #{directory}")
project.set_location(directory.to_s)
end
end
#-----------------------------------------------------------------------------
# Mutators below persist immediately when autosave is enabled.
def set(keys, value = '', options = {})
super(keys, value, true)
save(options) if initialized? && autosave
end
#---
def delete(keys, options = {})
super(keys)
save(options) if initialized? && autosave
end
#---
def clear(options = {})
super
save(options) if initialized? && autosave
end
#-----------------------------------------------------------------------------
def remote(name)
project.remote(name)
end
#---
def set_remote(name, location)
project.set_remote(name, location)
end
#-----------------------------------------------------------------------------
# Import / Export
def import(properties, options = {})
super(properties, options)
save(options) if autosave
end
#-----------------------------------------------------------------------------
# Configuration loading / saving
# Template method: the provider supplies the actual parse step as a block
# yielding (method_config, properties).  On success the project cache is
# loaded as well; the overall result reflects both steps.
def load(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:load, { :config => method_config })
logger.info("Loading source configuration")
config.clear if method_config.get(:override, false)
properties = Config.new({}, {}, true, false)
success = yield(method_config, properties) if block_given?
if success && ! properties.export.empty?
logger.debug("Source configuration parsed properties: #{properties}")
extension(:load_process, { :properties => properties, :config => method_config })
config.import(properties, method_config)
end
end
success = cache.load if success
else
logger.warn("Loading of source configuration failed")
end
success
end
#---
# Template method: the provider supplies the actual serialization step as
# a block yielding method_config.  On success the project cache is saved
# as well; the overall result reflects both steps.
def save(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:save, { :config => method_config })
logger.info("Saving source configuration")
logger.debug("Source configuration properties: #{config.export}")
success = yield(method_config) if block_given?
end
success = cache.save if success
else
logger.warn("Can not save source configuration")
end
success
end
#---
# Clears the sub configuration and delegates removal of the persisted
# form to the provider block.  Returns true/false.
def remove(options = {})
method_config = Config.ensure(options)
success = false
if can_persist?
if extension_check(:delete, { :config => method_config })
logger.info("Removing source configuration")
config.clear
success = yield(method_config) if block_given?
end
else
logger.warn("Can not remove source configuration")
end
success
end
#---
# Attaches auxiliary data; the provider block returns the new location
# (nil when attachment is not possible).
def attach(type, name, data, options = {})
method_config = Config.ensure(options)
new_location = nil
if can_persist?
if extension_check(:attach, { :config => method_config })
logger.info("Attaching data to source configuration")
new_location = yield(method_config) if block_given?
end
else
logger.warn("Can not attach data to source configuration")
end
new_location
end
#---
# Removes previously attached data; the provider block returns the list
# of affected locations (empty when nothing was removed).
def delete_attachments(type, ids, options = {})
method_config = Config.ensure(options)
locations = []
if can_persist?
if extension_check(:remove_attachments, { :config => method_config })
logger.info("Removing attached data from source configuration")
locations = yield(method_config) if block_given?
end
else
logger.warn("Can not remove attached data from source configuration")
end
locations
end
end
end
end
|
require 'active_support/inflector'
require 'active_support/core_ext/string'
module DecentExposure
  # Derives naming variants (constant, parameter key, singular/plural
  # forms) from a resource name or class.
  class Inflector
    attr_reader :string, :original
    alias name string

    def initialize(name)
      @original = name
      @string = name.to_s.demodulize
    end

    # Resolves the constant this name refers to.  Module/Class inputs are
    # returned untouched; strings are classified and looked up in +context+.
    def constant(context = Object)
      return original if original.is_a?(Module)

      ConstantResolver.new(context, string.classify).constant
    end

    # Foreign-key style parameter name, e.g. "article_id".
    def parameter
      "#{singular}_id"
    end

    def singular
      @singular ||= string.singularize.parameterize
    end

    def plural
      string.pluralize
    end
    alias collection plural

    # True when the name is already in plural form (and countable).
    def plural?
      !uncountable? && string == plural
    end

    # Words whose singular and plural forms coincide (e.g. "sheep").
    def uncountable?
      singular == plural
    end

    private

    # Looks up +constant_name+ within +context+, falling back to the
    # enclosing namespace.
    ConstantResolver = Struct.new :context, :constant_name do
      def constant
        immediate_child || namespace_qualified
      end

      private

      def immediate_child
        found = context.constants.find { |c| c.to_s == constant_name }
        context.const_get(found) if found
      end

      def namespace_qualified
        parent = context.to_s.deconstantize.constantize
        parent.const_get(constant_name)
      end
    end
  end
end
Reimplement 'deconstantize'

String#deconstantize was only introduced in Rails 3.2, so reimplement its behavior locally to stay compatible with earlier Rails versions.
require 'active_support/inflector'
require 'active_support/core_ext/string'
module DecentExposure
  # Derives naming variants (constant, parameter key, singular/plural
  # forms) from a resource name or class.
  class Inflector
    attr_reader :string, :original
    alias name string

    def initialize(name)
      @original = name
      @string = name.to_s.demodulize
    end

    # Resolves the constant this name refers to.  Module/Class inputs are
    # returned untouched; strings are classified and looked up in +context+.
    def constant(context = Object)
      return original if original.is_a?(Module)

      ConstantResolver.new(context, string.classify).constant
    end

    # Foreign-key style parameter name, e.g. "article_id".
    def parameter
      "#{singular}_id"
    end

    def singular
      @singular ||= string.singularize.parameterize
    end

    def plural
      string.pluralize
    end
    alias collection plural

    # True when the name is already in plural form (and countable).
    def plural?
      !uncountable? && string == plural
    end

    # Words whose singular and plural forms coincide (e.g. "sheep").
    def uncountable?
      singular == plural
    end

    private

    # Looks up +constant_name+ within +context+, falling back to the
    # enclosing namespace.
    ConstantResolver = Struct.new :context, :constant_name do
      def constant
        immediate_child || namespace_qualified
      end

      private

      def immediate_child
        found = context.constants.find { |c| c.to_s == constant_name }
        context.const_get(found) if found
      end

      def namespace_qualified
        namespace.const_get(constant_name)
      end

      # Everything before the last '::' (hand-rolled deconstantize, which
      # only exists in Rails >= 3.2).
      def namespace
        path = context.to_s
        cut = path.rindex('::') || 0
        path[0...cut].constantize
      end
    end
  end
end
|
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/main'
require 'metasm/parse_c'
module Metasm
module C
class Parser
  # Destructively precompiles the toplevel AST through a fresh Compiler
  # and returns self for chaining.
  def precompile
    compiler = Compiler.new(self)
    @toplevel.precompile(compiler)
    self
  end
end
# each CPU defines a subclass of this one
class Compiler
  # an ExeFormat (mostly used for unique label creation)
  attr_accessor :exeformat
  # the C Parser (destroyed by compilation)
  attr_accessor :parser
  # an array of assembler statements (strings)
  attr_accessor :source
  # list of unique labels generated (to recognize user-defined ones)
  attr_accessor :auto_label_list
  # statement currently being compiled (for error reporting)
  attr_accessor :curexpr

  # allows 'raise self' (eg struct.offsetof)
  def exception(msg='EOF unexpected')
    ParseError.new "near #@curexpr: #{msg}"
  end

  # creates a new CCompiler from an ExeFormat and a C Parser
  def initialize(parser, exeformat=ExeFormat.new, source=[])
    @parser, @exeformat, @source = parser, exeformat, source
    @auto_label_list = {}
  end

  # returns a new unique label name and records it in auto_label_list
  def new_label(base='')
    lbl = @exeformat.new_label base
    @auto_label_list[lbl] = true
    lbl
  end

  def toplevel ; @parser.toplevel end
  def typesize ; @parser.typesize end
  def sizeof(*a) @parser.sizeof(*a) end

  # compiles the c parser toplevel to assembler statements in self.source (::Array of ::String)
  #
  # starts by precompiling parser.toplevel (destructively):
  # static symbols are converted to toplevel ones, as nested functions
  # uses an ExeFormat (the argument) to create unique label/variable names
  #
  # remove typedefs/enums
  # CExpressions: all expr types are converted to __int8/__int16/__int32/__int64 (sign kept) (incl. ptr), + void
  # struct member dereference/array indexes are converted to *(ptr + off)
  # coma are converted to 2 statements, ?: are converted to If
  # :|| and :&& are converted to If + assignment to temporary
  # immediate quotedstrings/floats are converted to references to const static toplevel
  # postincrements are replaced by a temporary (XXX arglist)
  # compound statements are unnested
  # Asm are kept (TODO precompile clobber types)
  # Declarations: initializers are converted to separate assignment CExpressions
  # Blocks are kept unless empty
  # structure dereferences/array indexing are converted to *(ptr + offset)
  # While/For/DoWhile/Switch are converted to If/Goto
  # Continue/Break are converted to Goto
  # Cases are converted to Labels during Switch conversion
  # Label statements are removed
  # Return: 'return <foo>;' => 'return <foo>; goto <end_of_func>;', 'return;' => 'goto <eof>;'
  # If: 'if (a) b; else c;' => 'if (a) goto l1; { c; }; goto l2; l1: { b; } l2:'
  # && and || in condition are expanded to multiple If
  # functions returning struct are precompiled (in Declaration/CExpression/Return)
  #
  # in a second phase, unused labels are removed from functions, as noop goto (goto x; x:)
  # dead code is removed ('goto foo; bar; baz:' => 'goto foo; baz:') (TODO)
  #
  # after that, toplevel is no longer valid C (bad types, blocks moved...)
  #
  # then toplevel statements are sorted (.text, .data, .rodata, .bss) and compiled into asm statements in self.source
  #
  # returns the asm source in a single string
  def compile
    cf = @exeformat.unique_labels_cache.keys & @auto_label_list.keys
    raise "compile_c name conflict: #{cf.inspect}" if not cf.empty?
    @exeformat.unique_labels_cache.update @auto_label_list
    @parser.toplevel.precompile(self)

    # reorder statements (arrays of Variables) following exe section typical order
    funcs, rwdata, rodata, udata = [], [], [], []
    @parser.toplevel.statements.each { |st|
      if st.kind_of? Asm
        @source << st.body
        next
      end
      raise 'non-declaration at toplevel! ' + st.inspect if not st.kind_of? Declaration
      v = st.var
      if v.type.kind_of? Function
        funcs << v if v.initializer # no initializer == storage :extern
      elsif v.storage == :extern
      elsif v.initializer
        if v.type.qualifier.to_a.include?(:const) or
            (v.type.kind_of? Array and v.type.type.qualifier.to_a.include?(:const))
          rodata << v
        else
          rwdata << v
        end
      else
        udata << v
      end
    }

    if not funcs.empty?
      @exeformat.compile_setsection @source, '.text'
      funcs.each { |func| c_function(func) }
      c_program_epilog
    end

    align = 1
    if not rwdata.empty?
      @exeformat.compile_setsection @source, '.data'
      rwdata.each { |data| align = c_idata(data, align) }
    end

    if not rodata.empty?
      @exeformat.compile_setsection @source, '.rodata'
      rodata.each { |data| align = c_idata(data, align) }
    end

    if not udata.empty?
      @exeformat.compile_setsection @source, '.bss'
      udata.each { |data| align = c_udata(data, align) }
    end

    # needed to allow asm parser to use our autogenerated label names
    @exeformat.unique_labels_cache.delete_if { |k, v| @auto_label_list[k] }

    @source.join("\n")
  end

  # compiles a C function +func+ to asm source into the array of strings +str+
  # in a first pass the stack variable offsets are computed,
  # then each statement is compiled in turn
  def c_function(func)
    # must wait the Declaration to run the CExpr for dynamic auto offsets,
    # and must run those statements once only
    # TODO alloc a stack variable to maintain the size for each dynamic array
    # TODO offset of arguments
    # TODO nested function
    c_init_state(func)

    # hide the full @source while compiling, then add prolog/epilog (saves 1 pass)
    @source << '' << "#{func.name}:"
    presource, @source = @source, []

    c_block(func.initializer)

    tmpsource, @source = @source, presource
    c_prolog
    @source.concat tmpsource
    c_epilog
    @source << ''
  end

  # compiles each statement of a (precompiled) block in turn
  def c_block(blk)
    c_block_enter(blk)
    blk.statements.each { |stmt|
      case stmt
      when CExpression; c_cexpr(stmt)
      when Declaration; c_decl(stmt.var)
      when If; c_ifgoto(stmt.test, stmt.bthen.target)
      when Goto; c_goto(stmt.target)
      when Label; c_label(stmt.name)
      when Return; c_return(stmt.value)
      when Asm; c_asm(stmt)
      when Block; c_block(stmt)
      else raise
      end
    }
    c_block_exit(blk)
  end

  # hooks for subclasses (eg scoped register allocation)
  def c_block_enter(blk)
  end

  def c_block_exit(blk)
  end

  def c_label(name)
    @source << "#{name}:"
  end

  # fills @state.offset (empty hash)
  # automatic variable => stack offset, (recursive)
  # offset is an ::Integer or a CExpression (dynamic array)
  # assumes offset 0 is a ptr-size-aligned address
  # TODO registerize automatic variables
  def c_reserve_stack(block, off = 0)
    block.statements.each { |stmt|
      case stmt
      when Declaration
        next if stmt.var.type.kind_of? Function
        off = c_reserve_stack_var(stmt.var, off)
        @state.offset[stmt.var] = off
      when Block
        c_reserve_stack(stmt, off)
        # do not update off, non-nested subblocks can overlap
      end
    }
  end

  # computes the new stack offset for var
  # off is either an offset from stack start (:ptr-size-aligned) or
  # a CExpression [[[expr, +, 7], &, -8], +, off]
  def c_reserve_stack_var(var, off)
    if (arr_type = var.type).kind_of? Array and (arr_sz = arr_type.length).kind_of? CExpression
      # dynamic array !
      arr_sz = CExpression.new(arr_sz, :*, sizeof(nil, arr_type.type),
           BaseType.new(:long, :unsigned)).precompile_inner(@parser, nil)
      off = CExpression.new(arr_sz, :+, off, arr_sz.type)
      off = CExpression.new(off, :+, 7, off.type)
      # FIX: 8-byte alignment mask is ~7 == -8; masking with -7 is not a
      # power-of-two alignment.
      off = CExpression.new(off, :&, -8, off.type)
      CExpression.new(off, :+, 0, off.type)
    else
      al = var.type.align(@parser)
      sz = sizeof(var)
      case off
      when CExpression; CExpression.new(off.lexpr, :+, ((off.rexpr + sz + al - 1) / al * al), off.type)
      else (off + sz + al - 1) / al * al
      end
    end
  end

  # here you can add thing like stubs for PIC code
  def c_program_epilog
  end

  # compiles a C static data definition into an asm string
  # returns the new alignment value
  def c_idata(data, align)
    w = data.type.align(@parser)
    @source << ".align #{align = w}" if w > align

    @source << data.name.dup
    len = c_idata_inner(data.type, data.initializer)
    len %= w
    len == 0 ? w : len
  end

  # dumps an anonymous variable definition, appending to the last line of source
  # source.last is a label name or is empty before calling here
  # return the length of the data written
  def c_idata_inner(type, value)
    case type
    when BaseType
      value ||= 0

      if type.name == :void
        @source.last << ':' if not @source.last.empty?
        return 0
      end

      @source.last <<
      case type.name
      when :__int8;  ' db '
      when :__int16; ' dw '
      when :__int32; ' dd '
      when :__int64; ' dq '
      when :ptr; " d#{%w[x b w x d x x x q][@parser.typesize[type.name]]} "
      when :float;  ' df '  # TODO
      when :double; ' dfd '
      when :longdouble; ' dfld '
      else raise "unknown idata type #{type.inspect} #{value.inspect}"
      end

      @source.last << c_idata_inner_cexpr(value)

      @parser.typesize[type.name]

    when Struct
      value ||= []
      @source.last << ':' if not @source.last.empty?
      # could .align here, but if there is our label name just before, it should have been .aligned too..
      raise "unknown struct initializer #{value.inspect}" if not value.kind_of? ::Array
      sz = 0
      type.members.zip(value).each { |m, v|
        if m.name and wsz = type.offsetof(@parser, m.name) and sz < wsz
          @source << "db #{wsz-sz} dup(?)"
          # FIX: account for the padding just emitted, otherwise later
          # members re-pad and the returned length undercounts.
          sz = wsz
        end
        @source << ''
        flen = c_idata_inner(m.type, v)
        sz += flen
      }
      sz

    when Union
      value ||= []
      @source.last << ':' if not @source.last.empty?
      len = sizeof(nil, type)
      raise "unknown union initializer #{value.inspect}" if not value.kind_of? ::Array
      idx = value.rindex(value.compact.last) || 0
      raise "empty union initializer" if not idx
      wlen = c_idata_inner(type.members[idx].type, value[idx])
      # FIX: the previous "#{'0' * (len - wlen) * ', '}" raised TypeError
      # (String * String); pad the union tail with zero bytes instead.
      @source << "db #{len - wlen} dup(0)" if wlen < len
      len

    when Array
      value ||= []
      if value.kind_of? CExpression and not value.op and value.rexpr.kind_of? ::String
        elen = sizeof(nil, value.type.type)
        @source.last <<
        case elen
        when 1; ' db '
        when 2; ' dw '
        else raise 'bad char* type ' + value.inspect
        end << value.rexpr.inspect

        len = type.length || (value.rexpr.length+1)
        if len > value.rexpr.length
          @source.last << (', 0' * (len - value.rexpr.length))
        end
        elen * len

      elsif value.kind_of? ::Array
        @source.last << ':' if not @source.last.empty?
        len = type.length || value.length
        value.each { |v|
          @source << ''
          c_idata_inner(type.type, v)
        }
        len -= value.length
        if len > 0
          @source << " db #{len * sizeof(nil, type.type)} dup(0)"
        end
        sizeof(nil, type.type) * len

      else raise "unknown static array initializer #{value.inspect}"
      end
    end
  end

  # renders a constant initializer expression as an asm operand string
  def c_idata_inner_cexpr(expr)
    expr = expr.reduce(@parser) if expr.kind_of? CExpression
    case expr
    when ::Integer; (expr >= 4096) ? ('0x%X' % expr) : expr.to_s
    when ::Numeric; expr.to_s
    when Variable
      case expr.type
      when Array; expr.name
      else c_idata_inner_cexpr(expr.initializer)
      end
    when CExpression
      if not expr.lexpr
        case expr.op
        when :&
          case expr.rexpr
          when Variable; expr.rexpr.name
          else raise 'unhandled addrof in initializer ' + expr.rexpr.inspect
          end
        #when :*
        when :+; c_idata_inner_cexpr(expr.rexpr)
        when :-; ' -' << c_idata_inner_cexpr(expr.rexpr)
        when nil
          e = c_idata_inner_cexpr(expr.rexpr)
          if expr.rexpr.kind_of? CExpression
            e = '(' << e << " & 0#{'ff'*sizeof(expr)}h)"
          end
          e
        else raise 'unhandled initializer expr ' + expr.inspect
        end
      else
        case expr.op
        when :+, :-, :*, :/, :%, :<<, :>>, :&, :|, :^
          e = '(' << c_idata_inner_cexpr(expr.lexpr) <<
              expr.op.to_s << c_idata_inner_cexpr(expr.rexpr) << ')'
          if expr.type.integral?
            # db are unsigned
            e = '(' << e << " & 0#{'ff'*sizeof(expr)}h)"
          end
          e
        #when :'.'
        #when :'->'
        #when :'[]'
        else raise 'unhandled initializer expr ' + expr.inspect
        end
      end
    else raise 'unhandled initializer ' + expr.inspect
    end
  end

  # dumps an uninitialized (.bss) variable definition
  # returns the new alignment value
  def c_udata(data, align)
    @source << "#{data.name} "
    @source.last <<
    case data.type
    when BaseType
      len = @parser.typesize[data.type.name]
      case data.type.name
      when :__int8;  'db ?'
      when :__int16; 'dw ?'
      when :__int32; 'dd ?'
      when :__int64; 'dq ?'
      else "db #{len} dup(?)"
      end
    else
      len = sizeof(data)
      "db #{len} dup(?)"
    end
    len %= align
    len == 0 ? align : len
  end

  # hook for subclasses to reject/rename reserved identifiers
  def check_reserved_name(var)
  end
end
class Statement
  # all Statements/Declaration must define a precompile(compiler, scope) method
  # it must append itself to scope.statements

  # Wraps this statement in a fresh Block attached to +scope+.
  def precompile_make_block(scope)
    wrapper = Block.new(scope)
    wrapper.statements << self
    wrapper
  end
end
class Block
# precompile all statements, then simplifies symbols/structs types
def precompile(compiler, scope=nil)
stmts = @statements.dup
@statements.clear
stmts.each { |st|
# track the current statement for compiler error messages
compiler.curexpr = st
st.precompile(compiler, self)
}
# cleanup declarations
@symbol.delete_if { |n, s| not s.kind_of? Variable }
@struct.delete_if { |n, s| not s.kind_of? Union }
@symbol.each_value { |var|
CExpression.precompile_type(compiler, self, var, true)
}
@struct.each_value { |var|
next if not var.members
var.members.each { |m|
CExpression.precompile_type(compiler, self, m, true)
}
}
# empty precompiled blocks are dropped from the parent scope
scope.statements << self if scope and not @statements.empty?
end
# removes unused labels, and in-place goto (goto toto; toto:)
# two passes over the same shared +list+: collect, then delete
def precompile_optimize
list = []
precompile_optimize_inner(list, 1)
precompile_optimize_inner(list, 2)
end
# step 1: list used labels/unused goto
# step 2: remove unused labels
def precompile_optimize_inner(list, step)
lastgoto = nil
hadref = false
# recursively records label references found inside CExpressions
walk = lambda { |expr|
next if not expr.kind_of? CExpression
# gcc's unary && support
if not expr.op and not expr.lexpr and expr.rexpr.kind_of? Label
list << expr.rexpr.name
else
walk[expr.lexpr]
if expr.rexpr.kind_of? ::Array
expr.rexpr.each { |r| walk[r] }
else
walk[expr.rexpr]
end
end
}
@statements.dup.each { |s|
# a goto immediately followed by its own label is a no-op candidate
lastgoto = nil if not s.kind_of? Label
case s
when Block
s.precompile_optimize_inner(list, step)
@statements.delete s if step == 2 and s.statements.empty?
when CExpression; walk[s] if step == 1
when Label
case step
when 1
if lastgoto and lastgoto.target == s.name
list << lastgoto
list.delete s.name if not hadref
end
when 2; @statements.delete s if not list.include? s.name
end
when Goto, If
s.kind_of?(If) ? g = s.bthen : g = s
case step
when 1
hadref = list.include? g.target
lastgoto = g
list << g.target
when 2
if list.include? g
idx = @statements.index s
@statements.delete s
# keep the condition when it may have side effects
@statements[idx, 0] = s.test if s != g and not s.test.constant?
end
end
end
}
list
end
# noop
def precompile_make_block(scope) self end
# labels and the enclosing function are looked up lexically through
# @outer when not set on this block
def continue_label ; defined?(@continue_label) ? @continue_label : @outer.continue_label end
def continue_label=(l) @continue_label = l end
def break_label ; defined?(@break_label) ? @break_label : @outer.break_label end
def break_label=(l) @break_label = l end
def return_label ; defined?(@return_label) ? @return_label : @outer.return_label end
def return_label=(l) @return_label = l end
def nonauto_label=(l) @nonauto_label = l end
def nonauto_label ; defined?(@nonauto_label) ? @nonauto_label : @outer.nonauto_label end
def function ; defined?(@function) ? @function : @outer.function end
def function=(f) @function = f end
end
class Declaration
  def precompile(compiler, scope)
    # nested function definitions, statics and assembler-reserved names are
    # moved to toplevel under a fresh unique name; everything else is
    # registered in the current scope
    if (@var.type.kind_of? Function and @var.initializer and scope != compiler.toplevel) or @var.storage == :static or compiler.check_reserved_name(@var)
      # TODO fix label name in export table if __exported
      scope.symbol.delete @var.name
      old = @var.name
      # loop in case new_label happens to return the same name
      @var.name = compiler.new_label @var.name until @var.name != old
      compiler.toplevel.symbol[@var.name] = @var
      # TODO no pure inline if addrof(func) needed
      compiler.toplevel.statements << self unless @var.attributes.to_a.include? 'inline'
    else
      scope.symbol[@var.name] ||= @var
      appendme = true  # stays nil in the branch above: already appended to toplevel
    end
    if i = @var.initializer
      if @var.type.kind_of? Function
        # function definition: precompile the body
        if @var.type.type.kind_of? Struct
          # struct-returning function: prepend a hidden pointer-to-result argument
          s = @var.type.type
          v = Variable.new
          v.name = compiler.new_label('return_struct_ptr')
          v.type = Pointer.new(s)
          CExpression.precompile_type(compiler, scope, v)
          @var.type.args.unshift v
          @var.type.type = v.type
        end
        i.function = @var
        i.return_label = compiler.new_label('epilog')
        i.nonauto_label = {}
        i.precompile(compiler)
        Label.new(i.return_label).precompile(compiler, i)
        i.precompile_optimize
        # append now so that static dependencies are declared before us
        scope.statements << self if appendme and not @var.attributes.to_a.include? 'inline'
      elsif scope != compiler.toplevel and @var.storage != :static
        # automatic variable: turn the initializer into plain assignments
        scope.statements << self if appendme
        Declaration.precompile_dyn_initializer(compiler, scope, @var, @var.type, i)
        @var.initializer = nil
      else
        # static data: precompile the initializer in place
        scope.statements << self if appendme
        @var.initializer = Declaration.precompile_static_initializer(compiler, @var.type, i)
      end
    else
      scope.statements << self if appendme
    end
  end

  # turns an initializer to CExpressions in scope.statements
  def self.precompile_dyn_initializer(compiler, scope, var, type, init)
    case type = type.untypedef
    when Array
      # XXX TODO type.length may be dynamic !!
      case init
      when CExpression
        # char toto[] = "42"
        if not init.kind_of? CExpression or init.op or init.lexpr or not init.rexpr.kind_of? ::String
          raise "unknown initializer #{init.inspect} for #{var.inspect}"
        end
        # expand the string into per-character assignments (incl. trailing NUL)
        init = init.rexpr.unpack('C*') + [0]
        init.map! { |chr| CExpression.new(nil, nil, chr, type.type) }
        precompile_dyn_initializer(compiler, scope, var, type, init)
      when ::Array
        type.length ||= init.length
        # len is an Integer
        init.each_with_index { |it, idx|
          next if not it
          break if idx >= type.length
          idx = CExpression.new(nil, nil, idx, BaseType.new(:long, :unsigned))
          v = CExpression.new(var, :'[]', idx, type.type)
          precompile_dyn_initializer(compiler, scope, v, type.type, it)
        }
      else raise "unknown initializer #{init.inspect} for #{var.inspect}"
      end
    when Union
      case init
      when CExpression, Variable
        if init.type.untypedef.kind_of? BaseType
          # works for struct foo bar[] = {0}; ...
          type.members.each { |m|
            v = CExpression.new(var, :'.', m.name, m.type)
            precompile_dyn_initializer(compiler, scope, v, v.type, init)
          }
        elsif init.type.untypedef.kind_of? type.class
          CExpression.new(var, :'=', init, type).precompile(compiler, scope)
        else
          raise "bad initializer #{init.inspect} for #{var.inspect}"
        end
      when ::Array
        # member-by-member initialization
        init.each_with_index{ |it, idx|
          next if not it
          m = type.members[idx]
          v = CExpression.new(var, :'.', m.name, m.type)
          precompile_dyn_initializer(compiler, scope, v, m.type, it)
        }
      else raise "unknown initializer #{init.inspect} for #{var.inspect}"
      end
    else
      # scalar: a single assignment
      case init
      when CExpression
        CExpression.new(var, :'=', init, type).precompile(compiler, scope)
      else raise "unknown initializer #{init.inspect} for #{var.inspect}"
      end
    end
  end

  # returns a precompiled static initializer (eg string constants)
  def self.precompile_static_initializer(compiler, type, init)
    # TODO
    case type = type.untypedef
    when Array
      if init.kind_of? ::Array
        init.map { |i| precompile_static_initializer(compiler, type.type, i) }
      else
        init
      end
    when Union
      if init.kind_of? ::Array
        init.zip(type.members).map { |i, m| precompile_static_initializer(compiler, m.type, i) }
      else
        init
      end
    else
      if init.kind_of? CExpression and init = init.reduce(compiler) and init.kind_of? CExpression
        if not init.op and init.rexpr.kind_of? ::String
          # string constant: anonymize into a const static char[]
          v = Variable.new
          v.storage = :static
          v.name = 'char_' + init.rexpr.tr('^a-zA-Z', '')[0, 8]
          v.type = Array.new(type.type)
          v.type.length = init.rexpr.length + 1
          v.type.type.qualifier = [:const]
          v.initializer = CExpression.new(nil, nil, init.rexpr, type)
          Declaration.new(v).precompile(compiler, compiler.toplevel)
          init.rexpr = v
        end
        init.rexpr = precompile_static_initializer(compiler, init.rexpr.type, init.rexpr) if init.rexpr.kind_of? CExpression
        init.lexpr = precompile_static_initializer(compiler, init.lexpr.type, init.lexpr) if init.lexpr.kind_of? CExpression
      end
      init
    end
  end
end
class If
  def precompile(compiler, scope)
    # wrap a bare value into a CExpression
    expr = lambda { |e| e.kind_of?(CExpression) ? e : CExpression.new(nil, nil, e, e.type) }

    if @bthen.kind_of? Goto or @bthen.kind_of? Break or @bthen.kind_of? Continue
      # if () goto l; else b; => if () goto l; b;
      if belse
        t1 = @belse
        @belse = nil
      end

      # need to convert user-defined Goto target !
      @bthen.precompile(compiler, scope)
      @bthen = scope.statements.pop  # break => goto break_label
    elsif belse
      # if () a; else b; => if () goto then; b; goto end; then: a; end:
      t1 = @belse
      t2 = @bthen
      l2 = compiler.new_label('if_then')
      @bthen = Goto.new(l2)
      @belse = nil
      l3 = compiler.new_label('if_end')
    else
      # if () a; => if (!) goto end; a; end:
      t1 = @bthen
      l2 = compiler.new_label('if_end')
      @bthen = Goto.new(l2)
      @test = CExpression.negate(@test)
    end

    @test = expr[@test]
    case @test.op
    when :'&&'
      # if (c1 && c2) goto a; => if (!c1) goto b; if (c2) goto a; b:
      l1 = compiler.new_label('if_nand')
      If.new(CExpression.negate(@test.lexpr), Goto.new(l1)).precompile(compiler, scope)
      @test = expr[@test.rexpr]
      precompile(compiler, scope)
    when :'||'
      # if (c1 || c2) goto a; => if (c1) goto a; if (c2) goto a;
      l1 = compiler.new_label('if_or')
      If.new(expr[@test.lexpr], Goto.new(@bthen.target)).precompile(compiler, scope)
      @test = expr[@test.rexpr]
      precompile(compiler, scope)
    else
      @test = CExpression.precompile_inner(compiler, scope, @test)
      t = @test.reduce(compiler)
      if t.kind_of? ::Integer
        # constant condition: emit only the taken branch (labels are kept,
        # other statements may still jump to them)
        if t == 0
          Label.new(l1, nil).precompile(compiler, scope) if l1
          t1.precompile(compiler, scope) if t1
          Label.new(l2, nil).precompile(compiler, scope) if l2
          Label.new(l3, nil).precompile(compiler, scope) if l3
        else
          scope.statements << @bthen
          Label.new(l1, nil).precompile(compiler, scope) if l1
          Label.new(l2, nil).precompile(compiler, scope) if l2
          t2.precompile(compiler, scope) if t2
          Label.new(l3, nil).precompile(compiler, scope) if l3
        end
        return
      end
      scope.statements << self
    end

    # emit the out-of-line branches and their labels (locals set above;
    # nil in the recursive &&/|| frames, so no duplication)
    Label.new(l1, nil).precompile(compiler, scope) if l1
    t1.precompile(compiler, scope) if t1
    Goto.new(l3).precompile(compiler, scope) if l3
    Label.new(l2, nil).precompile(compiler, scope) if l2
    t2.precompile(compiler, scope) if t2
    Label.new(l3, nil).precompile(compiler, scope) if l3
  end
end
class For
  # Lowers 'for (init; test; iter) body' into labels and gotos:
  #   init
  #   top:      if (!test) goto break
  #             body
  #   continue: iter
  #             goto top
  #   break:
  #
  # Bug fixed: the previous lowering used the continue label as the loop-top
  # label (before the test) and never emitted a label before iter, so a
  # 'continue' inside the body jumped back to the test and SKIPPED the
  # iteration expression. C99 6.8.6.2 requires 'continue' in a for loop to
  # execute iter; the continue label must sit just before it.
  def precompile(compiler, scope)
    if init
      @init.precompile(compiler, scope)
      # declarations made in init become the scope for the rest of the loop
      scope = @init if @init.kind_of? Block
    end
    @body = @body.precompile_make_block scope
    @body.continue_label = compiler.new_label 'for_continue'
    @body.break_label = compiler.new_label 'for_break'
    loop_top = compiler.new_label 'for_test'
    Label.new(loop_top).precompile(compiler, scope)
    if test
      If.new(CExpression.negate(@test), Goto.new(@body.break_label)).precompile(compiler, scope)
    end
    @body.precompile(compiler, scope)
    # 'continue' lands here so iter still runs before re-testing
    Label.new(@body.continue_label).precompile(compiler, scope)
    if iter
      @iter.precompile(compiler, scope)
    end
    Goto.new(loop_top).precompile(compiler, scope)
    Label.new(@body.break_label).precompile(compiler, scope)
  end
end
class While
  # Lowers 'while (test) body' into labels and gotos:
  #   continue: if (!test) goto break; body; goto continue; break:
  # ('continue' re-evaluates the condition, which is correct for while)
  def precompile(compiler, scope)
    blk = @body.precompile_make_block(scope)
    @body = blk
    blk.continue_label = compiler.new_label 'while_continue'
    blk.break_label = compiler.new_label 'while_break'
    Label.new(blk.continue_label).precompile(compiler, scope)
    exit_check = If.new(CExpression.negate(@test), Goto.new(blk.break_label))
    exit_check.precompile(compiler, scope)
    blk.precompile(compiler, scope)
    Goto.new(blk.continue_label).precompile(compiler, scope)
    Label.new(blk.break_label).precompile(compiler, scope)
  end
end
class DoWhile
  # Lowers 'do body while (test)' into labels and gotos:
  #   start: body; continue: if (test) goto start; break:
  # ('continue' jumps straight to the condition, as C requires)
  def precompile(compiler, scope)
    blk = @body.precompile_make_block(scope)
    @body = blk
    blk.continue_label = compiler.new_label('dowhile_continue')
    blk.break_label = compiler.new_label('dowhile_break')
    top = compiler.new_label('dowhile_start')
    Label.new(top).precompile(compiler, scope)
    blk.precompile(compiler, scope)
    Label.new(blk.continue_label).precompile(compiler, scope)
    If.new(@test, Goto.new(top)).precompile(compiler, scope)
    Label.new(blk.break_label).precompile(compiler, scope)
  end
end
class Switch
  def precompile(compiler, scope)
    # evaluate the switch value once into a temporary register variable
    var = Variable.new
    var.storage = :register
    var.name = compiler.new_label('switch')
    var.type = @test.type
    var.initializer = @test
    CExpression.precompile_type(compiler, scope, var)
    Declaration.new(var).precompile(compiler, scope)

    @body = @body.precompile_make_block scope
    @body.break_label = compiler.new_label('switch_break')
    @body.precompile(compiler)
    default = @body.break_label  # updated by walk if a 'default:' exists

    # recursive lambda to change Case to Labels
    # dynamically creates the If sequence
    walk = lambda { |blk|
      blk.statements.each_with_index { |s, i|
        case s
        when Case
          label = compiler.new_label('case')
          if s.expr == 'default'
            default = label
          elsif s.exprup
            # 'case lo ... hi:' range (gcc extension): lo <= var && var <= hi
            If.new(CExpression.new(CExpression.new(var, :'>=', s.expr, BaseType.new(:int)), :'&&',
                CExpression.new(var, :'<=', s.exprup, BaseType.new(:int)),
                BaseType.new(:int)), Goto.new(label)).precompile(compiler, scope)
          else
            If.new(CExpression.new(var, :'==', s.expr, BaseType.new(:int)),
                Goto.new(label)).precompile(compiler, scope)
          end
          blk.statements[i] = Label.new(label)
        when Block
          walk[s]
        end
      }
    }
    walk[@body]
    # no case matched: jump to default (or past the switch)
    Goto.new(default).precompile(compiler, scope)
    scope.statements << @body
    Label.new(@body.break_label).precompile(compiler, scope)
  end
end
class Continue
  # 'continue;' becomes a jump to the innermost loop's continue label
  # (found by walking up the enclosing Blocks).
  def precompile(compiler, scope)
    jump = Goto.new(scope.continue_label)
    jump.precompile(compiler, scope)
  end
end
class Break
  # 'break;' becomes a jump to the innermost loop/switch break label
  # (found by walking up the enclosing Blocks).
  def precompile(compiler, scope)
    jump = Goto.new(scope.break_label)
    jump.precompile(compiler, scope)
  end
end
class Return
  def precompile(compiler, scope)
    if @value
      @value = CExpression.new(nil, nil, @value, @value.type) if not @value.kind_of? CExpression
      if @value.type.untypedef.kind_of? Struct
        # returning a struct: copy the value through the hidden
        # pointer-to-result first argument added by Declaration#precompile
        @value = @value.precompile_inner(compiler, scope)
        func = scope.function.type
        CExpression.new(CExpression.new(nil, :*, func.args.first, @value.type), :'=', @value, @value.type).precompile(compiler, scope)
        @value = func.args.first
      else
        # cast to function return type
        @value = CExpression.new(nil, nil, @value, scope.function.type.type).precompile_inner(compiler, scope)
      end
      scope.statements << self
    end
    # 'return' always jumps to the function epilog label
    Goto.new(scope.return_label).precompile(compiler, scope)
  end
end
class Label
  # Appends the label to the statement list. User-defined label names are
  # renamed to a unique name, shared with the matching Gotos through
  # scope.nonauto_label. The labelled statement, if any, is precompiled
  # right after the label and detached from it.
  def precompile(compiler, scope)
    @name = scope.nonauto_label[@name] ||= compiler.new_label(@name) if name and not compiler.auto_label_list[@name]
    scope.statements << self
    return if not statement
    @statement.precompile(compiler, scope)
    @statement = nil
  end
end
class Case
  # Precompiles the case value (and the range upper bound for
  # 'case lo ... hi:'), then delegates the rest to the superclass;
  # Switch#precompile later rewrites Cases into plain Labels.
  def precompile(compiler, scope)
    @expr = CExpression.precompile_inner(compiler, scope, @expr)
    if exprup
      @exprup = CExpression.precompile_inner(compiler, scope, @exprup)
    end
    super(compiler, scope)
  end
end
class Goto
  # Renames user-defined targets to a unique label name (shared through
  # scope.nonauto_label so the matching Label gets the same name), then
  # appends itself to the statement list.
  def precompile(compiler, scope)
    unless compiler.auto_label_list[@target]
      @target = scope.nonauto_label[@target] ||= compiler.new_label(@target)
    end
    scope.statements << self
  end
end
class Asm
  # Inline assembly is kept verbatim in the statement list.
  def precompile(compiler, scope)
    # TODO CExpr.precompile_type(clobbers)
    scope.statements.push self
  end
end
class CExpression
  def precompile(compiler, scope)
    # standalone statement: the resulting value is unused (nested = false)
    i = precompile_inner(compiler, scope, false)
    scope.statements << i if i
  end

  # changes obj.type to a precompiled type
  # keeps struct/union, change everything else to __int\d
  # except Arrays if declaration is true (need to know variable allocation sizes etc)
  # returns the type
  def self.precompile_type(compiler, scope, obj, declaration = false)
    case t = obj.type.untypedef
    when BaseType
      case t.name
      when :void
      when :float, :double, :longdouble
      else t = BaseType.new("__int#{compiler.typesize[t.name]*8}".to_sym, t.specifier)
      end
    when Array
      if declaration; precompile_type(compiler, scope, t, declaration)
      else t = BaseType.new("__int#{compiler.typesize[:ptr]*8}".to_sym, :unsigned)
      end
    when Pointer; t = BaseType.new("__int#{compiler.typesize[:ptr]*8}".to_sym, :unsigned)
    when Enum; t = BaseType.new("__int#{compiler.typesize[:int]*8}".to_sym)
    when Function
      precompile_type(compiler, scope, t)
      t.args ||= []
      t.args.each { |a| precompile_type(compiler, scope, a) }
    when Union
      if declaration and t.members and not t.name # anonymous struct
        t.members.each { |a| precompile_type(compiler, scope, a, true) }
      end
    else raise 'bad type ' + t.inspect
    end
    # carry the qualifiers/attributes of the whole typedef chain onto the new type
    (t.qualifier ||= []).concat obj.type.qualifier if obj.type.qualifier and t != obj.type
    (t.attributes ||= []).concat obj.type.attributes if obj.type.attributes and t != obj.type
    while obj.type.kind_of? TypeDef
      obj.type = obj.type.type
      (t.qualifier ||= []).concat obj.type.qualifier if obj.type.qualifier and t != obj.type
      (t.attributes ||= []).concat obj.type.attributes if obj.type.attributes and t != obj.type
    end
    obj.type = t
  end

  def self.precompile_inner(compiler, scope, expr, nested = true)
    case expr
    when CExpression; expr.precompile_inner(compiler, scope, nested)
    else expr
    end
  end

  # returns a new CExpression with simplified self.type, computes structure offsets
  # turns char[]/float immediates to reference to anonymised const
  # TODO 'a = b += c' => 'b += c; a = b' (use nested argument)
  # TODO handle precompile_inner return nil
  # TODO struct.bits
  def precompile_inner(compiler, scope, nested = true)
    case @op
    when :'.'
      # a.b => (&a)->b
      lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
      ll = lexpr
      # NOTE(review): reassigns from lexpr.rexpr (not ll.rexpr) each pass, so
      # this cannot unwrap more than one no-op layer (compare the rr = rr.rexpr
      # loop in the :'&' handling below) -- verify against upstream
      ll = lexpr.rexpr while ll.kind_of? CExpression and not ll.op
      if ll.kind_of? CExpression and ll.op == :'*' and not ll.lexpr
        # do not change lexpr.rexpr.type directly to a pointer, might retrigger (ptr+imm) => (ptr + imm*sizeof(*ptr))
        @lexpr = CExpression.new(nil, nil, ll.rexpr, Pointer.new(lexpr.type))
      else
        @lexpr = CExpression.new(nil, :'&', lexpr, Pointer.new(lexpr.type))
      end
      @op = :'->'
      precompile_inner(compiler, scope)
    when :'->'
      # a->b => *(a + off(b))
      struct = @lexpr.type.untypedef.type.untypedef
      lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
      @lexpr = nil
      @op = nil
      if struct.kind_of? Struct and (off = struct.offsetof(compiler, @rexpr)) != 0
        off = CExpression.new(nil, nil, off, BaseType.new(:int, :unsigned))
        @rexpr = CExpression.new(lexpr, :'+', off, lexpr.type)
        # ensure the (ptr + value) is not expanded to (ptr + value * sizeof(*ptr))
        CExpression.precompile_type(compiler, scope, @rexpr)
      else
        # union or 1st struct member
        @rexpr = lexpr
      end
      if @type.kind_of? Array # Array member type is already an address
      else
        @rexpr = CExpression.new(nil, :*, @rexpr, @rexpr.type)
      end
      precompile_inner(compiler, scope)
    when :'[]'
      # a[b] => *(a + b)
      rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
      if rexpr.kind_of? CExpression and not rexpr.op and rexpr.rexpr == 0
        # a[0] => *a
        @rexpr = @lexpr
      else
        @rexpr = CExpression.new(@lexpr, :'+', rexpr, @lexpr.type)
      end
      @op = :'*'
      @lexpr = nil
      precompile_inner(compiler, scope)
    when :'?:'
      # cannot precompile in place, a conditional expression may have a comma: must turn into If
      if @lexpr.kind_of? CExpression
        @lexpr = @lexpr.precompile_inner(compiler, scope)
        if not @lexpr.lexpr and not @lexpr.op and @lexpr.rexpr.kind_of? ::Numeric
          # constant condition: keep only the taken arm
          if @lexpr.rexpr == 0
            e = @rexpr[1]
          else
            e = @rexpr[0]
          end
          e = CExpression.new(nil, nil, e, e.type) if not e.kind_of? CExpression
          return e.precompile_inner(compiler, scope)
        end
      end
      raise 'conditional in toplevel' if scope == compiler.toplevel # just in case
      # t = cond ? a : b  =>  if (cond) t = a; else t = b;
      var = Variable.new
      var.storage = :register
      var.name = compiler.new_label('ternary')
      var.type = @rexpr[0].type
      CExpression.precompile_type(compiler, scope, var)
      Declaration.new(var).precompile(compiler, scope)
      If.new(@lexpr, CExpression.new(var, :'=', @rexpr[0], var.type), CExpression.new(var, :'=', @rexpr[1], var.type)).precompile(compiler, scope)
      @lexpr = nil
      @op = nil
      @rexpr = var
      precompile_inner(compiler, scope)
    when :'&&'
      if scope == compiler.toplevel
        # constant context: keep the expression as-is
        @lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        CExpression.precompile_type(compiler, scope, self)
        self
      else
        # a && b  =>  t = 0; if (a) if (b) t = 1;
        var = Variable.new
        var.storage = :register
        var.name = compiler.new_label('and')
        var.type = @type
        CExpression.precompile_type(compiler, scope, var)
        var.initializer = CExpression.new(nil, nil, 0, var.type)
        Declaration.new(var).precompile(compiler, scope)
        l = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
        r = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
        If.new(l, If.new(r, CExpression.new(var, :'=', CExpression.new(nil, nil, 1, var.type), var.type))).precompile(compiler, scope)
        @lexpr = nil
        @op = nil
        @rexpr = var
        precompile_inner(compiler, scope)
      end
    when :'||'
      if scope == compiler.toplevel
        # constant context: keep the expression as-is
        @lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        CExpression.precompile_type(compiler, scope, self)
        self
      else
        # a || b  =>  t = 1; if (!a) if (!b) t = 0;
        var = Variable.new
        var.storage = :register
        var.name = compiler.new_label('or')
        var.type = @type
        CExpression.precompile_type(compiler, scope, var)
        var.initializer = CExpression.new(nil, nil, 1, var.type)
        Declaration.new(var).precompile(compiler, scope)
        l = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
        l = CExpression.new(nil, :'!', l, var.type)
        r = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
        r = CExpression.new(nil, :'!', r, var.type)
        If.new(l, If.new(r, CExpression.new(var, :'=', CExpression.new(nil, nil, 0, var.type), var.type))).precompile(compiler, scope)
        @lexpr = nil
        @op = nil
        @rexpr = var
        precompile_inner(compiler, scope)
      end
    when :funcall
      if @lexpr.kind_of? Variable and @lexpr.type.kind_of? Function and @lexpr.attributes and @lexpr.attributes.include? 'inline' and @lexpr.initializer
        # inline function: copy the (already precompiled) body at the call site
        # TODO check recursive call (direct or indirect)
        raise 'inline varargs unsupported' if @lexpr.type.varargs
        rtype = @lexpr.type.type.untypedef
        if not rtype.kind_of? BaseType or rtype.name != :void
          # temporary holding the inlined function's return value
          rval = Variable.new
          rval.name = compiler.new_label('inline_return')
          rval.type = @lexpr.type.type
          Declaration.new(rval).precompile(compiler, scope)
        end
        inline_label = {}
        # map formal arguments to the precompiled actual arguments
        locals = @lexpr.type.args.zip(@rexpr).inject({}) { |h, (fa, a)|
          h.update fa => CExpression.new(nil, nil, a, fa.type).precompile_inner(compiler, scope)
        }
        # deep-copy an expression, substituting formals/locals
        copy_inline_ce = lambda { |ce|
          case ce
          when CExpression; CExpression.new(copy_inline_ce[ce.lexpr], ce.op, copy_inline_ce[ce.rexpr], ce.type)
          when Variable; locals[ce] || ce
          when ::Array; ce.map { |e_| copy_inline_ce[e_] }
          else ce
          end
        }
        # deep-copy a statement, renaming labels and duplicating declarations
        copy_inline = lambda { |stmt, scp|
          case stmt
          when Block
            b = Block.new(scp)
            stmt.statements.each { |s|
              s = copy_inline[s, b]
              b.statements << s if s
            }
            b
          when If; If.new(copy_inline_ce[stmt.test], copy_inline[stmt.bthen, scp]) # re-precompile ?
          when Label; Label.new(inline_label[stmt.name] ||= compiler.new_label('inline_'+stmt.name))
          when Goto; Goto.new(inline_label[stmt.target] ||= compiler.new_label('inline_'+stmt.target))
          when Return; CExpression.new(rval, :'=', copy_inline_ce[stmt.value], rval.type).precompile_inner(compiler, scp) if stmt.value
          when CExpression; copy_inline_ce[stmt]
          when Declaration
            nv = stmt.var.dup
            if nv.type.kind_of? Array and nv.type.length.kind_of? CExpression
              nv.type = Array.new(nv.type.type, copy_inline_ce[nv.type.length]) # XXX nested dynamic?
            end
            locals[stmt.var] = nv
            scp.symbol[nv.name] = nv
            Declaration.new(nv)
          else raise 'unexpected inline statement ' + stmt.inspect
          end
        }
        scope.statements << copy_inline[@lexpr.initializer, scope] # body already precompiled
        CExpression.new(nil, nil, rval, rval.type).precompile_inner(compiler, scope)
      elsif @type.kind_of? Struct
        # call returning a struct: pass &tmp as hidden first argument,
        # the call result becomes *ptr
        var = Variable.new
        var.name = compiler.new_label('return_struct')
        var.type = @type
        Declaration.new(var).precompile(compiler, scope)
        @rexpr.unshift CExpression.new(nil, :&, var, Pointer.new(var.type))

        var2 = Variable.new
        var2.name = compiler.new_label('return_struct_ptr')
        var2.type = Pointer.new(@type)
        var2.storage = :register
        CExpression.precompile_type(compiler, scope, var2)
        Declaration.new(var2).precompile(compiler, scope)
        @type = var2.type
        CExpression.new(var2, :'=', self, var2.type).precompile(compiler, scope)
        CExpression.new(nil, :'*', var2, var.type).precompile_inner(compiler, scope)
      else
        t = @lexpr.type.untypedef
        t = t.type.untypedef if t.pointer?
        @lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
        types = t.args.map { |a| a.type }
        # cast args to func prototype
        @rexpr.map! { |e_| (types.empty? ? e_ : CExpression.new(nil, nil, e_, types.shift)).precompile_inner(compiler, scope) }
        CExpression.precompile_type(compiler, scope, self)
        self
      end
    when :','
      # a, b  =>  a; (value of b)
      lexpr = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
      rexpr = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
      lexpr.precompile(compiler, scope)
      rexpr.precompile_inner(compiler, scope)
    when :'!'
      CExpression.precompile_type(compiler, scope, self)
      if @rexpr.kind_of?(CExpression)
        case @rexpr.op
        when :'<', :'>', :'<=', :'>=', :'==', :'!='
          # !(a < b) => (a >= b), etc.
          @op = { :'<' => :'>=', :'>' => :'<=', :'<=' => :'>', :'>=' => :'<',
            :'==' => :'!=', :'!=' => :'==' }[@rexpr.op]
          @lexpr = @rexpr.lexpr
          @rexpr = @rexpr.rexpr
          precompile_inner(compiler, scope)
        when :'&&', :'||'
          # De Morgan: !(a && b) => !a || !b
          @op = { :'&&' => :'||', :'||' => :'&&' }[@rexpr.op]
          @lexpr = CExpression.new(nil, :'!', @rexpr.lexpr, @type)
          @rexpr = CExpression.new(nil, :'!', @rexpr.rexpr, @type)
          precompile_inner(compiler, scope)
        when :'!'
          # double negation
          if @rexpr.rexpr.kind_of? CExpression
            @op = nil
            @rexpr = @rexpr.rexpr
          else
            @op = :'!='
            @lexpr = @rexpr.rexpr
            @rexpr = CExpression.new(nil, nil, 0, @lexpr.type)
          end
          precompile_inner(compiler, scope)
        else
          @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
          self
        end
      else
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        self
      end
    when :'++', :'--'
      if not @rexpr
        # postfix: save the old value in a temporary, then pre-inc/dec in place
        var = Variable.new
        var.storage = :register
        var.name = compiler.new_label('postincrement')
        var.type = @type
        Declaration.new(var).precompile(compiler, scope)
        CExpression.new(var, :'=', @lexpr, @type).precompile(compiler, scope)
        CExpression.new(nil, @op, @lexpr, @type).precompile(compiler, scope)
        @lexpr = nil
        @op = nil
        @rexpr = var
        precompile_inner(compiler, scope)
      elsif @type.pointer? and compiler.sizeof(nil, @type.untypedef.type.untypedef) != 1
        # ++ptr => ptr += sizeof(*ptr) (done in += precompiler)
        @op = { :'++' => :'+=', :'--' => :'-=' }[@op]
        @lexpr = @rexpr
        @rexpr = CExpression.new(nil, nil, 1, BaseType.new(:ptr, :unsigned))
        precompile_inner(compiler, scope)
      else
        CExpression.precompile_type(compiler, scope, self)
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        self
      end
    when :'='
      # handle structure assignment/array assignment
      case @lexpr.type.untypedef
      when Union
        # rexpr may be a :funcall
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        # copy member by member
        @lexpr.type.untypedef.members.zip(@rexpr.type.untypedef.members) { |m1, m2|
          # assume m1 and m2 are compatible
          v1 = CExpression.new(@lexpr, :'.', m1.name, m1.type)
          v2 = CExpression.new(@rexpr, :'.', m2.name, m1.type)
          CExpression.new(v1, :'=', v2, v1.type).precompile(compiler, scope)
        }
        # (foo = bar).toto
        @op = nil
        @rexpr = @lexpr
        @lexpr = nil
        @type = @rexpr.type
        precompile_inner(compiler, scope) if nested
      when Array
        if not len = @lexpr.type.untypedef.length
          @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
          # char toto[] = "bla"
          if @rexpr.kind_of? CExpression and not @rexpr.lexpr and not @rexpr.op and
              @rexpr.rexpr.kind_of? Variable and @rexpr.rexpr.type.kind_of? Array
            len = @rexpr.rexpr.type.length
          end
        end
        raise 'array initializer with no length !' if not len
        # TODO optimize...
        # copy element by element
        len.times { |i|
          i = CExpression.new(nil, nil, i, BaseType.new(:long, :unsigned))
          v1 = CExpression.new(@lexpr, :'[]', i, @lexpr.type.untypedef.type)
          v2 = CExpression.new(@rexpr, :'[]', i, v1.type)
          CExpression.new(v1, :'=', v2, v1.type).precompile(compiler, scope)
        }
        @op = nil
        @rexpr = @lexpr
        @lexpr = nil
        @type = @rexpr.type
        precompile_inner(compiler, scope) if nested
      else
        @lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
        @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
        CExpression.precompile_type(compiler, scope, self)
        self
      end
    when nil
      case @rexpr
      when Block
        # compound statements
        raise 'compound statement in toplevel' if scope == compiler.toplevel # just in case
        # ({ ...; x; }) => the value of the last expression, via a temporary
        var = Variable.new
        var.storage = :register
        var.name = compiler.new_label('compoundstatement')
        var.type = @type
        CExpression.precompile_type(compiler, scope, var)
        Declaration.new(var).precompile(compiler, scope)
        if @rexpr.statements.last.kind_of? CExpression
          @rexpr.statements[-1] = CExpression.new(var, :'=', @rexpr.statements[-1], var.type)
          @rexpr.precompile(compiler, scope)
        end
        @rexpr = var
        precompile_inner(compiler, scope)
      when ::String
        # char[] immediate
        v = Variable.new
        v.storage = :static
        v.name = 'char_' + @rexpr.tr('^a-zA-Z', '')[0, 8]
        v.type = Array.new(@type.type)
        v.type.length = @rexpr.length + 1
        v.type.type.qualifier = [:const]
        v.initializer = CExpression.new(nil, nil, @rexpr, @type)
        Declaration.new(v).precompile(compiler, scope)
        @rexpr = v
        precompile_inner(compiler, scope)
      when ::Float
        # float immediate
        v = Variable.new
        v.storage = :static
        v.name = @type.untypedef.name.to_s
        v.type = @type
        v.type.qualifier = [:const]
        v.initializer = CExpression.new(nil, nil, @rexpr, @type)
        Declaration.new(v).precompile(compiler, scope)
        @rexpr = CExpression.new(nil, :'*', v, Pointer.new(v.type))
        precompile_inner(compiler, scope)
      when CExpression
        # simplify casts
        @rexpr = @rexpr.precompile_inner(compiler, scope)
        CExpression.precompile_type(compiler, scope, self)
        if @type.kind_of? BaseType and @rexpr.type.kind_of? BaseType
          if @rexpr.type == @type
            # noop cast
            @lexpr, @op, @rexpr = @rexpr.lexpr, @rexpr.op, @rexpr.rexpr
          elsif not @rexpr.op and @type.integral? and @rexpr.type.integral?
            if @rexpr.rexpr.kind_of? ::Numeric and (val = reduce(compiler)).kind_of? ::Numeric
              # fold cast of a numeric constant
              @rexpr = val
            elsif compiler.typesize[@type.name] < compiler.typesize[@rexpr.type.name]
              # (char)(short)(int)(long)foo => (char)foo
              @rexpr = @rexpr.rexpr
            end
          end
        end
        self
      else
        CExpression.precompile_type(compiler, scope, self)
        self
      end
    else
      # int+ptr => ptr+int
      if @op == :+ and @lexpr and @lexpr.type.integral? and @rexpr.type.pointer?
        @rexpr, @lexpr = @lexpr, @rexpr
      end

      # handle pointer + 2 == ((char *)pointer) + 2*sizeof(*pointer)
      if @rexpr and [:'+', :'+=', :'-', :'-='].include? @op and
          @type.pointer? and @rexpr.type.integral?
        sz = compiler.sizeof(nil, @type.untypedef.type.untypedef)
        if sz != 1
          sz = CExpression.new(nil, nil, sz, @rexpr.type)
          @rexpr = CExpression.new(@rexpr, :'*', sz, @rexpr.type)
        end
      end

      # type promotion => cast
      case @op
      when :+, :-, :*, :/, :&, :|, :^, :%
        if @lexpr
          if @lexpr.type != @type
            @lexpr = CExpression.new(nil, nil, @lexpr, @lexpr.type) if not @lexpr.kind_of? CExpression
            @lexpr = CExpression.new(nil, nil, @lexpr, @type)
          end
          if @rexpr.type != @type
            @rexpr = CExpression.new(nil, nil, @rexpr, @rexpr.type) if not @rexpr.kind_of? CExpression
            @rexpr = CExpression.new(nil, nil, @rexpr, @type)
          end
        end
      when :>>, :<<
        # char => int
        if @lexpr.type != @type
          @lexpr = CExpression.new(nil, nil, @lexpr, @lexpr.type) if not @lexpr.kind_of? CExpression
          @lexpr = CExpression.new(nil, nil, @lexpr, @type)
        end
      when :'+=', :'-=', :'*=', :'/=', :'&=', :'|=', :'^=', :'%='
        if @rexpr.type != @lexpr.type
          @rexpr = CExpression.new(nil, nil, @rexpr, @rexpr.type) if not @rexpr.kind_of? CExpression
          @rexpr = CExpression.new(nil, nil, @rexpr, @type)
        end
      end

      @lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
      @rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)

      if @op == :'&' and not @lexpr
        # &*foo => foo (after unwrapping no-op expressions)
        rr = @rexpr
        rr = rr.rexpr while rr.kind_of? CExpression and not rr.op
        if rr.kind_of? CExpression and rr.op == :'*' and not rr.lexpr
          @lexpr = nil
          @op = nil
          @rexpr = rr.rexpr
          return precompile_inner(compiler, scope)
        elsif rr != @rexpr
          @rexpr = rr
          return precompile_inner(compiler, scope)
        end
      end

      CExpression.precompile_type(compiler, scope, self)

      isnumeric = lambda { |e_| e_.kind_of?(::Numeric) or (e_.kind_of? CExpression and
        not e_.lexpr and not e_.op and e_.rexpr.kind_of? ::Numeric) }

      # calc numeric
      # XXX do not simplify operations involving variables (for type overflow etc)
      if isnumeric[@rexpr] and (not @lexpr or isnumeric[@lexpr]) and (val = reduce(compiler)).kind_of? ::Numeric
        @lexpr = nil
        @op = nil
        @rexpr = val
      end

      self
    end
  end
end
end
end
# compile_c: fix return -1ULL
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/main'
require 'metasm/parse_c'
module Metasm
module C
class Parser
  # Destructively precompiles the toplevel AST using a fresh generic
  # Compiler. Returns self to allow chaining.
  def precompile
    generic = Compiler.new(self)
    @toplevel.precompile(generic)
    self
  end
end
# each CPU defines a subclass of this one
class Compiler
# an ExeFormat (mostly used for unique label creation)
attr_accessor :exeformat
# the C Parser (destroyed by compilation)
attr_accessor :parser
# an array of assembler statements (strings)
attr_accessor :source
# hash of the unique labels generated so far (to recognize user-defined ones)
attr_accessor :auto_label_list
# the statement currently being precompiled (used by #exception for error messages)
attr_accessor :curexpr
# allows 'raise self' (eg struct.offsetof)
# Builds (without raising) a ParseError mentioning the statement currently
# being precompiled, so compilation failures point at the offending code.
def exception(msg='EOF unexpected')
  ParseError.new("near #{@curexpr}: #{msg}")
end
# creates a new CCompiler from an ExeFormat and a C Parser
# creates a new CCompiler from an ExeFormat and a C Parser
def initialize(parser, exeformat=ExeFormat.new, source=[])
  @parser = parser
  @exeformat = exeformat
  @source = source
  @auto_label_list = {}
end
# Returns a fresh unique label derived from +base+ and records it in
# @auto_label_list so compiler-generated labels can be told apart from
# user-defined ones.
def new_label(base='')
  label = @exeformat.new_label(base)
  @auto_label_list[label] = true
  label
end
# shortcut to the parser's toplevel Block
def toplevel ; @parser.toplevel end
# shortcut to the parser's typename => size hash
def typesize ; @parser.typesize end
# delegates sizeof computation to the parser
def sizeof(*a) @parser.sizeof(*a) end
# compiles the c parser toplevel to assembler statements in self.source (::Array of ::String)
#
# starts by precompiling parser.toplevel (destructively):
# static symbols are converted to toplevel ones, as nested functions
# uses an ExeFormat (the argument) to create unique label/variable names
#
# remove typedefs/enums
# CExpressions: all expr types are converted to __int8/__int16/__int32/__int64 (sign kept) (incl. ptr), + void
# struct member dereference/array indexes are converted to *(ptr + off)
# coma are converted to 2 statements, ?: are converted to If
# :|| and :&& are converted to If + assignment to temporary
# immediate quotedstrings/floats are converted to references to const static toplevel
# postincrements are replaced by a temporary (XXX arglist)
# compound statements are unnested
# Asm are kept (TODO precompile clobber types)
# Declarations: initializers are converted to separate assignment CExpressions
# Blocks are kept unless empty
# structure dereferences/array indexing are converted to *(ptr + offset)
# While/For/DoWhile/Switch are converted to If/Goto
# Continue/Break are converted to Goto
# Cases are converted to Labels during Switch conversion
# Label statements are removed
# Return: 'return <foo>;' => 'return <foo>; goto <end_of_func>;', 'return;' => 'goto <eof>;'
# If: 'if (a) b; else c;' => 'if (a) goto l1; { c; }; goto l2; l1: { b; } l2:'
# && and || in condition are expanded to multiple If
# functions returning struct are precompiled (in Declaration/CExpression/Return)
#
# in a second phase, unused labels are removed from functions, as noop goto (goto x; x:)
# dead code is removed ('goto foo; bar; baz:' => 'goto foo; baz:') (TODO)
#
# after that, toplevel is no longer valid C (bad types, blocks moved...)
#
# then toplevel statements are sorted (.text, .data, .rodata, .bss) and compiled into asm statements in self.source
#
# returns the asm source in a single string
def compile
cf = @exeformat.unique_labels_cache.keys & @auto_label_list.keys
raise "compile_c name conflict: #{cf.inspect}" if not cf.empty?
@exeformat.unique_labels_cache.update @auto_label_list
@parser.toplevel.precompile(self)
# reorder statements (arrays of Variables) following exe section typical order
funcs, rwdata, rodata, udata = [], [], [], []
@parser.toplevel.statements.each { |st|
if st.kind_of? Asm
@source << st.body
next
end
raise 'non-declaration at toplevel! ' + st.inspect if not st.kind_of? Declaration
v = st.var
if v.type.kind_of? Function
funcs << v if v.initializer # no initializer == storage :extern
elsif v.storage == :extern
elsif v.initializer
if v.type.qualifier.to_a.include?(:const) or
(v.type.kind_of? Array and v.type.type.qualifier.to_a.include?(:const))
rodata << v
else
rwdata << v
end
else
udata << v
end
}
if not funcs.empty?
@exeformat.compile_setsection @source, '.text'
funcs.each { |func| c_function(func) }
c_program_epilog
end
align = 1
if not rwdata.empty?
@exeformat.compile_setsection @source, '.data'
rwdata.each { |data| align = c_idata(data, align) }
end
if not rodata.empty?
@exeformat.compile_setsection @source, '.rodata'
rodata.each { |data| align = c_idata(data, align) }
end
if not udata.empty?
@exeformat.compile_setsection @source, '.bss'
udata.each { |data| align = c_udata(data, align) }
end
# needed to allow asm parser to use our autogenerated label names
@exeformat.unique_labels_cache.delete_if { |k, v| @auto_label_list[k] }
@source.join("\n")
end
# compiles a C function +func+ to asm source into the array of strings +str+
# in a first pass the stack variable offsets are computed,
# then each statement is compiled in turn
def c_function(func)
# must wait the Declaration to run the CExpr for dynamic auto offsets,
# and must run those statements once only
# TODO alloc a stack variable to maintain the size for each dynamic array
# TODO offset of arguments
# TODO nested function
c_init_state(func)
# hide the full @source while compiling, then add prolog/epilog (saves 1 pass)
@source << '' << "#{func.name}:"
presource, @source = @source, []
c_block(func.initializer)
tmpsource, @source = @source, presource
c_prolog
@source.concat tmpsource
c_epilog
@source << ''
end
def c_block(blk)
c_block_enter(blk)
blk.statements.each { |stmt|
case stmt
when CExpression; c_cexpr(stmt)
when Declaration; c_decl(stmt.var)
when If; c_ifgoto(stmt.test, stmt.bthen.target)
when Goto; c_goto(stmt.target)
when Label; c_label(stmt.name)
when Return; c_return(stmt.value)
when Asm; c_asm(stmt)
when Block; c_block(stmt)
else raise
end
}
c_block_exit(blk)
end
def c_block_enter(blk)
end
def c_block_exit(blk)
end
def c_label(name)
@source << "#{name}:"
end
# fills @state.offset (empty hash)
# automatic variable => stack offset, (recursive)
# offset is an ::Integer or a CExpression (dynamic array)
# assumes offset 0 is a ptr-size-aligned address
# TODO registerize automatic variables
def c_reserve_stack(block, off = 0)
block.statements.each { |stmt|
case stmt
when Declaration
next if stmt.var.type.kind_of? Function
off = c_reserve_stack_var(stmt.var, off)
@state.offset[stmt.var] = off
when Block
c_reserve_stack(stmt, off)
# do not update off, not nested subblocks can overlap
end
}
end
# computes the new stack offset for var
# off is either an offset from stack start (:ptr-size-aligned) or
# a CExpression [[[expr, +, 7], &, -7], +, off]
def c_reserve_stack_var(var, off)
if (arr_type = var.type).kind_of? Array and (arr_sz = arr_type.length).kind_of? CExpression
# dynamic array !
arr_sz = CExpression.new(arr_sz, :*, sizeof(nil, arr_type.type),
BaseType.new(:long, :unsigned)).precompile_inner(@parser, nil)
off = CExpression.new(arr_sz, :+, off, arr_sz.type)
off = CExpression.new(off, :+, 7, off.type)
off = CExpression.new(off, :&, -7, off.type)
CExpression.new(off, :+, 0, off.type)
else
al = var.type.align(@parser)
sz = sizeof(var)
case off
when CExpression; CExpression.new(off.lexpr, :+, ((off.rexpr + sz + al - 1) / al * al), off.type)
else (off + sz + al - 1) / al * al
end
end
end
# here you can add thing like stubs for PIC code
def c_program_epilog
end
# compiles a C static data definition into an asm string
# returns the new alignment value
def c_idata(data, align)
w = data.type.align(@parser)
@source << ".align #{align = w}" if w > align
@source << data.name.dup
len = c_idata_inner(data.type, data.initializer)
len %= w
len == 0 ? w : len
end
# dumps an anonymous variable definition, appending to the last line of source
# source.last is a label name or is empty before calling here
# return the length of the data written
def c_idata_inner(type, value)
case type
when BaseType
value ||= 0
if type.name == :void
@source.last << ':' if not @source.last.empty?
return 0
end
@source.last <<
case type.name
when :__int8; ' db '
when :__int16; ' dw '
when :__int32; ' dd '
when :__int64; ' dq '
when :ptr; " d#{%w[x b w x d x x x q][@parser.typesize[type.name]]} "
when :float; ' df ' # TODO
when :double; ' dfd '
when :longdouble; ' dfld '
else raise "unknown idata type #{type.inspect} #{value.inspect}"
end
@source.last << c_idata_inner_cexpr(value)
@parser.typesize[type.name]
when Struct
value ||= []
@source.last << ':' if not @source.last.empty?
# could .align here, but if there is our label name just before, it should have been .aligned too..
raise "unknown struct initializer #{value.inspect}" if not value.kind_of? ::Array
sz = 0
type.members.zip(value).each { |m, v|
if m.name and wsz = type.offsetof(@parser, m.name) and sz < wsz
@source << "db #{wsz-sz} dup(?)"
end
@source << ''
flen = c_idata_inner(m.type, v)
sz += flen
}
sz
when Union
value ||= []
@source.last << ':' if not @source.last.empty?
len = sizeof(nil, type)
raise "unknown union initializer #{value.inspect}" if not value.kind_of? ::Array
idx = value.rindex(value.compact.last) || 0
raise "empty union initializer" if not idx
wlen = c_idata_inner(type.members[idx].type, value[idx])
@source << "db #{'0' * (len - wlen) * ', '}" if wlen < len
len
when Array
value ||= []
if value.kind_of? CExpression and not value.op and value.rexpr.kind_of? ::String
elen = sizeof(nil, value.type.type)
@source.last <<
case elen
when 1; ' db '
when 2; ' dw '
else raise 'bad char* type ' + value.inspect
end << value.rexpr.inspect
len = type.length || (value.rexpr.length+1)
if len > value.rexpr.length
@source.last << (', 0' * (len - value.rexpr.length))
end
elen * len
elsif value.kind_of? ::Array
@source.last << ':' if not @source.last.empty?
len = type.length || value.length
value.each { |v|
@source << ''
c_idata_inner(type.type, v)
}
len -= value.length
if len > 0
@source << " db #{len * sizeof(nil, type.type)} dup(0)"
end
sizeof(nil, type.type) * len
else raise "unknown static array initializer #{value.inspect}"
end
end
end
def c_idata_inner_cexpr(expr)
expr = expr.reduce(@parser) if expr.kind_of? CExpression
case expr
when ::Integer; (expr >= 4096) ? ('0x%X' % expr) : expr.to_s
when ::Numeric; expr.to_s
when Variable
case expr.type
when Array; expr.name
else c_idata_inner_cexpr(expr.initializer)
end
when CExpression
if not expr.lexpr
case expr.op
when :&
case expr.rexpr
when Variable; expr.rexpr.name
else raise 'unhandled addrof in initializer ' + expr.rexpr.inspect
end
#when :*
when :+; c_idata_inner_cexpr(expr.rexpr)
when :-; ' -' << c_idata_inner_cexpr(expr.rexpr)
when nil
e = c_idata_inner_cexpr(expr.rexpr)
if expr.rexpr.kind_of? CExpression
e = '(' << e << " & 0#{'ff'*sizeof(expr)}h)"
end
e
else raise 'unhandled initializer expr ' + expr.inspect
end
else
case expr.op
when :+, :-, :*, :/, :%, :<<, :>>, :&, :|, :^
e = '(' << c_idata_inner_cexpr(expr.lexpr) <<
expr.op.to_s << c_idata_inner_cexpr(expr.rexpr) << ')'
if expr.type.integral?
# db are unsigned
e = '(' << e << " & 0#{'ff'*sizeof(expr)}h)"
end
e
#when :'.'
#when :'->'
#when :'[]'
else raise 'unhandled initializer expr ' + expr.inspect
end
end
else raise 'unhandled initializer ' + expr.inspect
end
end
def c_udata(data, align)
@source << "#{data.name} "
@source.last <<
case data.type
when BaseType
len = @parser.typesize[data.type.name]
case data.type.name
when :__int8; 'db ?'
when :__int16; 'dw ?'
when :__int32; 'dd ?'
when :__int64; 'dq ?'
else "db #{len} dup(?)"
end
else
len = sizeof(data)
"db #{len} dup(?)"
end
len %= align
len == 0 ? align : len
end
def check_reserved_name(var)
end
end
class Statement
	# all Statements/Declaration must define a precompile(compiler, scope) method
	# it must append itself to scope.statements
	# wraps this statement into a fresh Block attached to the given scope
	def precompile_make_block(scope)
		blk = Block.new(scope)
		blk.statements << self
		blk
	end
end
class Block
# precompile all statements, then simplifies symbols/structs types
def precompile(compiler, scope=nil)
# iterate over a snapshot: precompiling a statement appends its expansion to @statements
stmts = @statements.dup
@statements.clear
stmts.each { |st|
compiler.curexpr = st
st.precompile(compiler, self)
}
# cleanup declarations
@symbol.delete_if { |n, s| not s.kind_of? Variable }
@struct.delete_if { |n, s| not s.kind_of? Union }
@symbol.each_value { |var|
CExpression.precompile_type(compiler, self, var, true)
}
@struct.each_value { |var|
next if not var.members
var.members.each { |m|
CExpression.precompile_type(compiler, self, m, true)
}
}
# empty blocks are dropped; scope is nil when precompiling a function body (see Declaration)
scope.statements << self if scope and not @statements.empty?
end
# removes unused labels, and in-place goto (goto toto; toto:)
def precompile_optimize
list = []
precompile_optimize_inner(list, 1)
precompile_optimize_inner(list, 2)
end
# step 1: list used labels/unused goto
# step 2: remove unused labels
def precompile_optimize_inner(list, step)
lastgoto = nil
hadref = false
# walk: recursively record labels whose address is taken inside an expression,
# so they are kept even without an explicit goto
walk = lambda { |expr|
next if not expr.kind_of? CExpression
# gcc's unary && support
if not expr.op and not expr.lexpr and expr.rexpr.kind_of? Label
list << expr.rexpr.name
else
walk[expr.lexpr]
if expr.rexpr.kind_of? ::Array
expr.rexpr.each { |r| walk[r] }
else
walk[expr.rexpr]
end
end
}
@statements.dup.each { |s|
lastgoto = nil if not s.kind_of? Label
case s
when Block
s.precompile_optimize_inner(list, step)
@statements.delete s if step == 2 and s.statements.empty?
when CExpression; walk[s] if step == 1
when Label
case step
when 1
# 'goto x; x:' - mark the goto removable; also drop the label name
# unless it had an earlier (real) reference
if lastgoto and lastgoto.target == s.name
list << lastgoto
list.delete s.name if not hadref
end
when 2; @statements.delete s if not list.include? s.name
end
when Goto, If
s.kind_of?(If) ? g = s.bthen : g = s
case step
when 1
hadref = list.include? g.target
lastgoto = g
list << g.target
when 2
if list.include? g
idx = @statements.index s
@statements.delete s
# keep the If condition in place if it may have side effects
@statements[idx, 0] = s.test if s != g and not s.test.constant?
end
end
end
}
list
end
# noop
def precompile_make_block(scope) self end
# loop/function context accessors: looked up through the lexical parent (@outer)
# unless explicitly set on this block
def continue_label ; defined?(@continue_label) ? @continue_label : @outer.continue_label end
def continue_label=(l) @continue_label = l end
def break_label ; defined?(@break_label) ? @break_label : @outer.break_label end
def break_label=(l) @break_label = l end
def return_label ; defined?(@return_label) ? @return_label : @outer.return_label end
def return_label=(l) @return_label = l end
def nonauto_label=(l) @nonauto_label = l end
def nonauto_label ; defined?(@nonauto_label) ? @nonauto_label : @outer.nonauto_label end
def function ; defined?(@function) ? @function : @outer.function end
def function=(f) @function = f end
end
class Declaration
def precompile(compiler, scope)
# static variables and nested function definitions are promoted to toplevel
# under a fresh unique name
if (@var.type.kind_of? Function and @var.initializer and scope != compiler.toplevel) or @var.storage == :static or compiler.check_reserved_name(@var)
# TODO fix label name in export table if __exported
scope.symbol.delete @var.name
old = @var.name
@var.name = compiler.new_label @var.name until @var.name != old
compiler.toplevel.symbol[@var.name] = @var
# TODO no pure inline if addrof(func) needed
compiler.toplevel.statements << self unless @var.attributes.to_a.include? 'inline'
else
scope.symbol[@var.name] ||= @var
# appendme stays nil (falsy) when the declaration was promoted to toplevel above
appendme = true
end
if i = @var.initializer
if @var.type.kind_of? Function
# function returning a struct: add a hidden first argument pointing to
# the caller-provided return storage
if @var.type.type.kind_of? Struct
s = @var.type.type
v = Variable.new
v.name = compiler.new_label('return_struct_ptr')
v.type = Pointer.new(s)
CExpression.precompile_type(compiler, scope, v)
@var.type.args.unshift v
@var.type.type = v.type
end
i.function = @var
i.return_label = compiler.new_label('epilog')
i.nonauto_label = {}
i.precompile(compiler)
Label.new(i.return_label).precompile(compiler, i)
i.precompile_optimize
# append now so that static dependencies are declared before us
scope.statements << self if appendme and not @var.attributes.to_a.include? 'inline'
elsif scope != compiler.toplevel and @var.storage != :static
# automatic variable: the initializer becomes plain assignment statements
scope.statements << self if appendme
Declaration.precompile_dyn_initializer(compiler, scope, @var, @var.type, i)
@var.initializer = nil
else
scope.statements << self if appendme
@var.initializer = Declaration.precompile_static_initializer(compiler, @var.type, i)
end
else
scope.statements << self if appendme
end
end
# turns an initializer to CExpressions in scope.statements
def self.precompile_dyn_initializer(compiler, scope, var, type, init)
case type = type.untypedef
when Array
# XXX TODO type.length may be dynamic !!
case init
when CExpression
# char toto[] = "42"
if not init.kind_of? CExpression or init.op or init.lexpr or not init.rexpr.kind_of? ::String
raise "unknown initializer #{init.inspect} for #{var.inspect}"
end
# expand the string to an array of char constants (with trailing NUL)
init = init.rexpr.unpack('C*') + [0]
init.map! { |chr| CExpression.new(nil, nil, chr, type.type) }
precompile_dyn_initializer(compiler, scope, var, type, init)
when ::Array
type.length ||= init.length
# len is an Integer
init.each_with_index { |it, idx|
next if not it
break if idx >= type.length
idx = CExpression.new(nil, nil, idx, BaseType.new(:long, :unsigned))
v = CExpression.new(var, :'[]', idx, type.type)
precompile_dyn_initializer(compiler, scope, v, type.type, it)
}
else raise "unknown initializer #{init.inspect} for #{var.inspect}"
end
when Union
case init
when CExpression, Variable
if init.type.untypedef.kind_of? BaseType
# works for struct foo bar[] = {0}; ...
type.members.each { |m|
v = CExpression.new(var, :'.', m.name, m.type)
precompile_dyn_initializer(compiler, scope, v, v.type, init)
}
elsif init.type.untypedef.kind_of? type.class
# whole struct/union copy
CExpression.new(var, :'=', init, type).precompile(compiler, scope)
else
raise "bad initializer #{init.inspect} for #{var.inspect}"
end
when ::Array
init.each_with_index{ |it, idx|
next if not it
m = type.members[idx]
v = CExpression.new(var, :'.', m.name, m.type)
precompile_dyn_initializer(compiler, scope, v, m.type, it)
}
else raise "unknown initializer #{init.inspect} for #{var.inspect}"
end
else
# scalar: plain assignment
case init
when CExpression
CExpression.new(var, :'=', init, type).precompile(compiler, scope)
else raise "unknown initializer #{init.inspect} for #{var.inspect}"
end
end
end
# returns a precompiled static initializer (eg string constants)
def self.precompile_static_initializer(compiler, type, init)
# TODO
case type = type.untypedef
when Array
if init.kind_of? ::Array
init.map { |i| precompile_static_initializer(compiler, type.type, i) }
else
init
end
when Union
if init.kind_of? ::Array
init.zip(type.members).map { |i, m| precompile_static_initializer(compiler, m.type, i) }
else
init
end
else
if init.kind_of? CExpression and init = init.reduce(compiler) and init.kind_of? CExpression
if not init.op and init.rexpr.kind_of? ::String
# quoted string: create an anonymous const char[] at toplevel and refer to it
v = Variable.new
v.storage = :static
v.name = 'char_' + init.rexpr.tr('^a-zA-Z', '')[0, 8]
v.type = Array.new(type.type)
v.type.length = init.rexpr.length + 1
v.type.type.qualifier = [:const]
v.initializer = CExpression.new(nil, nil, init.rexpr, type)
Declaration.new(v).precompile(compiler, compiler.toplevel)
init.rexpr = v
end
init.rexpr = precompile_static_initializer(compiler, init.rexpr.type, init.rexpr) if init.rexpr.kind_of? CExpression
init.lexpr = precompile_static_initializer(compiler, init.lexpr.type, init.lexpr) if init.lexpr.kind_of? CExpression
end
init
end
end
end
class If
def precompile(compiler, scope)
expr = lambda { |e| e.kind_of?(CExpression) ? e : CExpression.new(nil, nil, e, e.type) }
# t1/t2 hold statement bodies to emit after the If; l1/l2/l3 labels to emit after it
if @bthen.kind_of? Goto or @bthen.kind_of? Break or @bthen.kind_of? Continue
# if () goto l; else b; => if () goto l; b;
if belse
t1 = @belse
@belse = nil
end
# need to convert user-defined Goto target !
@bthen.precompile(compiler, scope)
@bthen = scope.statements.pop # break => goto break_label
elsif belse
# if () a; else b; => if () goto then; b; goto end; then: a; end:
t1 = @belse
t2 = @bthen
l2 = compiler.new_label('if_then')
@bthen = Goto.new(l2)
@belse = nil
l3 = compiler.new_label('if_end')
else
# if () a; => if (!) goto end; a; end:
t1 = @bthen
l2 = compiler.new_label('if_end')
@bthen = Goto.new(l2)
@test = CExpression.negate(@test)
end
@test = expr[@test]
case @test.op
when :'&&'
# if (c1 && c2) goto a; => if (!c1) goto b; if (c2) goto a; b:
l1 = compiler.new_label('if_nand')
If.new(CExpression.negate(@test.lexpr), Goto.new(l1)).precompile(compiler, scope)
@test = expr[@test.rexpr]
precompile(compiler, scope)
when :'||'
# if (c1 || c2) goto a; => if (c1) goto a; if (c2) goto a;
l1 = compiler.new_label('if_or')
If.new(expr[@test.lexpr], Goto.new(@bthen.target)).precompile(compiler, scope)
@test = expr[@test.rexpr]
precompile(compiler, scope)
else
@test = CExpression.precompile_inner(compiler, scope, @test)
t = @test.reduce(compiler)
# constant condition: emit only the taken branch (plus pending labels)
if t.kind_of? ::Integer
if t == 0
Label.new(l1, nil).precompile(compiler, scope) if l1
t1.precompile(compiler, scope) if t1
Label.new(l2, nil).precompile(compiler, scope) if l2
Label.new(l3, nil).precompile(compiler, scope) if l3
else
scope.statements << @bthen
Label.new(l1, nil).precompile(compiler, scope) if l1
Label.new(l2, nil).precompile(compiler, scope) if l2
t2.precompile(compiler, scope) if t2
Label.new(l3, nil).precompile(compiler, scope) if l3
end
return
end
scope.statements << self
end
# final layout: [if goto l2] l1: t1; goto l3; l2: t2; l3:
Label.new(l1, nil).precompile(compiler, scope) if l1
t1.precompile(compiler, scope) if t1
Goto.new(l3).precompile(compiler, scope) if l3
Label.new(l2, nil).precompile(compiler, scope) if l2
t2.precompile(compiler, scope) if t2
Label.new(l3, nil).precompile(compiler, scope) if l3
end
end
class For
	# converts 'for (init; test; iter) body' to:
	#   init; continue: if (!test) goto break; body; iter; goto continue; break:
	def precompile(compiler, scope)
		if init
			@init.precompile(compiler, scope)
			# declarations made in the init clause live in their own block scope
			scope = @init if @init.kind_of?(Block)
		end
		@body = @body.precompile_make_block(scope)
		@body.continue_label = compiler.new_label('for_continue')
		@body.break_label = compiler.new_label('for_break')
		Label.new(@body.continue_label).precompile(compiler, scope)
		If.new(CExpression.negate(@test), Goto.new(@body.break_label)).precompile(compiler, scope) if test
		@body.precompile(compiler, scope)
		@iter.precompile(compiler, scope) if iter
		Goto.new(@body.continue_label).precompile(compiler, scope)
		Label.new(@body.break_label).precompile(compiler, scope)
	end
end
class While
	# converts 'while (test) body' to:
	#   continue: if (!test) goto break; body; goto continue; break:
	def precompile(compiler, scope)
		@body = @body.precompile_make_block(scope)
		cont = (@body.continue_label = compiler.new_label('while_continue'))
		brk = (@body.break_label = compiler.new_label('while_break'))
		Label.new(cont).precompile(compiler, scope)
		If.new(CExpression.negate(@test), Goto.new(brk)).precompile(compiler, scope)
		@body.precompile(compiler, scope)
		Goto.new(cont).precompile(compiler, scope)
		Label.new(brk).precompile(compiler, scope)
	end
end
class DoWhile
	# converts 'do body while (test)' to:
	#   start: body; continue: if (test) goto start; break:
	def precompile(compiler, scope)
		@body = @body.precompile_make_block(scope)
		@body.continue_label = compiler.new_label('dowhile_continue')
		@body.break_label = compiler.new_label('dowhile_break')
		entry = compiler.new_label('dowhile_start')
		Label.new(entry).precompile(compiler, scope)
		@body.precompile(compiler, scope)
		Label.new(@body.continue_label).precompile(compiler, scope)
		If.new(@test, Goto.new(entry)).precompile(compiler, scope)
		Label.new(@body.break_label).precompile(compiler, scope)
	end
end
class Switch
def precompile(compiler, scope)
# evaluate the tested expression once into a temporary register variable
var = Variable.new
var.storage = :register
var.name = compiler.new_label('switch')
var.type = @test.type
var.initializer = @test
CExpression.precompile_type(compiler, scope, var)
Declaration.new(var).precompile(compiler, scope)
@body = @body.precompile_make_block scope
@body.break_label = compiler.new_label('switch_break')
# precompile the body without appending it: the dispatch Ifs must come first
@body.precompile(compiler)
default = @body.break_label
# recursive lambda to change Case to Labels
# dynamically creates the If sequence
walk = lambda { |blk|
blk.statements.each_with_index { |s, i|
case s
when Case
label = compiler.new_label('case')
if s.expr == 'default'
default = label
elsif s.exprup
# gcc range case: 'case lo ... hi:'
If.new(CExpression.new(CExpression.new(var, :'>=', s.expr, BaseType.new(:int)), :'&&',
CExpression.new(var, :'<=', s.exprup, BaseType.new(:int)),
BaseType.new(:int)), Goto.new(label)).precompile(compiler, scope)
else
If.new(CExpression.new(var, :'==', s.expr, BaseType.new(:int)),
Goto.new(label)).precompile(compiler, scope)
end
blk.statements[i] = Label.new(label)
when Block
walk[s]
end
}
}
walk[@body]
# no case matched: jump to the default label (or past the body)
Goto.new(default).precompile(compiler, scope)
scope.statements << @body
Label.new(@body.break_label).precompile(compiler, scope)
end
end
class Continue
	# becomes a jump to the continue label of the innermost enclosing loop
	def precompile(compiler, scope)
		jump = Goto.new(scope.continue_label)
		jump.precompile(compiler, scope)
	end
end
class Break
	# becomes a jump to the break label of the innermost enclosing loop/switch
	def precompile(compiler, scope)
		jump = Goto.new(scope.break_label)
		jump.precompile(compiler, scope)
	end
end
class Return
	# precompiles the return value (if any), then jumps to the function epilog label
	def precompile(compiler, scope)
		if @value
			@value = CExpression.new(nil, nil, @value, @value.type) unless @value.kind_of?(CExpression)
			if @value.type.untypedef.kind_of?(Struct)
				# struct return value: write it through the hidden pointer argument
				@value = @value.precompile_inner(compiler, scope)
				retptr = scope.function.type.args.first
				CExpression.new(CExpression.new(nil, :*, retptr, @value.type), :'=', @value, @value.type).precompile(compiler, scope)
				@value = retptr
			else
				# cast to function return type
				@value = CExpression.new(nil, nil, @value, scope.function.type.type).precompile_inner(compiler, scope)
			end
			scope.statements << self
		end
		Goto.new(scope.return_label).precompile(compiler, scope)
	end
end
class Label
	# renames user labels so they cannot clash with compiler-generated ones,
	# then appends itself and precompiles any attached statement
	def precompile(compiler, scope)
		if @name and not compiler.auto_label_list[@name]
			@name = (scope.nonauto_label[@name] ||= compiler.new_label(@name))
		end
		scope.statements << self
		return unless @statement
		@statement.precompile(compiler, scope)
		@statement = nil
	end
end
class Case
	# precompiles the case bound expressions, then behaves like a plain Label
	def precompile(compiler, scope)
		@expr = CExpression.precompile_inner(compiler, scope, @expr)
		@exprup = CExpression.precompile_inner(compiler, scope, @exprup) if @exprup
		super(compiler, scope)
	end
end
class Goto
	# user-specified targets are renamed to match the (renamed) user labels
	def precompile(compiler, scope)
		unless compiler.auto_label_list[@target]
			@target = (scope.nonauto_label[@target] ||= compiler.new_label(@target))
		end
		scope.statements << self
	end
end
class Asm
	# inline assembly is passed through untouched
	def precompile(compiler, scope)
		# TODO CExpr.precompile_type(clobbers)
		scope.statements.push self
	end
end
class CExpression
# precompiles the expression as a standalone statement:
# appends the simplified expression to the scope, unless it vanished entirely
def precompile(compiler, scope)
	simplified = precompile_inner(compiler, scope, false)
	scope.statements << simplified unless simplified.nil?
end
# changes obj.type to a precompiled type
# keeps struct/union, change everything else to __int\d
# except Arrays if declaration is true (need to know variable allocation sizes etc)
# returns the type
def self.precompile_type(compiler, scope, obj, declaration = false)
case t = obj.type.untypedef
when BaseType
# void and floats are kept; integral types become sized __intN (sign kept)
case t.name
when :void
when :float, :double, :longdouble
else t = BaseType.new("__int#{compiler.typesize[t.name]*8}".to_sym, t.specifier)
end
when Array
# Array is kept only for declarations (element count needed for allocation),
# otherwise it decays to a pointer-sized unsigned int
if declaration; precompile_type(compiler, scope, t, declaration)
else t = BaseType.new("__int#{compiler.typesize[:ptr]*8}".to_sym, :unsigned)
end
when Pointer; t = BaseType.new("__int#{compiler.typesize[:ptr]*8}".to_sym, :unsigned)
when Enum; t = BaseType.new("__int#{compiler.typesize[:int]*8}".to_sym)
when Function
# recurse on return type and argument types
precompile_type(compiler, scope, t)
t.args ||= []
t.args.each { |a| precompile_type(compiler, scope, a) }
when Union
if declaration and t.members and not t.name # anonymous struct
t.members.each { |a| precompile_type(compiler, scope, a, true) }
end
else raise 'bad type ' + t.inspect
end
# fold qualifiers/attributes of every typedef layer into the final type
(t.qualifier ||= []).concat obj.type.qualifier if obj.type.qualifier and t != obj.type
(t.attributes ||= []).concat obj.type.attributes if obj.type.attributes and t != obj.type
while obj.type.kind_of? TypeDef
obj.type = obj.type.type
(t.qualifier ||= []).concat obj.type.qualifier if obj.type.qualifier and t != obj.type
(t.attributes ||= []).concat obj.type.attributes if obj.type.attributes and t != obj.type
end
obj.type = t
end
# helper: precompile expr when it is a CExpression, return anything else untouched
def self.precompile_inner(compiler, scope, expr, nested = true)
	return expr unless expr.kind_of? CExpression
	expr.precompile_inner(compiler, scope, nested)
end
# returns a new CExpression with simplified self.type, computes structure offsets
# turns char[]/float immediates to reference to anonymised const
# TODO 'a = b += c' => 'b += c; a = b' (use nested argument)
# TODO handle precompile_inner return nil
# TODO struct.bits
def precompile_inner(compiler, scope, nested = true)
case @op
when :'.'
# a.b => (&a)->b
lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
ll = lexpr
ll = lexpr.rexpr while ll.kind_of? CExpression and not ll.op
if ll.kind_of? CExpression and ll.op == :'*' and not ll.lexpr
# do not change lexpr.rexpr.type directly to a pointer, might retrigger (ptr+imm) => (ptr + imm*sizeof(*ptr))
@lexpr = CExpression.new(nil, nil, ll.rexpr, Pointer.new(lexpr.type))
else
@lexpr = CExpression.new(nil, :'&', lexpr, Pointer.new(lexpr.type))
end
@op = :'->'
precompile_inner(compiler, scope)
when :'->'
# a->b => *(a + off(b))
struct = @lexpr.type.untypedef.type.untypedef
lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
@lexpr = nil
@op = nil
if struct.kind_of? Struct and (off = struct.offsetof(compiler, @rexpr)) != 0
off = CExpression.new(nil, nil, off, BaseType.new(:int, :unsigned))
@rexpr = CExpression.new(lexpr, :'+', off, lexpr.type)
# ensure the (ptr + value) is not expanded to (ptr + value * sizeof(*ptr))
CExpression.precompile_type(compiler, scope, @rexpr)
else
# union or 1st struct member
@rexpr = lexpr
end
if @type.kind_of? Array # Array member type is already an address
else
@rexpr = CExpression.new(nil, :*, @rexpr, @rexpr.type)
end
precompile_inner(compiler, scope)
when :'[]'
rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
if rexpr.kind_of? CExpression and not rexpr.op and rexpr.rexpr == 0
@rexpr = @lexpr
else
@rexpr = CExpression.new(@lexpr, :'+', rexpr, @lexpr.type)
end
@op = :'*'
@lexpr = nil
precompile_inner(compiler, scope)
when :'?:'
# cannot precompile in place, a conditionnal expression may have a coma: must turn into If
if @lexpr.kind_of? CExpression
@lexpr = @lexpr.precompile_inner(compiler, scope)
if not @lexpr.lexpr and not @lexpr.op and @lexpr.rexpr.kind_of? ::Numeric
if @lexpr.rexpr == 0
e = @rexpr[1]
else
e = @rexpr[0]
end
e = CExpression.new(nil, nil, e, e.type) if not e.kind_of? CExpression
return e.precompile_inner(compiler, scope)
end
end
raise 'conditional in toplevel' if scope == compiler.toplevel # just in case
var = Variable.new
var.storage = :register
var.name = compiler.new_label('ternary')
var.type = @rexpr[0].type
CExpression.precompile_type(compiler, scope, var)
Declaration.new(var).precompile(compiler, scope)
If.new(@lexpr, CExpression.new(var, :'=', @rexpr[0], var.type), CExpression.new(var, :'=', @rexpr[1], var.type)).precompile(compiler, scope)
@lexpr = nil
@op = nil
@rexpr = var
precompile_inner(compiler, scope)
when :'&&'
if scope == compiler.toplevel
@lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
CExpression.precompile_type(compiler, scope, self)
self
else
var = Variable.new
var.storage = :register
var.name = compiler.new_label('and')
var.type = @type
CExpression.precompile_type(compiler, scope, var)
var.initializer = CExpression.new(nil, nil, 0, var.type)
Declaration.new(var).precompile(compiler, scope)
l = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
r = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
If.new(l, If.new(r, CExpression.new(var, :'=', CExpression.new(nil, nil, 1, var.type), var.type))).precompile(compiler, scope)
@lexpr = nil
@op = nil
@rexpr = var
precompile_inner(compiler, scope)
end
when :'||'
if scope == compiler.toplevel
@lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
CExpression.precompile_type(compiler, scope, self)
self
else
var = Variable.new
var.storage = :register
var.name = compiler.new_label('or')
var.type = @type
CExpression.precompile_type(compiler, scope, var)
var.initializer = CExpression.new(nil, nil, 1, var.type)
Declaration.new(var).precompile(compiler, scope)
l = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
l = CExpression.new(nil, :'!', l, var.type)
r = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
r = CExpression.new(nil, :'!', r, var.type)
If.new(l, If.new(r, CExpression.new(var, :'=', CExpression.new(nil, nil, 0, var.type), var.type))).precompile(compiler, scope)
@lexpr = nil
@op = nil
@rexpr = var
precompile_inner(compiler, scope)
end
when :funcall
if @lexpr.kind_of? Variable and @lexpr.type.kind_of? Function and @lexpr.attributes and @lexpr.attributes.include? 'inline' and @lexpr.initializer
# TODO check recursive call (direct or indirect)
raise 'inline varargs unsupported' if @lexpr.type.varargs
rtype = @lexpr.type.type.untypedef
if not rtype.kind_of? BaseType or rtype.name != :void
rval = Variable.new
rval.name = compiler.new_label('inline_return')
rval.type = @lexpr.type.type
Declaration.new(rval).precompile(compiler, scope)
end
inline_label = {}
locals = @lexpr.type.args.zip(@rexpr).inject({}) { |h, (fa, a)|
h.update fa => CExpression.new(nil, nil, a, fa.type).precompile_inner(compiler, scope)
}
copy_inline_ce = lambda { |ce|
case ce
when CExpression; CExpression.new(copy_inline_ce[ce.lexpr], ce.op, copy_inline_ce[ce.rexpr], ce.type)
when Variable; locals[ce] || ce
when ::Array; ce.map { |e_| copy_inline_ce[e_] }
else ce
end
}
copy_inline = lambda { |stmt, scp|
case stmt
when Block
b = Block.new(scp)
stmt.statements.each { |s|
s = copy_inline[s, b]
b.statements << s if s
}
b
when If; If.new(copy_inline_ce[stmt.test], copy_inline[stmt.bthen, scp]) # re-precompile ?
when Label; Label.new(inline_label[stmt.name] ||= compiler.new_label('inline_'+stmt.name))
when Goto; Goto.new(inline_label[stmt.target] ||= compiler.new_label('inline_'+stmt.target))
when Return; CExpression.new(rval, :'=', copy_inline_ce[stmt.value], rval.type).precompile_inner(compiler, scp) if stmt.value
when CExpression; copy_inline_ce[stmt]
when Declaration
nv = stmt.var.dup
if nv.type.kind_of? Array and nv.type.length.kind_of? CExpression
nv.type = Array.new(nv.type.type, copy_inline_ce[nv.type.length]) # XXX nested dynamic?
end
locals[stmt.var] = nv
scp.symbol[nv.name] = nv
Declaration.new(nv)
else raise 'unexpected inline statement ' + stmt.inspect
end
}
scope.statements << copy_inline[@lexpr.initializer, scope] # body already precompiled
CExpression.new(nil, nil, rval, rval.type).precompile_inner(compiler, scope)
elsif @type.kind_of? Struct
var = Variable.new
var.name = compiler.new_label('return_struct')
var.type = @type
Declaration.new(var).precompile(compiler, scope)
@rexpr.unshift CExpression.new(nil, :&, var, Pointer.new(var.type))
var2 = Variable.new
var2.name = compiler.new_label('return_struct_ptr')
var2.type = Pointer.new(@type)
var2.storage = :register
CExpression.precompile_type(compiler, scope, var2)
Declaration.new(var2).precompile(compiler, scope)
@type = var2.type
CExpression.new(var2, :'=', self, var2.type).precompile(compiler, scope)
CExpression.new(nil, :'*', var2, var.type).precompile_inner(compiler, scope)
else
t = @lexpr.type.untypedef
t = t.type.untypedef if t.pointer?
@lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
types = t.args.map { |a| a.type }
# cast args to func prototype
@rexpr.map! { |e_| (types.empty? ? e_ : CExpression.new(nil, nil, e_, types.shift)).precompile_inner(compiler, scope) }
CExpression.precompile_type(compiler, scope, self)
self
end
when :','
lexpr = @lexpr.kind_of?(CExpression) ? @lexpr : CExpression.new(nil, nil, @lexpr, @lexpr.type)
rexpr = @rexpr.kind_of?(CExpression) ? @rexpr : CExpression.new(nil, nil, @rexpr, @rexpr.type)
lexpr.precompile(compiler, scope)
rexpr.precompile_inner(compiler, scope)
when :'!'
CExpression.precompile_type(compiler, scope, self)
if @rexpr.kind_of?(CExpression)
case @rexpr.op
when :'<', :'>', :'<=', :'>=', :'==', :'!='
@op = { :'<' => :'>=', :'>' => :'<=', :'<=' => :'>', :'>=' => :'<',
:'==' => :'!=', :'!=' => :'==' }[@rexpr.op]
@lexpr = @rexpr.lexpr
@rexpr = @rexpr.rexpr
precompile_inner(compiler, scope)
when :'&&', :'||'
@op = { :'&&' => :'||', :'||' => :'&&' }[@rexpr.op]
@lexpr = CExpression.new(nil, :'!', @rexpr.lexpr, @type)
@rexpr = CExpression.new(nil, :'!', @rexpr.rexpr, @type)
precompile_inner(compiler, scope)
when :'!'
if @rexpr.rexpr.kind_of? CExpression
@op = nil
@rexpr = @rexpr.rexpr
else
@op = :'!='
@lexpr = @rexpr.rexpr
@rexpr = CExpression.new(nil, nil, 0, @lexpr.type)
end
precompile_inner(compiler, scope)
else
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
self
end
else
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
self
end
when :'++', :'--'
if not @rexpr
var = Variable.new
var.storage = :register
var.name = compiler.new_label('postincrement')
var.type = @type
Declaration.new(var).precompile(compiler, scope)
CExpression.new(var, :'=', @lexpr, @type).precompile(compiler, scope)
CExpression.new(nil, @op, @lexpr, @type).precompile(compiler, scope)
@lexpr = nil
@op = nil
@rexpr = var
precompile_inner(compiler, scope)
elsif @type.pointer? and compiler.sizeof(nil, @type.untypedef.type.untypedef) != 1
# ++ptr => ptr += sizeof(*ptr) (done in += precompiler)
@op = { :'++' => :'+=', :'--' => :'-=' }[@op]
@lexpr = @rexpr
@rexpr = CExpression.new(nil, nil, 1, BaseType.new(:ptr, :unsigned))
precompile_inner(compiler, scope)
else
CExpression.precompile_type(compiler, scope, self)
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
self
end
when :'='
# handle structure assignment/array assignment
case @lexpr.type.untypedef
when Union
# rexpr may be a :funcall
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
@lexpr.type.untypedef.members.zip(@rexpr.type.untypedef.members) { |m1, m2|
# assume m1 and m2 are compatible
v1 = CExpression.new(@lexpr, :'.', m1.name, m1.type)
v2 = CExpression.new(@rexpr, :'.', m2.name, m1.type)
CExpression.new(v1, :'=', v2, v1.type).precompile(compiler, scope)
}
# (foo = bar).toto
@op = nil
@rexpr = @lexpr
@lexpr = nil
@type = @rexpr.type
precompile_inner(compiler, scope) if nested
when Array
if not len = @lexpr.type.untypedef.length
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
# char toto[] = "bla"
if @rexpr.kind_of? CExpression and not @rexpr.lexpr and not @rexpr.op and
@rexpr.rexpr.kind_of? Variable and @rexpr.rexpr.type.kind_of? Array
len = @rexpr.rexpr.type.length
end
end
raise 'array initializer with no length !' if not len
# TODO optimize...
len.times { |i|
i = CExpression.new(nil, nil, i, BaseType.new(:long, :unsigned))
v1 = CExpression.new(@lexpr, :'[]', i, @lexpr.type.untypedef.type)
v2 = CExpression.new(@rexpr, :'[]', i, v1.type)
CExpression.new(v1, :'=', v2, v1.type).precompile(compiler, scope)
}
@op = nil
@rexpr = @lexpr
@lexpr = nil
@type = @rexpr.type
precompile_inner(compiler, scope) if nested
else
@lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
CExpression.precompile_type(compiler, scope, self)
self
end
when nil
case @rexpr
when Block
# compound statements
raise 'compound statement in toplevel' if scope == compiler.toplevel # just in case
var = Variable.new
var.storage = :register
var.name = compiler.new_label('compoundstatement')
var.type = @type
CExpression.precompile_type(compiler, scope, var)
Declaration.new(var).precompile(compiler, scope)
if @rexpr.statements.last.kind_of? CExpression
@rexpr.statements[-1] = CExpression.new(var, :'=', @rexpr.statements[-1], var.type)
@rexpr.precompile(compiler, scope)
end
@rexpr = var
precompile_inner(compiler, scope)
when ::String
# char[] immediate
v = Variable.new
v.storage = :static
v.name = 'char_' + @rexpr.tr('^a-zA-Z', '')[0, 8]
v.type = Array.new(@type.type)
v.type.length = @rexpr.length + 1
v.type.type.qualifier = [:const]
v.initializer = CExpression.new(nil, nil, @rexpr, @type)
Declaration.new(v).precompile(compiler, scope)
@rexpr = v
precompile_inner(compiler, scope)
when ::Float
# float immediate
v = Variable.new
v.storage = :static
v.name = @type.untypedef.name.to_s
v.type = @type
v.type.qualifier = [:const]
v.initializer = CExpression.new(nil, nil, @rexpr, @type)
Declaration.new(v).precompile(compiler, scope)
@rexpr = CExpression.new(nil, :'*', v, Pointer.new(v.type))
precompile_inner(compiler, scope)
when CExpression
# simplify casts
CExpression.precompile_type(compiler, scope, self)
# propagate type first so that (__uint64)(-1) => 0xffffffffffffffff
@rexpr.type = @type
@rexpr = @rexpr.precompile_inner(compiler, scope)
if @type.kind_of? BaseType and @rexpr.type.kind_of? BaseType
if @rexpr.type == @type
# noop cast
@lexpr, @op, @rexpr = @rexpr.lexpr, @rexpr.op, @rexpr.rexpr
elsif not @rexpr.op and @type.integral? and @rexpr.type.integral?
if @rexpr.rexpr.kind_of? ::Numeric and (val = reduce(compiler)).kind_of? ::Numeric
@rexpr = val
elsif compiler.typesize[@type.name] < compiler.typesize[@rexpr.type.name]
# (char)(short)(int)(long)foo => (char)foo
@rexpr = @rexpr.rexpr
end
end
end
self
else
CExpression.precompile_type(compiler, scope, self)
self
end
else
# int+ptr => ptr+int
if @op == :+ and @lexpr and @lexpr.type.integral? and @rexpr.type.pointer?
@rexpr, @lexpr = @lexpr, @rexpr
end
# handle pointer + 2 == ((char *)pointer) + 2*sizeof(*pointer)
if @rexpr and [:'+', :'+=', :'-', :'-='].include? @op and
@type.pointer? and @rexpr.type.integral?
sz = compiler.sizeof(nil, @type.untypedef.type.untypedef)
if sz != 1
sz = CExpression.new(nil, nil, sz, @rexpr.type)
@rexpr = CExpression.new(@rexpr, :'*', sz, @rexpr.type)
end
end
# type promotion => cast
case @op
when :+, :-, :*, :/, :&, :|, :^, :%
if @lexpr
if @lexpr.type != @type
@lexpr = CExpression.new(nil, nil, @lexpr, @lexpr.type) if not @lexpr.kind_of? CExpression
@lexpr = CExpression.new(nil, nil, @lexpr, @type)
end
if @rexpr.type != @type
@rexpr = CExpression.new(nil, nil, @rexpr, @rexpr.type) if not @rexpr.kind_of? CExpression
@rexpr = CExpression.new(nil, nil, @rexpr, @type)
end
end
when :>>, :<<
# char => int
if @lexpr.type != @type
@lexpr = CExpression.new(nil, nil, @lexpr, @lexpr.type) if not @lexpr.kind_of? CExpression
@lexpr = CExpression.new(nil, nil, @lexpr, @type)
end
when :'+=', :'-=', :'*=', :'/=', :'&=', :'|=', :'^=', :'%='
if @rexpr.type != @lexpr.type
@rexpr = CExpression.new(nil, nil, @rexpr, @rexpr.type) if not @rexpr.kind_of? CExpression
@rexpr = CExpression.new(nil, nil, @rexpr, @type)
end
end
@lexpr = CExpression.precompile_inner(compiler, scope, @lexpr)
@rexpr = CExpression.precompile_inner(compiler, scope, @rexpr)
if @op == :'&' and not @lexpr
rr = @rexpr
rr = rr.rexpr while rr.kind_of? CExpression and not rr.op
if rr.kind_of? CExpression and rr.op == :'*' and not rr.lexpr
@lexpr = nil
@op = nil
@rexpr = rr.rexpr
return precompile_inner(compiler, scope)
elsif rr != @rexpr
@rexpr = rr
return precompile_inner(compiler, scope)
end
end
CExpression.precompile_type(compiler, scope, self)
isnumeric = lambda { |e_| e_.kind_of?(::Numeric) or (e_.kind_of? CExpression and
not e_.lexpr and not e_.op and e_.rexpr.kind_of? ::Numeric) }
# calc numeric
# XXX do not simplify operations involving variables (for type overflow etc)
if isnumeric[@rexpr] and (not @lexpr or isnumeric[@lexpr]) and (val = reduce(compiler)).kind_of? ::Numeric
@lexpr = nil
@op = nil
@rexpr = val
end
self
end
end
end
end
end
|
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/main'
require 'metasm/decode'
require 'metasm/parse_c'
module Metasm
# Reopen parser/disassembler classes to attach decompiler metadata.
class C::Variable
	# stack offset of the variable relative to the function frame (see stackoff_to_varname)
	attr_accessor :stackoff
end
class C::Block
	# decompiler bookkeeping hash (:stackoff_type / :stackoff_name maps)
	attr_accessor :decompdata
end
class DecodedFunction
	# decompiler bookkeeping hash (:stackoff_type / :stackoff_name maps)
	attr_accessor :decompdata
end
class CPU
	# hook called once a function is fully decompiled, lets the CPU check/fix
	# the calling convention of the generated prototype ; no-op by default,
	# meant to be overridden by arch-specific subclasses
	def decompile_check_abi(dcmp, entry, func)
	end

	# return the instruction's backtrace binding, rewritten for forward emulation:
	# a binding is a *simultaneous* affectation, so if a register is updated in
	# terms of itself (eg a <- a+1) the other entries are rebound so the result
	# stays correct when applied sequentially
	def get_fwdemu_binding(di)
		# lazily compute and cache the raw binding on the instruction
		fdi = di.backtrace_binding ||= get_backtrace_binding(di)
		# find self-updated regs & revert them in simultaneous affectations
		# XXX handles only a <- a+i for now, this covers all useful cases (except imul eax, eax, 42 jz foobar)
		fdi.keys.grep(::Symbol).each { |s|
			val = Expression[fdi[s]]
			# only handle s <- s + x / s <- s - x
			next if val.lexpr != s or (val.op != :+ and val.op != :-) #or not val.rexpr.kind_of? ::Integer
			fwd = { s => val }
			# inv expresses the pre-update value of s from the post-update one
			# (op flipped: s+i becomes s-i)
			inv = { s => val.dup }
			inv[s].op = ((inv[s].op == :+) ? :- : :+)
			nxt = {}
			fdi.each { |k, v|
				if k == s
					nxt[k] = v
				else
					# NOTE(review): memory-write keys go through fwd, values through inv —
					# presumably so the binding may be applied with s updated first ; confirm
					k = k.bind(fwd).reduce_rec if k.kind_of? Indirection
					nxt[k] = Expression[Expression[v].bind(inv).reduce_rec]
				end
			}
			fdi = nxt
		}
		fdi
	end
end
class Decompiler
# TODO add methods to C::CExpr
# C operators that mutate their lefthand side (used to detect assignments)
AssignOp = [:'=', :'+=', :'-=', :'*=', :'/=', :'%=', :'^=', :'&=', :'|=', :'>>=', :'<<=', :'++', :'--']

attr_accessor :dasm, :c_parser
# set any of these to true to disable the corresponding decompiler pass
attr_accessor :forbid_optimize_dataflow, :forbid_optimize_code, :forbid_decompile_ifwhile, :forbid_decompile_types, :forbid_optimize_labels
# recursive flag: for each subfunction, recurse is decremented, when 0 only the prototype is decompiled, when <0 nothing is done
attr_accessor :recurse
# Build a decompiler over +dasm+ ; reuses the disassembler's C parser
# unless an explicit one is supplied.
def initialize(dasm, cp = dasm.c_parser)
	@dasm = dasm
	# unlimited recursion into subfunctions by default
	@recurse = 1.0 / 0.0	# Infinity
	@c_parser = cp || @dasm.cpu.new_cparser
end
# decompile recursively function from an entrypoint, then perform global optimisation (static vars, ...)
# should be called once after everything is decompiled (global optimizations may bring bad results otherwise)
# use decompile_func for incremental decompilation
# returns the c_parser
def decompile(*entry)
	entry.each do |addr|
		decompile_func(addr)
	end
	finalize
	@c_parser
end
# decompile a function, decompiling subfunctions as needed
# may return :restart, which means that the decompilation should restart from the entrypoint (and bubble up) (eg a new codepath is found which may changes dependency in blocks etc)
def decompile_func(entry)
	# recursion budget exhausted: do nothing at all
	return if @recurse < 0

	entry = @dasm.normalize entry
	return if not @dasm.decoded[entry]

	# create a new toplevel function to hold our code
	func = C::Variable.new
	func.name = @dasm.auto_label_at(entry, 'func')
	# reuse a user-specified return type if one is recorded, default to int
	if f = @dasm.function[entry] and f.decompdata and f.decompdata[:return_type]
		rettype = f.decompdata[:return_type]
	else
		rettype = C::BaseType.new(:int)
	end
	func.type = C::Function.new rettype, []
	if @c_parser.toplevel.symbol[func.name]
		# already decompiled (or decompilation in progress)
		return if @recurse == 0
		if not @c_parser.toplevel.statements.grep(C::Declaration).find { |decl| decl.var.name == func.name }
			# recursive dependency: declare prototype
			puts "function #{func.name} is recursive: predecompiling for prototype" if $VERBOSE
			# rerun with @recurse = 0 so that only the prototype is generated
			pre_recurse = @recurse
			@recurse = 0
			decompile_func(entry)
			@recurse = pre_recurse
			if not dcl = @c_parser.toplevel.statements.grep(C::Declaration).find { |decl| decl.var.name == func.name }
				@c_parser.toplevel.statements << C::Declaration.new(func)
			end
		end
		return
	end
	@c_parser.toplevel.symbol[func.name] = func
	puts "decompiling #{func.name}" if $VERBOSE

	# do_decompile_func throws :restart when a new codepath invalidates the
	# current decompilation ; rerun locally and propagate :restart upwards
	while catch(:restart) { do_decompile_func(entry, func) } == :restart
		retval = :restart
	end

	@c_parser.toplevel.symbol[func.name] = func	# recursive func prototype could have overwritten us
	@c_parser.toplevel.statements << C::Declaration.new(func)

	puts " decompiled #{func.name}" if $VERBOSE

	retval
end
# calls decompile_func with recurse -= 1 (internal use)
# the recursion budget is restored by the ensure clause even on exception
def decompile_func_rec(entry)
	@recurse -= 1
	decompile_func(entry)
ensure
	@recurse += 1
end
# full decompilation pipeline for one function: block listing, stack var
# naming, C statement generation, then the successive simplification passes
def do_decompile_func(entry, func)
	# find decodedinstruction graph of the function, decompile subfuncs
	myblocks = listblocks_func(entry)

	# [esp+8] => [:frameptr-12]
	makestackvars entry, myblocks.map { |b, to| @dasm.decoded[b].block }

	# find registry dependencies between blocks
	deps = @dasm.cpu.decompile_func_finddeps(self, myblocks, func)

	scope = func.initializer = C::Block.new(@c_parser.toplevel)
	# share the decompdata hash with the DecodedFunction so user-specified
	# stack offset names/types survive re-decompilation
	if df = @dasm.function[entry]
		scope.decompdata = df.decompdata ||= {:stackoff_type => {}, :stackoff_name => {}}
	else
		scope.decompdata ||= {:stackoff_type => {}, :stackoff_name => {}}
	end

	# di blocks => raw c statements, declare variables
	@dasm.cpu.decompile_blocks(self, myblocks, deps, func)

	simplify_goto(scope)
	namestackvars(scope)
	unalias_vars(scope, func)
	decompile_c_types(scope)
	optimize(scope)
	remove_unreferenced_vars(scope)
	cleanup_var_decl(scope, func)
	# control flow reconstruction is skipped when only the prototype is needed
	if @recurse > 0
		decompile_controlseq(scope)
		optimize_vars(scope)
		optimize_ctrl(scope)
		optimize_vars(scope)
		remove_unreferenced_vars(scope)
		simplify_varname_noalias(scope)
		rename_variables(scope)
	end
	@dasm.cpu.decompile_check_abi(self, entry, func)

	# infer the function return type from the trailing return statement
	case ret = scope.statements.last
	when C::CExpression; puts "no return at end of func" if $VERBOSE
	when C::Return
		if not ret.value
			# void return: drop the redundant trailing statement
			scope.statements.pop
		else
			v = ret.value
			# unwrap a no-op cast around the returned expression/variable
			v = v.rexpr if v.kind_of? C::CExpression and not v.op and (v.rexpr.kind_of? C::CExpression or v.rexpr.kind_of? C::Variable)
			func.type.type = v.type
		end
	end

	if @recurse == 0
		# we need only the prototype
		func.initializer = nil
	end
end
# create (or reuse) a toplevel C variable for the data at the given address,
# decompiling the target first if it is a function pointer ; returns the
# C::Variable object to use in place of the raw address
def new_global_var(addr, type)
	addr = @dasm.normalize(addr)

	# TODO check overlap with alreadydefined globals

	ptype = type.untypedef.type.untypedef if type.pointer?
	if ptype.kind_of? C::Function
		# pointer to a function: make sure the pointee is decompiled
		name = @dasm.auto_label_at(addr, 'sub', 'xref', 'byte', 'word', 'dword', 'unk')
		if @dasm.get_section_at(addr) and @recurse > 0
			@dasm.disassemble(addr) if not @dasm.decoded[addr]	# TODO disassemble_fast ?
			f = @dasm.function[addr] ||= DecodedFunction.new
			# TODO detect thunks (__noreturn)
			f.decompdata ||= { :stackoff_type => {}, :stackoff_name => {} }
			if not s = @c_parser.toplevel.symbol[name] or not s.initializer or not s.type.untypedef.kind_of? C::Function
				# drop any previous non-function definition of the target
				@c_parser.toplevel.symbol.delete name
				@c_parser.toplevel.statements.delete_if { |ts| ts.kind_of? C::Declaration and ts.var.name == name }
				# seed the target's stack offset types/names from the prototype args
				aoff = 1
				ptype.args.to_a.each { |a|
					# round the offset up to pointer alignment
					aoff = (aoff + @c_parser.typesize[:ptr] - 1) / @c_parser.typesize[:ptr] * @c_parser.typesize[:ptr]
					f.decompdata[:stackoff_type][aoff] ||= a.type
					f.decompdata[:stackoff_name][aoff] ||= a.name if a.name
					aoff += sizeof(a)	# ary ?
				}
				decompile_func_rec(addr)
				# else redecompile with new prototye ?
			end
		end
	end

	# pick a name prefix from the pointed element size
	name = case (type.pointer? && tsz = sizeof(nil, ptype))
	when 1; 'byte'
	when 2; 'word'
	when 4; 'dword'
	else 'unk'
	end
	name = 'stru' if ptype.kind_of? C::Union
	name = @dasm.auto_label_at(addr, name, 'xref', 'byte', 'word', 'dword', 'unk', 'stru')

	if not var = @c_parser.toplevel.symbol[name]
		var = C::Variable.new
		var.name = name
		# a pointed-to global is declared as an array of the pointed type
		var.type = type.pointer? ? C::Array.new(ptype) : type
		@c_parser.toplevel.symbol[var.name] = var
		@c_parser.toplevel.statements << C::Declaration.new(var)
	end

	# try to read an initializer from the binary section
	if ptype.kind_of? C::Union and type.pointer? and s = @dasm.get_section_at(name) and s[0].ptr < s[0].length
		# TODO struct init, array, fptrs..
	elsif type.pointer? and s = @dasm.get_section_at(name) and s[0].ptr < s[0].length and [1, 2, 4].include? tsz and (not var.type.pointer? or sizeof(var.type.untypedef.type) != sizeof(type.untypedef.type) or not var.initializer)
		# TODO do not overlap other statics (but labels may refer to elements of the array...)
		# decode up to 257 elements starting at the section pointer
		data = (0..256).map {
			v = s[0].decode_imm("u#{tsz*8}".to_sym, @dasm.cpu.endianness)
			v = decompile_cexpr(v, @c_parser.toplevel) if v.kind_of? Expression	# relocation
			v
		}
		var.initializer = data.map { |v| C::CExpression[v, C::BaseType.new(:int)] } unless (data - [0]).empty?
		# heuristic: first 4 elements printable + a 0 terminator => C string literal
		if (tsz == 1 or tsz == 2) and eos = data.index(0) and (0..3).all? { |i| data[i] >= 0x20 and data[i] < 0x7f }	# printable str
			# XXX 0x80 with ruby1.9...
			var.initializer = C::CExpression[data[0, eos].pack('C*'), C::Pointer.new(ptype)] rescue nil
		end
		# first element is an external function pointer => import address table slot
		if var.initializer.kind_of? ::Array and i = var.initializer.first and i.kind_of? C::CExpression and not i.op and i.rexpr.kind_of? C::Variable and
				i.rexpr.type.kind_of? C::Function and not @dasm.get_section_at(@dasm.normalize(i.rexpr.name))	# iat_ExternalFunc
			i.type = i.rexpr.type
			type = var.type = C::Array.new(C::Pointer.new(i.type))
			var.initializer = [i]
		end
		var.initializer = nil if var.initializer.kind_of? ::Array and not type.untypedef.kind_of? C::Array
	end

	# TODO patch existing references to addr ? (or would they have already triggered new_global_var?)
	# return the object to use to replace the raw addr
	var
end
# return an array of [address of block start, list of block to]]
# decompile subfunctions
def listblocks_func(entry)
	@autofuncs ||= []
	blocks = []
	entry = dasm.normalize entry
	todo = [entry]
	# breadth-less worklist walk over the basic blocks of the function
	while a = todo.pop
		next if blocks.find { |aa, at| aa == a }
		next if not di = @dasm.decoded[a]
		next if not di.kind_of? DecodedInstruction
		blocks << [a, []]
		di.block.each_to { |ta, type|
			next if type == :indirect
			ta = dasm.normalize ta
			if type != :subfuncret and not @dasm.function[ta] and
					(not @dasm.function[entry] or @autofuncs.include? entry) and
					di.block.list.last.opcode.props[:saveip]
				# possible noreturn function
				# XXX call $+5; pop eax
				@autofuncs << ta
				@dasm.function[ta] = DecodedFunction.new
				puts "autofunc #{Expression[ta]}" if $VERBOSE
			end

			if @dasm.function[ta] and type != :subfuncret
				# edge to a subfunction: decompile it now (resolving thunks)
				f = dasm.auto_label_at(ta, 'func')
				ta = dasm.normalize($1) if f =~ /^thunk_(.*)/
				ret = decompile_func_rec(ta) if (ta != entry or di.block.to_subfuncret)
				throw :restart, :restart if ret == :restart
			else
				# plain intra-function edge: record it and keep walking
				@dasm.auto_label_at(ta, 'label') if blocks.find { |aa, at| aa == ta }
				blocks.last[1] |= [ta]
				todo << ta
			end
		}
	end

	blocks
end
# backtraces an expression from addr
# returns an integer, a label name, or an Expression
# XXX '(GetProcAddr("foo"))()' should not decompile to 'foo()'
def backtrace_target(expr, addr)
	resolved = @dasm.backtrace(expr, addr).first
	return expr if not resolved
	return expr if resolved == Expression::Unknown
	resolved = Expression[resolved].reduce_rec
	resolved = @dasm.get_label_at(resolved) || resolved
	resolved = $1 if resolved.kind_of?(::String) and resolved =~ /^thunk_(.*)/
	resolved
end
# patches instruction's backtrace_binding to replace things referring to a static stack offset from func start by :frameptr+off
def makestackvars(funcstart, blocks)
	# NOTE(review): blockstart is initialized to nil and never reassigned in
	# this method, so :snapshot_addr is always nil for the first backtrace ;
	# upstream versions set it per-block inside the decompile_makestackvars
	# yield — confirm this omission is intentional
	blockstart = nil
	cache_di = nil
	cache = {}	# [i_s, e, type] => backtrace
	# recursively rewrite an expression into a :frameptr-relative form when possible
	tovar = lambda { |di, e, i_s|
		case e
		when Expression; Expression[tovar[di, e.lexpr, i_s], e.op, tovar[di, e.rexpr, i_s]].reduce
		when Indirection; Indirection[tovar[di, e.target, i_s], e.len]
		when :frameptr; e
		when ::Symbol
			# the backtrace cache is only valid for a single instruction
			cache.clear if cache_di != di ; cache_di = di
			vals = cache[[e, i_s, 0]] ||= @dasm.backtrace(e, di.address, :snapshot_addr => blockstart,
					:include_start => i_s, :no_check => true, :terminals => [:frameptr])
			# backtrace only to blockstart first
			if vals.length == 1 and ee = vals.first and ee.kind_of? Expression and (ee == Expression[:frameptr] or
					(ee.lexpr == :frameptr and ee.op == :+ and ee.rexpr.kind_of? ::Integer) or
					(not ee.lexpr and ee.op == :+ and ee.rexpr.kind_of? Indirection and eep = ee.rexpr.pointer and
					(eep == Expression[:frameptr] or (eep.lexpr == :frameptr and eep.op == :+ and eep.rexpr.kind_of? ::Integer))))
				ee
			else
				# fallback on full run (could restart from blockstart with ee, but may reevaluate addr_binding..
				vals = cache[[e, i_s, 1]] ||= @dasm.backtrace(e, di.address, :snapshot_addr => funcstart,
						:include_start => i_s, :no_check => true, :terminals => [:frameptr])
				if vals.length == 1 and ee = vals.first and (ee.kind_of? Expression and (ee == Expression[:frameptr] or
						(ee.lexpr == :frameptr and ee.op == :+ and ee.rexpr.kind_of? ::Integer)))
					ee
				else e
				end
			end
		else e
		end
	}

	# must not change bt_bindings until everything is backtracked
	repl_bind = {}	# di => bt_bd
	@dasm.cpu.decompile_makestackvars(@dasm, funcstart, blocks) { |block|
		block.list.each { |di|
			bd = di.backtrace_binding ||= @dasm.cpu.get_backtrace_binding(di)
			newbd = repl_bind[di] = {}
			bd.each { |k, v|
				k = tovar[di, k, true] if k.kind_of? Indirection
				# entries whose key is frameptr(+cst) are dropped from the new binding
				next if k == Expression[:frameptr] or (k.kind_of? Expression and k.lexpr == :frameptr and k.op == :+ and k.rexpr.kind_of? ::Integer)
				newbd[k] = tovar[di, v, false]
			}
		}
	}
	# apply the rewritten bindings only after every instruction was processed
	repl_bind.each { |di, bd| di.backtrace_binding = bd }
end
# give a name to a stackoffset (relative to start of func)
# 4 => :arg_0, -8 => :var_4 etc
# sub-slot offsets (not a multiple of the word size) get a '0' marker
# (2 => arg_02, -2 => var_02)
def stackoff_to_varname(off)
	ptrsz = @c_parser.typesize[:ptr]
	wordsz = @dasm.cpu.size / 8
	case
	when off >= ptrsz  then 'arg_%X' % (off - ptrsz)	# 4 => arg_0, 8 => arg_4..
	when off > 0       then 'arg_0%X' % off
	when off == 0      then 'retaddr'
	when off <= -wordsz then 'var_%X' % (-off - wordsz)	# -4 => var_0, -8 => var_4..
	else 'var_0%X' % -off
	end
end
# turns an Expression to a CExpression, create+declares needed variables in scope
def decompile_cexpr(e, scope, itype=nil)
	case e
	when Expression
		if e.op == :'=' and e.lexpr.kind_of? ::String and e.lexpr =~ /^dummy_metasm_/
			# discard dummy assignment targets generated internally
			decompile_cexpr(e.rexpr, scope, itype)
		elsif e.op == :+ and e.rexpr.kind_of? ::Integer and e.rexpr < 0
			# a + -1 => a - 1
			decompile_cexpr(Expression[e.lexpr, :-, -e.rexpr], scope, itype)
		elsif e.lexpr
			a = decompile_cexpr(e.lexpr, scope, itype)
			C::CExpression[a, e.op, decompile_cexpr(e.rexpr, scope, itype)]
		elsif e.op == :+
			# unary + is a no-op
			decompile_cexpr(e.rexpr, scope, itype)
		else
			a = decompile_cexpr(e.rexpr, scope, itype)
			C::CExpression[e.op, a]
		end
	when Indirection
		# memory dereference: cast the target to an __intN* and emit unary *
		itype = C::Pointer.new(C::BaseType.new("__int#{e.len*8}".to_sym))
		p = decompile_cexpr(e.target, scope, itype)
		p = C::CExpression[[p], itype] if not p.type.kind_of? C::Pointer
		C::CExpression[:*, p]
	when ::Integer
		C::CExpression[e]
	when C::CExpression
		e
	else
		# leaf (register symbol, label string, ...): find or declare a C variable
		name = e.to_s
		if not s = scope.symbol_ancestors[name]
			s = C::Variable.new
			s.type = C::BaseType.new(:__int32)
			case e
			when ::String	# edata relocation (rel.length = size of pointer)
				return @c_parser.toplevel.symbol[e] || new_global_var(e, itype || C::BaseType.new(:int))
			when ::Symbol; s.storage = :register
			else s.type.qualifier = [:volatile]
				puts "decompile_cexpr unhandled #{e.inspect}, using #{e.to_s.inspect}" if $VERBOSE
			end
			s.name = name
			scope.symbol[s.name] = s
			scope.statements << C::Declaration.new(s)
		end
		s
	end
end
# simplify goto -> goto / goto -> return
def simplify_goto(scope, keepret = false)
	# label the final return, so earlier identical returns can goto it
	if not keepret and scope.statements[-1].kind_of? C::Return and not scope.statements[-2].kind_of? C::Label
		scope.statements.insert(-2, C::Label.new("ret_label"))
	end

	# map label name => goto/return statement directly following that label
	jumpto = {}
	walk(scope) { |s|
		next if not s.kind_of? C::Block
		s.statements.each_with_index { |ss, i|
			case ss
			when C::Goto, C::Return
				# register this target for every label immediately preceding it
				while l = s.statements[i -= 1] and l.kind_of? C::Label
					jumpto[l.name] = ss
				end
			end
		}
	}

	# return a simplified replacement for a goto/return statement, or nil
	simpler = lambda { |s|
		case s
		when C::Goto
			# goto l, where l: is followed by a goto/return => that statement
			if jumpto[s.target]
				r = jumpto[s.target].dup
				r.value = C::CExpression[r.value.reduce(@c_parser)] if r.kind_of? C::Return and r.value	# deep_dup
				r
			end
		when C::Return
			# return x identical to the final return => goto ret_label
			if not keepret and scope.statements[-1].kind_of? C::Return and s.value == scope.statements[-1].value and s != scope.statements[-1]
				C::Goto.new(scope.statements[-2].name)
			end
		end
	}

	walk(scope) { |s|
		case s
		when C::Block
			s.statements.each_with_index { |ss, i|
				if sp = simpler[ss]
					ss = s.statements[i] = sp
				end
			}
		when C::If
			if sp = simpler[s.bthen]
				s.bthen = sp
			end
		end
	}

	# remove unreferenced labels
	remove_labels(scope)

	walk(scope) { |s|
		next if not s.kind_of? C::Block
		del = false
		# remove dead code  goto a; goto b; if (0) { z: bla; } => rm goto b
		s.statements.delete_if { |st|
			case st
			when C::Goto, C::Return
				# drop any goto/return directly following another one
				olddel = del
				del = true
				olddel
			else
				del = false
			end
		}
		# if () { goto x; } x:  => empty the if body
		s.statements.each_with_index { |ss, i|
			if ss.kind_of? C::If
				t = ss.bthen
				t = t.statements.first if t.kind_of? C::Block
				if t.kind_of? C::Goto and s.statements[i+1].kind_of? C::Label and s.statements[i+1].name == t.target
					ss.bthen = C::Block.new(scope)
				end
			end
		}
	}
	remove_labels(scope)
end
# changes ifgoto, goto to while/ifelse..
def decompile_controlseq(scope)
	# TODO replace all this crap by a method using the graph representation
	# two rounds of if-reconstruction + label cleanup
	2.times do
		scope.statements = decompile_cseq_if(scope.statements, scope)
		remove_labels(scope)
	end
	# TODO harmonize _if/_while api (if returns a replacement, while patches)
	decompile_cseq_while(scope.statements, scope)
	decompile_cseq_switch(scope)
end
# optimize if() { a; } to if() a;
def optimize_ctrl(scope)
	simplify_goto(scope, true)

	# break/continue
	# XXX if (foo) while (bar) goto bla; bla:  should => break
	# recursive rewriter: brk/cnt are the label names that map to break/continue
	walk = lambda { |e, brk, cnt|
		case e
		when C::Block
			walk[e.statements, brk, cnt]
			e
		when ::Array
			e.each_with_index { |st, i|
				case st
				when C::While, C::DoWhile
					# the labels surrounding a loop are its break (after) and
					# continue (before) targets
					l1 = e[i+1].name if e[i+1].kind_of? C::Label
					l2 = e[i-1].name if e[i-1].kind_of? C::Label
					e[i] = walk[st, l1, l2]
				else
					e[i] = walk[st, brk, cnt]
				end
			}
			e
		when C::If
			e.bthen = walk[e.bthen, brk, cnt] if e.bthen
			# NOTE(review): guard on e.bthen looks like a typo for e.belse —
			# belse is not walked when bthen is nil ; confirm upstream
			e.belse = walk[e.belse, brk, cnt] if e.bthen
			e
		when C::While, C::DoWhile
			e.body = walk[e.body, nil, nil]
			e
		when C::Goto
			if e.target == brk
				C::Break.new
			elsif e.target == cnt
				C::Continue.new
			else e
			end
		else e
		end
	}
	walk[scope, nil, nil]
	remove_labels(scope)

	# while (1) { a; if(b) { c; return; }; d; } => while (1) { a; if (b) break; d; } c;
	while st = scope.statements.last and st.kind_of? C::While and st.test.kind_of? C::CExpression and
			not st.test.op and st.test.rexpr == 1 and st.body.kind_of? C::Block
		break if not i = st.body.statements.find { |ist|
			ist.kind_of? C::If and not ist.belse and ist.bthen.kind_of? C::Block and ist.bthen.statements.last.kind_of? C::Return
		}
		# hoist the returning if-body after the loop, replace it with break
		walk(i.bthen.statements) { |sst| sst.outer = i.bthen.outer if sst.kind_of? C::Block and sst.outer == i.bthen }
		scope.statements.concat i.bthen.statements
		i.bthen = C::Break.new
	end

	# simplify a test condition, descending through a leading negation
	patch_test = lambda { |ce|
		ce = ce.rexpr if ce.kind_of? C::CExpression and ce.op == :'!'
		# if (a+1) => if (a != -1)
		if ce.kind_of? C::CExpression and (ce.op == :+ or ce.op == :-) and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer
			ce.rexpr.rexpr = -ce.rexpr.rexpr if ce.op == :+
			ce.op = :'!='
		end
	}

	walk(scope) { |ce|
		case ce
		when C::If
			patch_test[ce.test]
			if ce.bthen.kind_of? C::Block
				case ce.bthen.statements.length
				when 1
					# if () { single; } => if () single;
					walk(ce.bthen.statements) { |sst| sst.outer = ce.bthen.outer if sst.kind_of? C::Block and sst.outer == ce.bthen }
					ce.bthen = ce.bthen.statements.first
				when 0
					# if () {} => keep just the test expression (for side effects)
					if not ce.belse and i = ce.bthen.outer.statements.index(ce)
						ce.bthen.outer.statements[i] = ce.test	# TODO remove sideeffectless parts
					end
				end
			end
			if ce.belse.kind_of? C::Block and ce.belse.statements.length == 1
				# else { single; } => else single;
				walk(ce.belse.statements) { |sst| sst.outer = ce.belse.outer if sst.kind_of? C::Block and sst.outer == ce.belse }
				ce.belse = ce.belse.statements.first
			end
		when C::While, C::DoWhile
			patch_test[ce.test]
			if ce.body.kind_of? C::Block
				case ce.body.statements.length
				when 1
					walk(ce.body.statements) { |sst| sst.outer = ce.body.outer if sst.kind_of? C::Block and sst.outer == ce.body }
					ce.body = ce.body.statements.first
				when 0
					# empty do/while body => plain while
					if ce.kind_of? C::DoWhile and i = ce.body.outer.statements.index(ce)
						ce = ce.body.outer.statements[i] = C::While.new(ce.test, ce.body)
					end
					ce.body = nil
				end
			end
		end
	}

	# merge consecutive ifs that share the same outcome
	walk(scope) { |ce|
		next if not ce.kind_of? C::Block
		st = ce.statements
		st.length.times { |n|
			while st[n].kind_of? C::If and st[n+1].kind_of? C::If and not st[n].belse and not st[n+1].belse and (
					(st[n].bthen.kind_of? C::Return and st[n+1].bthen.kind_of? C::Return and st[n].bthen.value == st[n+1].bthen.value) or
					(st[n].bthen.kind_of? C::Break and st[n+1].bthen.kind_of? C::Break) or
					(st[n].bthen.kind_of? C::Continue and st[n+1].bthen.kind_of? C::Continue))
				# if (a) return x; if (b) return x; => if (a || b) return x;
				st[n].test = C::CExpression[st[n].test, :'||', st[n+1].test]
				st.delete_at(n+1)
			end
		}
	}
end
# ifgoto => ifthen
# ary is an array of statements where we try to find   if () {} [else {}]
# recurses to then/else content
# NOTE: ary is consumed destructively (shift/slice assignment) ; the rebuilt
# statement list is returned
def decompile_cseq_if(ary, scope)
	return ary if forbid_decompile_ifwhile
	# the array of decompiled statements to use as replacement
	ret = []
	# list of labels appearing in ary
	inner_labels = ary.grep(C::Label).map { |l| l.name }
	while s = ary.shift
		# recurse if it's not the first run
		if s.kind_of? C::If
			s.bthen.statements = decompile_cseq_if(s.bthen.statements, s.bthen) if s.bthen.kind_of? C::Block
			s.belse.statements = decompile_cseq_if(s.belse.statements, s.belse) if s.belse.kind_of? C::Block
		end

		# if (a) goto x; if (b) goto x; => if (a || b) goto x;
		while s.kind_of? C::If and s.bthen.kind_of? C::Goto and not s.belse and ary.first.kind_of? C::If and ary.first.bthen.kind_of? C::Goto and
				not ary.first.belse and s.bthen.target == ary.first.bthen.target
			s.test = C::CExpression[s.test, :'||', ary.shift.test]
		end

		# if (a) goto x; b; x: => if (!a) { b; }
		if s.kind_of? C::If and s.bthen.kind_of? C::Goto and l = ary.grep(C::Label).find { |l_| l_.name == s.bthen.target }
			# if {goto l;} a; l: => if (!) {a;}
			s.test = C::CExpression.negate s.test
			s.bthen = C::Block.new(scope)
			s.bthen.statements = decompile_cseq_if(ary[0..ary.index(l)], s.bthen)
			s.bthen.statements.pop	# remove l: from bthen, it is in ary (was needed in bthen for inner ifs)
			ary[0...ary.index(l)] = []
		end

		if s.kind_of? C::If and (s.bthen.kind_of? C::Block or s.bthen.kind_of? C::Goto)
			s.bthen = C::Block.new(scope, [s.bthen]) if s.bthen.kind_of? C::Goto
			bts = s.bthen.statements

			# if (a) if (b) { c; } => if (a && b) { c; }
			if bts.length == 1 and bts.first.kind_of? C::If and not bts.first.belse
				s.test = C::CExpression[s.test, :'&&', bts.first.test]
				bts = bts.first.bthen
				bts = s.bthen.statements = bts.kind_of?(C::Block) ? bts.statements : [bts]
			end

			# if (a) { if (b) goto c; d; } c: => if (a && !b) { d; }
			if bts.first.kind_of? C::If and l = bts.first.bthen and (l = l.kind_of?(C::Block) ? l.statements.first : l) and l.kind_of? C::Goto and ary[0].kind_of? C::Label and l.target == ary[0].name
				s.test = C::CExpression[s.test, :'&&', C::CExpression.negate(bts.first.test)]
				if e = bts.shift.belse
					bts.unshift e
				end
			end

			# if () { goto a; } a:  => drop the redundant goto
			if bts.last.kind_of? C::Goto and ary[0].kind_of? C::Label and bts.last.target == ary[0].name
				bts.pop
			end

			# if { a; goto outer; } b; return; => if (!) { b; return; } a; goto outer;
			if bts.last.kind_of? C::Goto and not inner_labels.include? bts.last.target and g = ary.find { |ss| ss.kind_of? C::Goto or ss.kind_of? C::Return } and g.kind_of? C::Return
				s.test = C::CExpression.negate s.test
				ary[0..ary.index(g)], bts[0..-1] = bts, ary[0..ary.index(g)]
			end

			# if { a; goto l; } b; l: => if {a;} else {b;}
			if bts.last.kind_of? C::Goto and l = ary.grep(C::Label).find { |l_| l_.name == bts.last.target }
				s.belse = C::Block.new(scope)
				s.belse.statements = decompile_cseq_if(ary[0...ary.index(l)], s.belse)
				ary[0...ary.index(l)] = []
				bts.pop
			end

			# if { a; l: b; goto any;} c; goto l; => if { a; } else { c; } b; goto any;
			if not s.belse and (bts.last.kind_of? C::Goto or bts.last.kind_of? C::Return) and g = ary.grep(C::Goto).first and l = bts.grep(C::Label).find { |l_| l_.name == g.target }
				s.belse = C::Block.new(scope)
				s.belse.statements = decompile_cseq_if(ary[0...ary.index(g)], s.belse)
				ary[0..ary.index(g)], bts[bts.index(l)..-1] = bts[bts.index(l)..-1], []
			end

			# if { a; b; c; } else { d; b; c; } => if {a;} else {d;} b; c;
			if s.belse
				bes = s.belse.statements
				# hoist the common tail of both branches back into ary
				while not bts.empty?
					if bts.last.kind_of? C::Label; ary.unshift bts.pop
					elsif bes.last.kind_of? C::Label; ary.unshift bes.pop
					elsif bts.last.to_s == bes.last.to_s; ary.unshift bes.pop ; bts.pop
					else break
					end
				end

				# if () { a; } else { b; } => if () { a; } else b;
				# if () { a; } else {} => if () { a; }
				case bes.length
				when 0; s.belse = nil
				#when 1; s.belse = bes.first
				end
			end

			# if () {} else { a; } => if (!) { a; }
			# if () { a; } => if () a;
			case bts.length
			when 0; s.test, s.bthen, s.belse = C::CExpression.negate(s.test), s.belse, nil if s.belse
			#when 1; s.bthen = bts.first	# later (allows simpler handling in _while)
			end
		end

		# l1: l2: if () goto l1; goto l2; => if(!) goto l2; goto l1;
		if s.kind_of? C::If
			ls = s.bthen
			ls = ls.statements.last if ls.kind_of? C::Block
			if ls.kind_of? C::Goto
				if li = inner_labels.index(ls.target)
					table = inner_labels
				else
					table = ary.map { |st| st.name if st.kind_of? C::Label }.compact.reverse
					li = table.index(ls.target) || table.length
				end
				# find a later goto jumping further down than ls
				g = ary.find { |ss|
					break if ss.kind_of? C::Return
					next if not ss.kind_of? C::Goto
					table.index(ss.target).to_i > li
				}
				if g
					s.test = C::CExpression.negate s.test
					if not s.bthen.kind_of? C::Block
						ls = C::Block.new(scope)
						ls.statements << s.bthen
						s.bthen = ls
					end
					ary[0..ary.index(g)], s.bthen.statements = s.bthen.statements, decompile_cseq_if(ary[0..ary.index(g)], scope)
				end
			end
		end

		ret << s
	end
	ret
end
def decompile_cseq_while(ary, scope)
return if forbid_decompile_ifwhile
# find the next instruction that is not a label
ni = lambda { |l| ary[ary.index(l)..-1].find { |s| not s.kind_of? C::Label } }
# TODO XXX get rid of #index
finished = false ; while not finished ; finished = true # 1.9 does not support 'retry'
ary.each { |s|
case s
when C::Label
if ss = ni[s] and ss.kind_of? C::If and not ss.belse and ss.bthen.kind_of? C::Block
if ss.bthen.statements.last.kind_of? C::Goto and ss.bthen.statements.last.target == s.name
ss.bthen.statements.pop
if l = ary[ary.index(ss)+1] and l.kind_of? C::Label
ss.bthen.statements.grep(C::If).each { |i|
i.bthen = C::Break.new if i.bthen.kind_of? C::Goto and i.bthen.target == l.name
}
end
ary[ary.index(ss)] = C::While.new(ss.test, ss.bthen)
elsif ss.bthen.statements.last.kind_of? C::Return and g = ary[ary.index(s)+1..-1].reverse.find { |_s| _s.kind_of? C::Goto and _s.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(ss)+1...ary.index(g)], wb)
w = C::While.new(C::CExpression.negate(ss.test), wb)
ary[ary.index(ss)..ary.index(g)] = [w, *ss.bthen.statements]
finished = false ; break #retry
end
end
if g = ary[ary.index(s)..-1].reverse.find { |_s| _s.kind_of? C::Goto and _s.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(s)...ary.index(g)], wb)
w = C::While.new(C::CExpression[1], wb)
ary[ary.index(s)..ary.index(g)] = [w]
finished = false ; break #retry
end
if g = ary[ary.index(s)..-1].reverse.find { |_s| _s.kind_of? C::If and not _s.belse and gt = _s.bthen and (gt = gt.kind_of?(C::Block) && gt.statements.length == 1 ? gt.statements.first : gt) and gt.kind_of? C::Goto and gt.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(s)...ary.index(g)], wb)
w = C::DoWhile.new(g.test, wb)
ary[ary.index(s)..ary.index(g)] = [w]
finished = false ; break #retry
end
when C::If
decompile_cseq_while(s.bthen.statements, s.bthen) if s.bthen.kind_of? C::Block
decompile_cseq_while(s.belse.statements, s.belse) if s.belse.kind_of? C::Block
when C::While, C::DoWhile
decompile_cseq_while(s.body.statements, s.body) if s.body.kind_of? C::Block
end
}
end
ary
end
# TODO
def decompile_cseq_switch(scope)
uncast = lambda { |e| e = e.rexpr while e.kind_of? C::CExpression and not e.op ; e }
walk(scope) { |s|
# XXX pfff...
next if not s.kind_of? C::If
# if (v < 12) return ((void(*)())(tableaddr+4*v))();
t = s.bthen
t = t.statements.first if t.kind_of? C::Block and t.statements.length == 1
next if not t.kind_of? C::Return or not t.respond_to? :from_instr
next if t.from_instr.comment.to_a.include? 'switch'
next if not t.value.kind_of? C::CExpression or t.value.op != :funcall or t.value.rexpr != [] or not t.value.lexpr.kind_of? C::CExpression or t.value.lexpr.op
p = uncast[t.value.lexpr.rexpr]
next if not p.kind_of? C::CExpression or p.op != :* or p.lexpr
p = uncast[p.rexpr]
next if not p.kind_of? C::CExpression or p.op != :+
r, l = uncast[p.rexpr], uncast[p.lexpr]
r, l = l, r if r.kind_of? C::CExpression
next if not r.kind_of? ::Integer or not l.kind_of? C::CExpression or l.op != :* or not l.lexpr
lr, ll = uncast[l.rexpr], uncast[l.lexpr]
lr, ll = ll, lr if not ll.kind_of? ::Integer
next if ll != sizeof(nil, C::Pointer.new(C::BaseType.new(:void)))
base, index = r, lr
if s.test.kind_of? C::CExpression and (s.test.op == :<= or s.test.op == :<) and s.test.lexpr == index and
s.test.rexpr.kind_of? C::CExpression and not s.test.rexpr.op and s.test.rexpr.rexpr.kind_of? ::Integer
t.from_instr.add_comment 'switch'
sup = s.test.rexpr.rexpr
rng = ((s.test.op == :<) ? (0...sup) : (0..sup))
from = t.from_instr.address
rng.map { |i| @dasm.backtrace(Indirection[base+ll*i, ll, from], from, :type => :x, :origin => from, :maxdepth => 0) }
@dasm.disassemble
throw :restart, :restart
end
puts "unhandled switch() at #{t.from_instr}" if $VERBOSE
}
end
# remove unused labels
def remove_labels(scope)
return if forbid_optimize_labels
used = []
walk(scope) { |ss|
used |= [ss.target] if ss.kind_of? C::Goto
}
walk(scope) { |s|
next if not s.kind_of? C::Block
s.statements.delete_if { |l|
l.kind_of? C::Label and not used.include? l.name
}
}
# remove implicit continue; at end of loop
walk(scope) { |s|
next if not s.kind_of? C::While
if s.body.kind_of? C::Block and s.body.statements.last.kind_of? C::Continue
s.body.statements.pop
end
}
end
# checks if expr is a var (var or *&var)
def isvar(ce, var)
if var.stackoff and ce.kind_of? C::CExpression
return unless ce.op == :* and not ce.lexpr
ce = ce.rexpr
ce = ce.rexpr while ce.kind_of? C::CExpression and not ce.op
return unless ce.kind_of? C::CExpression and ce.op == :& and not ce.lexpr
ce = ce.rexpr
end
ce == var
end
# checks if expr reads var
def ce_read(ce_, var)
isvar(ce_, var) or
walk_ce(ce_) { |ce|
case ce.op
when :funcall; break true if isvar(ce.lexpr, var) or ce.rexpr.find { |a| isvar(a, var) }
when :'='; break true if isvar(ce.rexpr, var)
break ce_read(ce.rexpr, var) if isvar(ce.lexpr, var) # *&var = 2
else break true if isvar(ce.lexpr, var) or isvar(ce.rexpr, var)
end
}
end
# checks if expr writes var
def ce_write(ce_, var)
walk_ce(ce_) { |ce|
break true if AssignOp.include?(ce.op) and (isvar(ce.lexpr, var) or
(((ce.op == :'++' or ce.op == :'--') and isvar(ce.rexpr, var))))
}
end
# patches a set of exprs, replacing oldce by newce
def ce_patch(exprs, oldce, newce)
walk_ce(exprs) { |ce|
case ce.op
when :funcall
ce.lexpr = newce if ce.lexpr == oldce
ce.rexpr.each_with_index { |a, i| ce.rexpr[i] = newce if a == oldce }
else
ce.lexpr = newce if ce.lexpr == oldce
ce.rexpr = newce if ce.rexpr == oldce
end
}
end
# duplicate vars per domain value
# eg eax = 1; foo(eax); eax = 2; bar(eax); => eax = 1; foo(eax) eax_1 = 2; bar(eax_1);
# eax = 1; if (bla) eax = 2; foo(eax); => no change
def unalias_vars(scope, func)
g = c_to_graph(scope)
# find the domains of var aliases
scope.symbol.dup.each_value { |var|
next if var.stackoff.to_i > 0 or func.type.args.include? var # __fastcall reg
unalias_var(var, scope, g)
}
end
# duplicates a var per domain value
def unalias_var(var, scope, g = c_to_graph(scope))
# [label, index] of references to var (reading it, writing it, ro/wo it (eg eax = *eax => eax_0 = *eax_1))
read = {}
write = {}
ro = {}
wo = {}
# list of [l, i] for which domain is not known
unchecked = []
# mark all exprs of the graph
g.exprs.each { |label, exprs|
exprs.each_with_index { |ce, i|
if ce_read(ce, var)
if ce.op == :'=' and isvar(ce.lexpr, var) and not ce_write(ce.rexpr, var)
(ro[label] ||= []) << i
(wo[label] ||= []) << i
unchecked << [label, i, :up] << [label, i, :down]
else
(read[label] ||= []) << i
unchecked << [label, i]
end
elsif ce_write(ce, var)
(write[label] ||= []) << i
unchecked << [label, i]
end
}
}
# stuff when filling the domain (flood algorithm)
dom = dom_ro = dom_wo = todo_up = todo_down = nil
# flood by walking the graph up from [l, i] (excluded)
# marks stuff do walk down
walk_up = lambda { |l, i|
todo_w = [[l, i-1]]
done_w = []
while o = todo_w.pop
next if done_w.include? o
done_w << o
l, i = o
loop do
if read[l].to_a.include? i
# XXX not optimal (should mark only the uppest read)
todo_down |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
elsif write[l].to_a.include? i
todo_down |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
break
elsif wo[l].to_a.include? i
todo_down |= [[l, i]] if not dom_wo.include? [l, i, :down]
dom_wo |= [[l, i, :down]]
break
end
i -= 1
if i < 0
g.from_optim[l].to_a.each { |ll|
todo_w << [ll, g.exprs[ll].to_a.length-1]
}
break
end
end
end
}
# flood by walking the graph down from [l, i] (excluded)
# malks stuff to walk up
walk_down = lambda { |l, i|
todo_w = [[l, i+1]]
done_w = []
while o = todo_w.pop
next if done_w.include? o
done_w << o
l, i = o
loop do
if read[l].to_a.include? i
todo_up |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
elsif write[l].to_a.include? i
break
elsif ro[l].to_a.include? i
todo_up |= [[l, i]] if not dom_ro.include? [l, i, :up]
dom_ro |= [[l, i, :up]]
break
end
i += 1
if i >= g.exprs[l].to_a.length
g.to_optim[l].to_a.each { |ll|
todo_w << [ll, 0]
}
break
end
end
end
}
# check it out
while o = unchecked.shift
dom = []
dom_ro = []
dom_wo = []
todo_up = []
todo_down = []
# init
if read[o[0]].to_a.include? o[1]
todo_up << o
todo_down << o
dom << o
elsif write[o[0]].to_a.include? o[1]
todo_down << o
dom << o
elsif o[2] == :up
todo_up << o
dom_ro << o
elsif o[2] == :down
todo_down << o
dom_wo << o
else raise
end
# loop
while todo_up.first or todo_down.first
todo_up.each { |oo| walk_up[oo[0], oo[1]] }
todo_up.clear
todo_down.each { |oo| walk_down[oo[0], oo[1]] }
todo_down.clear
end
# patch
n_i = 0
n_i += 1 while scope.symbol_ancestors[newvarname = "#{var.name}_a#{n_i}"]
nv = var.dup
nv.name = newvarname
scope.statements << C::Declaration.new(nv)
scope.symbol[nv.name] = nv
dom.each { |oo| ce_patch(g.exprs[oo[0]][oo[1]], var, nv) }
dom_ro.each { |oo|
ce = g.exprs[oo[0]][oo[1]]
if ce.rexpr.kind_of? C::CExpression
ce_patch(ce.rexpr, var, nv)
else
ce.rexpr = nv
end
}
dom_wo.each { |oo|
ce = g.exprs[oo[0]][oo[1]]
if ce.lexpr.kind_of? C::CExpression
ce_patch(ce.lexpr, var, nv)
else
ce.lexpr = nv
end
}
unchecked -= dom + dom_wo + dom_ro
end
end
# revert the unaliasing namechange of vars where no alias subsists
def simplify_varname_noalias(scope)
names = scope.symbol.keys
names.delete_if { |k|
next if not b = k[/^(.*)_a\d+$/, 1]
if not names.find { |n| n != k and (n == b or n[/^(.*)_a\d+$/, 1] == b) }
scope.symbol[b] = scope.symbol.delete(k)
scope.symbol[b].name = b
end
}
end
# patch scope to transform :frameoff-x into &var_x
def namestackvars(scope)
off2var = {}
newvar = lambda { |o, n|
if not v = off2var[o]
v = off2var[o] = C::Variable.new
v.type = C::BaseType.new(:void)
v.name = n
v.stackoff = o
scope.symbol[v.name] = v
scope.statements << C::Declaration.new(v)
end
v
}
scope.decompdata[:stackoff_name].each { |o, n| newvar[o, n] }
scope.decompdata[:stackoff_type].each { |o, t| newvar[o, stackoff_to_varname(o)] }
walk_ce(scope) { |e|
next if e.op != :+ and e.op != :-
next if not e.lexpr.kind_of? C::Variable or e.lexpr.name != 'frameptr'
next if not e.rexpr.kind_of? C::CExpression or e.rexpr.op or not e.rexpr.rexpr.kind_of? ::Integer
off = e.rexpr.rexpr
off = -off if e.op == :-
v = newvar[off, stackoff_to_varname(off)]
e.replace C::CExpression[:&, v]
}
end
# assign type to vars (regs, stack & global)
# types are found by subfunction argument types & indirections, and propagated through assignments etc
# TODO when updating the type of a var, update the type of all cexprs where it appears
def decompile_c_types(scope)
return if forbid_decompile_types
# TODO *(int8*)(ptr+8); *(int32*)(ptr+12) => automatic struct
# name => type
types = {}
pscopevar = lambda { |e|
e = e.rexpr while e.kind_of? C::CExpression and not e.op and e.rexpr.kind_of? C::CExpression
if e.kind_of? C::CExpression and e.op == :& and not e.lexpr and e.rexpr.kind_of? C::Variable
e.rexpr.name if scope.symbol[e.rexpr.name]
end
}
scopevar = lambda { |e|
e = e.rexpr if e.kind_of? C::CExpression and not e.op
if e.kind_of? C::Variable and scope.symbol[e.name]
e.name
elsif e.kind_of? C::CExpression and e.op == :* and not e.lexpr
pscopevar[e.rexpr]
end
}
globalvar = lambda { |e|
e = e.rexpr if e.kind_of? C::CExpression and not e.op
if e.kind_of? ::Integer and @dasm.get_section_at(e)
e
elsif e.kind_of? C::Variable and not scope.symbol[e.name] and @c_parser.toplevel.symbol[e.name] and @dasm.get_section_at(e.name)
e.name
end
}
# check if a newly found type for o is better than current type
# order: foo* > void* > foo
better_type = lambda { |t0, t1|
t1 == C::BaseType.new(:void) or (t0.pointer? and t1.kind_of? C::BaseType) or t0.untypedef.kind_of? C::Union or
(t0.kind_of? C::BaseType and t1.kind_of? C::BaseType and (@c_parser.typesize[t0.name] > @c_parser.typesize[t1.name] or (t0.name == t1.name and t0.qualifier))) or
(t0.pointer? and t1.pointer? and better_type[t0.untypedef.type, t1.untypedef.type])
}
update_global_type = lambda { |e, t|
if ne = new_global_var(e, t)
ne.type = t if better_type[t, ne.type] # TODO patch existing scopes using ne
# TODO rename (dword_xx -> byte_xx etc)
e = scope.symbol_ancestors[e] || e if e.kind_of? String # exe reloc
walk_ce(scope) { |ce|
ce.lexpr = ne if ce.lexpr == e
ce.rexpr = ne if ce.rexpr == e
if ce.op == :* and not ce.lexpr and ce.rexpr == ne and ne.type.pointer? and ne.type.untypedef.type.untypedef.kind_of? C::Union
# *struct -> struct->bla
ce.rexpr = structoffset(ne.type.untypedef.type.untypedef, ce.rexpr, 0, sizeof(ce.type))
elsif ce.lexpr == ne or ce.rexpr == ne
# set ce type according to l/r
# TODO set ce.parent type etc
ce.type = C::CExpression[ce.lexpr, ce.op, ce.rexpr].type
end
}
end
}
propagate_type = nil # fwd declaration
propagating = [] # recursion guard (x = &x)
# check if need to change the type of a var
# propagate_type if type is updated
update_type = lambda { |n, t|
next if propagating.include? n
o = scope.symbol[n].stackoff
next if not o and t.untypedef.kind_of? C::Union
next if o and scope.decompdata[:stackoff_type][o] and t != scope.decompdata[:stackoff_type][o]
next if t0 = types[n] and not better_type[t, t0]
next if o and (t.integral? or t.pointer?) and o % sizeof(t) != 0 # keep vars aligned
types[n] = t
next if t == t0
propagating << n
propagate_type[n, t]
propagating.delete n
next if not o
t = t.untypedef
if t.kind_of? C::Struct
t.members.to_a.each { |m|
mo = t.offsetof(@c_parser, m.name)
next if mo == 0
scope.symbol.each { |vn, vv|
update_type[vn, m.type] if vv.stackoff == o+mo
}
}
end
}
# try to update the type of a var from knowing the type of an expr (through dereferences etc)
known_type = lambda { |e, t|
loop do
e = e.rexpr while e.kind_of? C::CExpression and not e.op and e.type == t
if o = scopevar[e]
update_type[o, t]
elsif o = globalvar[e]
update_global_type[o, t]
elsif not e.kind_of? C::CExpression
elsif o = pscopevar[e] and t.pointer?
update_type[o, t.untypedef.type]
elsif e.op == :* and not e.lexpr
e = e.rexpr
t = C::Pointer.new(t)
next
elsif t.pointer? and e.op == :+ and e.lexpr.kind_of? C::CExpression and e.lexpr.type.integral? and e.rexpr.kind_of? C::Variable
e.lexpr, e.rexpr = e.rexpr, e.lexpr
next
elsif e.op == :+ and e.lexpr and e.rexpr.kind_of? C::CExpression
if not e.rexpr.op and e.rexpr.rexpr.kind_of? ::Integer
if t.pointer? and e.rexpr.rexpr < 0x1000 and (e.rexpr.rexpr % sizeof(t.untypedef.type)) == 0 # XXX relocatable + base=0..
e = e.lexpr # (int)*(x+2) === (int) *x
next
elsif globalvar[e.rexpr.rexpr]
known_type[e.lexpr, C::BaseType.new(:int)]
e = e.rexpr
next
end
elsif t.pointer? and (e.lexpr.kind_of? C::CExpression and e.lexpr.lexpr and [:<<, :>>, :*, :&].include? e.lexpr.op) or
(o = scopevar[e.lexpr] and types[o] and types[o].integral? and
!(o = scopevar[e.rexpr] and types[o] and types[o].integral?))
e.lexpr, e.rexpr = e.rexpr, e.lexpr # swap
e = e.lexpr
next
elsif t.pointer? and ((e.rexpr.kind_of? C::CExpression and e.rexpr.lexpr and [:<<, :>>, :*, :&].include? e.rexpr.op) or
(o = scopevar[e.rexpr] and types[o] and types[o].integral? and
!(o = scopevar[e.lexpr] and types[o] and types[o].integral?)))
e = e.lexpr
next
end
end
break
end
}
# we found a type for a var, propagate it through affectations
propagate_type = lambda { |var, type|
walk_ce(scope) { |ce|
next if ce.op != :'='
if ce.lexpr.kind_of? C::Variable and ce.lexpr.name == var
known_type[ce.rexpr, type]
next
end
if ce.rexpr.kind_of? C::Variable and ce.rexpr.name == var
known_type[ce.lexpr, type]
next
end
# int **x; y = **x => int y
t = type
l = ce.lexpr
while l.kind_of? C::CExpression and l.op == :* and not l.lexpr
if var == pscopevar[l.rexpr]
known_type[ce.rexpr, t]
break
elsif t.pointer?
l = l.rexpr
t = t.untypedef.type
else break
end
end
# int **x; **x = y => int y
t = type
r = ce.rexpr
while r.kind_of? C::CExpression and r.op == :* and not r.lexpr
if var == pscopevar[r.rexpr]
known_type[ce.lexpr, t]
break
elsif t.pointer?
r = r.rexpr
t = t.untypedef.type
else break
end
end
# TODO int *x; *x = *y; ?
}
}
# put all those macros in use
# use user-defined types first
scope.symbol.each_value { |v|
next if not v.kind_of? C::Variable or not v.stackoff or not t = scope.decompdata[:stackoff_type][v.stackoff]
known_type[v, t]
}
# try to infer types from C semantics
later = []
walk_ce(scope) { |ce|
if ce.op == :'=' and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == nil and ce.rexpr.rexpr.kind_of? ::Integer and
ce.rexpr.rexpr.abs < 0x10000 and (not ce.lexpr.kind_of? C::CExpression or ce.lexpr.op != :'*' or ce.lexpr.lexpr)
# var = int
known_type[ce.lexpr, ce.rexpr.type]
elsif ce.op == :funcall
f = ce.lexpr.type
f = f.untypedef.type if f.pointer?
next if not f.kind_of? C::Function
# cast func args to arg prototypes
f.args.to_a.zip(ce.rexpr).each_with_index { |(proto, arg), i| ce.rexpr[i] = C::CExpression[arg, proto.type] ; known_type[arg, proto.type] }
elsif ce.op == :* and not ce.lexpr
if e = ce.rexpr and e.kind_of? C::CExpression and not e.op and e = e.rexpr and e.kind_of? C::CExpression and
e.op == :& and not e.lexpr and e.rexpr.kind_of? C::Variable and e.rexpr.stackoff
# skip *(__int32*)&var_12 for now, avoid saying var12 is an int if it may be a ptr or anything
later << [ce.rexpr, C::Pointer.new(ce.type)]
next
end
known_type[ce.rexpr, C::Pointer.new(ce.type)]
elsif not ce.op and ce.type.pointer? and ce.type.untypedef.type.kind_of? C::Function
# cast to fptr: must be a fptr
known_type[ce.rexpr, ce.type]
end
}
later.each { |ce, t| known_type[ce, t] }
# offsets have types now
types.each { |v, t|
# keep var type qualifiers
q = scope.symbol[v].type.qualifier
scope.symbol[v].type = t
t.qualifier = q if q
}
# remove offsets to struct members
# XXX this defeats antialiasing
# off => [structoff, membername, membertype]
memb = {}
types.dup.each { |n, t|
v = scope.symbol[n]
next if not o = v.stackoff
t = t.untypedef
if t.kind_of? C::Struct
t.members.to_a.each { |tm|
moff = t.offsetof(@c_parser, tm.name)
next if moff == 0
types.delete_if { |vv, tt| scope.symbol[vv].stackoff == o+moff }
memb[o+moff] = [v, tm.name, tm.type]
}
end
}
# patch local variables into the CExprs, incl unknown offsets
varat = lambda { |n|
v = scope.symbol[n]
if s = memb[v.stackoff]
v = C::CExpression[s[0], :'.', s[1], s[2]]
else
v.type = types[n] || C::BaseType.new(:int)
end
v
}
maycast = lambda { |v, e|
if sizeof(v) != sizeof(e)
v = C::CExpression[:*, [[:&, v], C::Pointer.new(e.type)]]
end
v
}
maycast_p = lambda { |v, e|
if not e.type.pointer? or sizeof(v) != sizeof(nil, e.type.untypedef.type)
C::CExpression[[:&, v], e.type]
else
C::CExpression[:&, v]
end
}
walk_ce(scope, true) { |ce|
case
when ce.op == :funcall
ce.rexpr.map! { |re|
if o = scopevar[re]; C::CExpression[maycast[varat[o], re]]
elsif o = pscopevar[re]; C::CExpression[maycast_p[varat[o], re]]
else re
end
}
when o = scopevar[ce.lexpr]; ce.lexpr = maycast[varat[o], ce.lexpr]
when o = scopevar[ce.rexpr]; ce.rexpr = maycast[varat[o], ce.rexpr]
ce.rexpr = C::CExpression[ce.rexpr] if not ce.op and ce.rexpr.kind_of? C::Variable
when o = pscopevar[ce.lexpr]; ce.lexpr = maycast_p[varat[o], ce.lexpr]
when o = pscopevar[ce.rexpr]; ce.rexpr = maycast_p[varat[o], ce.rexpr]
when o = scopevar[ce]; ce.replace C::CExpression[maycast[varat[o], ce]]
when o = pscopevar[ce]; ce.replace C::CExpression[maycast_p[varat[o], ce]]
end
}
fix_type_overlap(scope)
fix_pointer_arithmetic(scope)
# if int32 var_4 is always var_4 & 255, change type to int8
varuse = Hash.new(0)
varandff = Hash.new(0)
varandffff = Hash.new(0)
walk_ce(scope) { |ce|
if ce.op == :& and ce.lexpr.kind_of? C::Variable and ce.lexpr.type.integral? and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer
case ce.rexpr.rexpr
when 0xff; varandff[ce.lexpr.name] += 1
when 0xffff; varandffff[ce.lexpr.name] += 1
end
end
varuse[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
varuse[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
}
varandff.each { |k, v|
scope.symbol[k].type = C::BaseType.new(:__int8, :unsigned) if varuse[k] == v
}
varandffff.each { |k, v|
scope.symbol[k].type = C::BaseType.new(:__int16, :unsigned) if varuse[k] == v
}
# propagate types to cexprs
walk_ce(scope, true) { |ce|
if ce.op
ce.type = C::CExpression[ce.lexpr, ce.op, ce.rexpr].type rescue next
if ce.op == :'=' and ce.rexpr and ce.rexpr.type != ce.type and (not ce.rexpr.type.integral? or not ce.type.integral?)
ce.rexpr = C::CExpression[[ce.rexpr], ce.type]
end
end
}
end
# struct foo { int i; int j; struct { int k; int l; } m; }; bla+12 => &bla->m.l
# st is a struct, ptr is an expr pointing to a struct, off is a numeric offset from ptr, msz is the size of the pointed member (nil ignored)
def structoffset(st, ptr, off, msz)
tabidx = off / sizeof(st)
off -= tabidx * sizeof(st)
ptr = C::CExpression[[ptr], C::Pointer.new(st)] if ptr.type.untypedef.type.untypedef != st
ptr = C::CExpression[:&, [ptr, :'[]', [tabidx]]] if tabidx != 0 or ptr.type.untypedef.kind_of? C::Array
m_ptr = lambda { |m|
if ptr.kind_of? C::CExpression and ptr.op == :& and not ptr.lexpr
C::CExpression[ptr.rexpr, :'.', m.name]
else
C::CExpression[ptr, :'->', m.name]
end
}
submemb = lambda { |sm| sm.name ? sm : sm.type.kind_of?(C::Union) ? sm.type.members.to_a.map { |ssm| submemb[ssm] } : nil }
mbs = st.members.to_a.map { |m| submemb[m] }.flatten.compact
mo = mbs.inject({}) { |h, m| h.update m => st.offsetof(@c_parser, m.name) }
if sm = mbs.find { |m| mo[m] == off and (not msz or sizeof(m) == msz) } ||
mbs.find { |m| mo[m] <= off and mo[m]+sizeof(m) > off }
off -= mo[sm]
sst = sm.type.untypedef
return ptr if mo[sm] == 0 and sst.pointer? and sst.type.untypedef == st # TODO fix infinite recursion on mutually recursive ptrs
ptr = C::CExpression[:&, m_ptr[sm]]
if sst.pointer? and sst.type.untypedef.kind_of? C::Union
structoffset(sst.type.untypedef, ptr, off, msz)
elsif off != 0
C::CExpression[[ptr, C::Pointer.new(C::BaseType.new(:__int8))], :+, [off]]
else
ptr
end
elsif off != 0
C::CExpression[[[ptr], C::Pointer.new(C::BaseType.new(:__int8))], :+, [off]]
else
ptr
end
end
# fix pointer arithmetic (eg int foo += 4 => int* foo += 1)
# use struct member access (eg *(structptr+8) => structptr->bla)
# must be run only once, right after type setting
def fix_pointer_arithmetic(scope)
walk_ce(scope, true) { |ce|
if ce.lexpr and ce.lexpr.type.pointer? and [:&, :>>, :<<].include? ce.op
ce.lexpr = C::CExpression[[ce.lexpr], C::BaseType.new(:int)]
end
if ce.op == :+ and ce.lexpr and ce.lexpr.type.integral? and ce.rexpr.type.pointer?
ce.rexpr, ce.lexpr = ce.lexpr, ce.rexpr
end
if ce.op == :* and not ce.lexpr and ce.rexpr.type.pointer? and ce.rexpr.type.untypedef.type.untypedef.kind_of? C::Struct
s = ce.rexpr.type.untypedef.type.untypedef
m = s.members.to_a.find { |m_| s.offsetof(@c_parser, m_.name) == 0 }
if sizeof(m) != sizeof(ce)
ce.rexpr = C::CExpression[[ce.rexpr, C::Pointer.new(s)], C::Pointer.new(ce.type)]
next
end
# *structptr => structptr->member
ce.lexpr = ce.rexpr
ce.op = :'->'
ce.rexpr = m.name
ce.type = m.type
next
elsif ce.op == :'=' and ce.lexpr.type.untypedef.kind_of? C::Struct
s = ce.lexpr.type.untypedef
m = s.members.to_a.find { |m_| s.offsetof(@c_parser, m_.name) == 0 }
ce.lexpr = C::CExpression.new(ce.lexpr, :'.', m.name, m.type)
ce.type = m.type
next
end
if ce.op == :+ and ce.lexpr and ce.lexpr.type.pointer? and not ce.type.pointer?
ce.type = ce.lexpr.type
end
if ce.op == :& and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :* and not ce.rexpr.lexpr
ce.replace C::CExpression[ce.rexpr.rexpr]
end
next if not ce.lexpr or not ce.lexpr.type.pointer?
if ce.op == :+ and (s = ce.lexpr.type.untypedef.type.untypedef).kind_of? C::Union and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and
ce.rexpr.rexpr.kind_of? ::Integer and o = ce.rexpr.rexpr
# structptr + 4 => &structptr->member
ce.replace structoffset(s, ce.lexpr, o, nil)
elsif [:+, :-, :'+=', :'-='].include? ce.op and ce.rexpr.kind_of? C::CExpression and ((not ce.rexpr.op and i = ce.rexpr.rexpr) or
(ce.rexpr.op == :* and i = ce.rexpr.lexpr and ((i.kind_of? C::CExpression and not i.op and i = i.rexpr) or true))) and
i.kind_of? ::Integer and psz = sizeof(nil, ce.lexpr.type.untypedef.type) and i % psz == 0
# ptr += 4 => ptr += 1
if not ce.rexpr.op
ce.rexpr.rexpr /= psz
else
ce.rexpr.lexpr.rexpr /= psz
if ce.rexpr.lexpr.rexpr == 1
ce.rexpr = ce.rexpr.rexpr
end
end
ce.type = ce.lexpr.type
elsif (ce.op == :+ or ce.op == :-) and sizeof(nil, ce.lexpr.type.untypedef.type) != 1
# ptr+x => (ptrtype*)(((__int8*)ptr)+x)
# XXX create struct ?
ce.rexpr = C::CExpression[ce.rexpr, C::BaseType.new(:int)] if not ce.rexpr.type.integral?
if sizeof(nil, ce.lexpr.type.untypedef.type) != 1
ptype = ce.lexpr.type
p = C::CExpression[[ce.lexpr], C::Pointer.new(C::BaseType.new(:__int8))]
ce.replace C::CExpression[[p, ce.op, ce.rexpr, p.type], ptype]
end
end
}
end
# handling of var overlapping (eg __int32 var_10; __int8 var_F => replace all var_F by *(&var_10 + 1))
# must be done before fix_pointer_arithmetic
def fix_type_overlap(scope)
varinfo = {}
scope.symbol.each_value { |var|
next if not off = var.stackoff
len = sizeof(var)
varinfo[var] = [off, len]
}
varinfo.each { |v1, (o1, l1)|
next if not v1.type.integral?
varinfo.each { |v2, (o2, l2)|
# XXX o1 may overlap o2 AND another (int32 v_10; int32 v_E; int32 v_C;)
# TODO should check stuff with aliasing domains
next if v1.name == v2.name or o1 >= o2+l2 or o1+l1 <= o2 or l1 > l2 or (l2 == l1 and o2 >= o1)
# v1 => *(&v2+delta)
p = C::CExpression[:&, v2]
p = C::CExpression[p, :+, [o1-o2]]
p = C::CExpression[p, C::Pointer.new(v1.type)] if v1.type != p.type.type
p = C::CExpression[:*, p]
walk_ce(scope) { |ce|
ce.lexpr = p if ce.lexpr == v1
ce.rexpr = p if ce.rexpr == v1
}
}
}
end
# to be run with scope = function body with only CExpr/Decl/Label/Goto/IfGoto/Return, with correct variables types
# will transform += 1 to ++, inline them to prev/next statement ('++x; if (x)..' => 'if (++x)..')
# remove useless variables ('int i;', i never used or 'i = 1; j = i;', i never read after => 'j = 1;')
# remove useless casts ('(int)i' with 'int i;' => 'i')
def optimize(scope)
optimize_code(scope)
optimize_vars(scope)
optimize_vars(scope) # 1st run may transform i = i+1 into i++ which second run may coalesce into if(i)
end
# simplify cexpressions (char & 255, redundant casts, etc)
def optimize_code(scope)
return if forbid_optimize_code
sametype = lambda { |t1, t2|
t1 = t1.untypedef
t2 = t2.untypedef
t1 == t2 or
(t1.kind_of? C::BaseType and t1.integral? and t2.kind_of? C::BaseType and t2.integral? and sizeof(nil, t1) == sizeof(nil, t2)) or
(t1.pointer? and t2.pointer? and sametype[t1.type, t2.type])
}
# most of this is a CExpr#reduce
future_array = []
walk_ce(scope, true) { |ce|
# *&bla => bla if types ok
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :& and not ce.rexpr.lexpr and sametype[ce.rexpr.type.untypedef.type, ce.rexpr.rexpr.type]
ce.replace C::CExpression[ce.rexpr.rexpr]
end
# int x + 0xffffffff -> x-1
if ce.lexpr and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and [:+, :-, :'+=', :'-=', :'!=', :==, :>, :<, :>=, :<=].include? ce.op and
ce.rexpr.rexpr == (1 << (8*sizeof(ce.lexpr)))-1
ce.op = {:+ => :-, :- => :+, :'+=' => :'-=', :'-=' => :'+='}[ce.op]
ce.rexpr.rexpr = 1
end
# int *ptr; *(ptr + 4) => ptr[4]
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :+ and var = ce.rexpr.lexpr and var.kind_of? C::Variable and var.type.pointer?
ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, :'[]', ce.rexpr.rexpr
future_array << var.name
end
# char x; x & 255 => x
if ce.op == :& and ce.lexpr and (ce.lexpr.type.integral? or ce.lexpr.type.pointer?) and ce.rexpr.kind_of? C::CExpression and
not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer and m = (1 << (8*sizeof(ce.lexpr))) - 1 and
ce.rexpr.rexpr & m == m
ce.replace C::CExpression[ce.lexpr]
end
# a + -b => a - b
if ce.op == :+ and ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :- and not ce.rexpr.lexpr
ce.op, ce.rexpr = :-, ce.rexpr.rexpr
end
# (((int) i >> 31) & 1) => i < 0
if ce.op == :& and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1 and
ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :>> and ce.lexpr.rexpr.kind_of? C::CExpression and
not ce.lexpr.rexpr.op and ce.lexpr.rexpr.rexpr == sizeof(ce.lexpr.lexpr) * 8 - 1
ce.replace C::CExpression[ce.lexpr.lexpr, :<, [0]]
end
# a-b == 0 => a == b
if ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and [:==, :'!=', :<, :>, :<=, :>=].include? ce.op and
ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :- and ce.lexpr.lexpr
ce.lexpr, ce.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr
end
# (a > 0) != 0
if ce.op == :'!=' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and ce.lexpr.kind_of? C::CExpression and
[:<, :<=, :>, :>=, :'==', :'!=', :'!'].include? ce.lexpr.op
ce.replace ce.lexpr
end
# (a < b) != ( [(a < 0) == !(b < 0)] && [(a < 0) != (a < b)] ) => jl
# a<b => true if !r => a<0 == b<0 or a>=0 => a>=0 or b>=0
# a>=b => true if r => a<0 == b>=0 and a<0 => a<0 and b>=0
# x != (a && (b != x)) => [x && (!a || b)] || [!x && !(!a || b)]
if ce.op == :'!=' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :< and ce.rexpr.kind_of? C::CExpression and
ce.rexpr.op == :'&&' and ce.rexpr.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.op == :'!=' and
ce.rexpr.rexpr.rexpr == ce.lexpr and not walk_ce(ce) { |ce_| break true if ce_.op == :funcall }
x, a, b = ce.lexpr, ce.rexpr.lexpr, ce.rexpr.rexpr.lexpr
ce.replace C::CExpression[ [x, :'&&', [[:'!',a],:'||',b]] , :'||', [[:'!', x], :'&&', [:'!', [[:'!',a],:'||',b]]] ]
optimize_code(ce)
end
# (a != b) || a => a || b
if ce.op == :'||' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :'!=' and ce.lexpr.lexpr == ce.rexpr and not walk_ce(ce) { |ce_| break true if ce_.op == :funcall }
ce.lexpr, ce.rexpr = ce.rexpr, ce.lexpr.rexpr
optimize_code(ce)
end
# (a<b) && !(a>=0 && b<0) || (a>=b) && (a>=0 && b<0) => (signed)a < (signed)b
if ce.op == :'||' and ce.lexpr.kind_of? C::CExpression and ce.rexpr.kind_of? C::CExpression and ce.lexpr.op == :'&&' and ce.rexpr.op == :'&&' and
ce.lexpr.lexpr.kind_of? C::CExpression and ce.lexpr.lexpr.op == :<
a, b = ce.lexpr.lexpr.lexpr, ce.lexpr.lexpr.rexpr
if ce.lexpr.rexpr === C::CExpression[[a, :'>=', [0]], :'&&', [b, :'<', [0]]].negate and
ce.rexpr.lexpr === ce.lexpr.lexpr.negate and ce.rexpr.rexpr === ce.lexpr.rexpr.negate
ce.replace C::CExpression[a, :'<', b]
end
end
# (a < b) | (a == b) => a <= b
if ce.op == :| and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :== and ce.lexpr.kind_of? C::CExpression and
(ce.lexpr.op == :< or ce.lexpr.op == :>) and ce.lexpr.lexpr == ce.rexpr.lexpr and ce.lexpr.rexpr == ce.rexpr.rexpr
ce.op = {:< => :<=, :> => :>=}[ce.lexpr.op]
ce.lexpr, ce.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr
end
# a == 0 => !a
if ce.op == :== and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0
ce.lexpr, ce.op, ce.rexpr = nil, :'!', ce.lexpr
end
# !(bool) => bool
if ce.op == :'!' and ce.rexpr.kind_of? C::CExpression and [:'==', :'!=', :<, :>, :<=, :>=, :'||', :'&&', :'!'].include? ce.rexpr.op
ce.replace ce.rexpr.negate
end
# (foo)(bar)x => (foo)x
if not ce.op and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? C::CExpression
ce.rexpr = ce.rexpr.rexpr
end
# (foo)bla => bla if bla of type foo
if not ce.op and ce.rexpr.kind_of? C::CExpression and sametype[ce.type, ce.rexpr.type]
ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, ce.rexpr.op, ce.rexpr.rexpr
end
if ce.lexpr.kind_of? C::CExpression and not ce.lexpr.op and ce.lexpr.rexpr.kind_of? C::Variable and ce.lexpr.type == ce.lexpr.rexpr.type
ce.lexpr = ce.lexpr.rexpr
end
# &struct.1stmember => &struct
if ce.op == :& and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :'.' and s = ce.rexpr.lexpr.type and
s.kind_of? C::Struct and s.offsetof(@c_parser, ce.rexpr.rexpr) == 0
ce.rexpr = ce.rexpr.lexpr
ce.type = C::Pointer.new(ce.rexpr.type)
end
# (1stmember*)structptr => &structptr->1stmember
if not ce.op and ce.type.pointer? and (ce.rexpr.kind_of? C::CExpression or ce.rexpr.kind_of? C::Variable) and ce.rexpr.type.pointer? and
s = ce.rexpr.type.untypedef.type.untypedef and s.kind_of? C::Union and ce.type.untypedef.type.untypedef != s
ce.replace C::CExpression[structoffset(s, ce.rexpr, 0, sizeof(ce.type.untypedef.type))]
end
# (&foo)->bar => foo.bar
if ce.op == :'->' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :& and not ce.lexpr.lexpr
ce.lexpr = ce.lexpr.rexpr
ce.op = :'.'
end
}
# if there is a ptr[4], change all *ptr to ptr[0] for consistency
# do this after the first pass, which may change &*ptr to ptr
walk_ce(scope) { |ce|
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::Variable and future_array.include? ce.rexpr.name
ce.lexpr, ce.op, ce.rexpr = ce.rexpr, :'[]', C::CExpression[0]
end
} if not future_array.empty?
# if (x != 0) => if (x)
walk(scope) { |st|
if st.kind_of? C::If and st.test.kind_of? C::CExpression and st.test.op == :'!=' and
st.test.rexpr.kind_of? C::CExpression and not st.test.rexpr.op and st.test.rexpr.rexpr == 0
st.test = C::CExpression[st.test.lexpr]
end
}
end
# does evaluating this expression have observable side effects?
# (function call, assignment, memory dereference, volatile access,
#  or - when a scope is given - use of a variable from an outer scope)
def sideeffect(exp, scope=nil)
case exp
when nil, ::Numeric, ::String
# plain constants never have side effects
false
when ::Array
exp.any? { |sub| sideeffect(sub, scope) }
when C::Variable
# out-of-scope variables and volatile variables count as side effects
(scope and not scope.symbol[exp.name]) or exp.type.qualifier.to_a.include? :volatile
when C::CExpression
return true if exp.op == :* and not exp.lexpr	# memory dereference
return true if exp.op == :funcall		# function call
return true if AssignOp.include?(exp.op)	# assignment / in-place operator
sideeffect(exp.lexpr, scope) or sideeffect(exp.rexpr, scope)
else
true # unknown node: assume the worst
end
end
# converts C code to a graph of cexprs (nodes = cexprs, edges = codepaths)
# returns a CGraph
class CGraph
# exprs      : label => [list of CExpressions held by the node]
# to         : label => [successor labels]
# block      : label => true when the exprs are standalone statements (vs an If#test)
# start      : label of the entry node
# to_optim / from_optim : optimized successor / predecessor maps
attr_accessor :exprs, :to, :block
attr_accessor :start
attr_accessor :to_optim, :from_optim
end
# converts the C AST of a function body to a CGraph of its CExpressions
# st: a C::Statement (usually the function toplevel C::Block)
def c_to_graph(st)
g = CGraph.new
g.exprs = {} # label => [exprs]
g.to = {} # label => [labels]
g.block = {} # label => is label in a block? (vs If#test)
anon_label = 0 # when no label is there, use anon_label++
# converts C code to a graph of codepath of cexprs
# stmt: statement to convert, l_cur: label of the node being filled,
# l_after: label to flow to after stmt, l_cont/l_break: targets for continue/break
to_graph = lambda { |stmt, l_cur, l_after, l_cont, l_break|
case stmt
when C::Label; g.to[l_cur] = [stmt.name] ; g.to[stmt.name] = [l_after]
when C::Goto; g.to[l_cur] = [stmt.target]
when C::Continue; g.to[l_cur] = [l_cont]
when C::Break; g.to[l_cur] = [l_break]
when C::CExpression
g.exprs[l_cur] = [stmt]
g.to[l_cur] = [l_after]
when C::Return
g.exprs[l_cur] = [stmt.value] if stmt.value
# no successor: end of this codepath
g.to[l_cur] = []
when C::Block
to_graph[stmt.statements, l_cur, l_after, l_cont, l_break]
when ::Array
g.exprs[l_cur] = []
g.block[l_cur] = true
stmt.each_with_index { |s, i|
case s
when C::Declaration
# declarations emit no code
when C::CExpression
g.exprs[l_cur] << s
else
# non-expression statement: link current node to it, then keep
# accumulating expressions in a fresh node placed after it
l = anon_label += 1
ll = anon_label += 1
g.to[l_cur] = [l]
g.block[l_cur] = true
to_graph[stmt[i], l, ll, l_cont, l_break]
l_cur = ll
g.exprs[l_cur] = []
end
}
g.to[l_cur] = [l_after].compact
when C::If
g.exprs[l_cur] = [stmt.test]
lt = anon_label += 1
to_graph[stmt.bthen, lt, l_after, l_cont, l_break]
le = anon_label += 1
to_graph[stmt.belse, le, l_after, l_cont, l_break]
g.to[l_cur] = [lt, le]
when C::While, C::DoWhile
la = anon_label += 1
if stmt.kind_of? C::DoWhile
lt, lb = la, l_cur # body first, then test
else
lt, lb = l_cur, la # test first, then body
end
g.exprs[lt] = [stmt.test]
g.to[lt] = [lb, l_after]
to_graph[stmt.body, lb, lt, lt, l_after]
when C::Asm, nil; g.to[l_cur] = [l_after]
else puts "to_graph unhandled #{stmt.class}: #{stmt}" if $VERBOSE
end
}
g.start = anon_label
to_graph[st, g.start, nil, nil, nil]
# optimize graph: dedup edges, drop empty nodes
g.to_optim = {}
g.to.each { |k, v| g.to_optim[k] = v.uniq }
g.exprs.delete_if { |k, v| v == [] }
g.to_optim.delete_if { |k, v|
if v.length == 1 and not g.exprs[k] and v != [k]
# empty node with a single successor: shortcut every edge pointing here
g.to_optim.each_value { |t| if i = t.index(k) ; t[i] = v.first ; end }
true
elsif v.length == 0 and not g.exprs[k]
# empty dead-end node: drop it
g.to_optim.each_value { |t| t.delete k }
true
end
}
# build the reverse (predecessor) map
g.from_optim = {}
g.to_optim.each { |k, v| v.each { |t| (g.from_optim[t] ||= []) << k } }
g
end
# dataflow optimization
# condenses expressions (++x; if (x) => if (++x))
# remove local var assignment (x = 1; f(x); x = 2; g(x); => f(1); g(2); etc)
# dataflow optimization pass over a function body:
# - condenses expressions (++x; if (x) => if (++x))
# - removes local var assignments whose value can be propagated
#   (x = 1; f(x); x = 2; g(x); => f(1); g(2); etc)
# no-op when forbid_optimize_dataflow is set
def optimize_vars(scope)
return if forbid_optimize_dataflow
g = c_to_graph(scope)
# walks a cexpr in evaluation order (not strictly, but this is not strictly defined anyway..)
# returns the first subexpr to read var in ce
# returns :write if var is rewritten
# returns nil if var not read
# may return a cexpr var += 2
find_next_read_ce = lambda { |ce_, var|
walk_ce(ce_, true) { |ce|
case ce.op
when :funcall
break ce if ce.lexpr == var or ce.rexpr.find { |a| a == var }
when :'='
# a=a / a=a+1 => yield a, not :write
break ce if ce.rexpr == var
break :write if ce.lexpr == var
else
break ce if ce.lexpr == var or ce.rexpr == var
end
}
}
# badlabels is a list of labels that may be reached without passing through the first invocation block
find_next_read_rec = lambda { |label, idx, var, done, badlabels|
next if done.include? label
done << label if idx == 0
idx += 1 while ce = g.exprs[label].to_a[idx] and not ret = find_next_read_ce[ce, var]
next ret if ret
to = g.to_optim[label].to_a.map { |t|
break [:split] if badlabels.include? t
find_next_read_rec[t, 0, var, done, badlabels]
}.compact
tw = to - [:write]
if to.include? :split or tw.length > 1
:split
elsif tw.length == 1
tw.first
elsif to.include? :write
:write
end
}
# return the previous subexpr reading var with no fwd path to another reading (otherwise split), see loop comment for reason
find_next_read = nil
find_prev_read_rec = lambda { |label, idx, var, done|
next if done.include? label
done << label if idx == g.exprs[label].length-1
idx -= 1 while idx >= 0 and ce = g.exprs[label].to_a[idx] and not ret = find_next_read_ce[ce, var]
if ret.kind_of? C::CExpression
fwchk = find_next_read[label, idx+1, var]
ret = fwchk if not fwchk.kind_of? C::CExpression
end
next ret if ret
from = g.from_optim[label].to_a.map { |f|
find_prev_read_rec[f, g.exprs[f].to_a.length-1, var, done]
}.compact
next :split if from.include? :split
fw = from - [:write]
if fw.length == 1
fw.first
elsif fw.length > 1
:split
elsif from.include? :write
:write
end
}
# list of labels reachable without using a label
badlab = {}
build_badlabel = lambda { |label|
next if badlab[label]
badlab[label] = []
todo = [g.start]
while l = todo.pop
next if l == label or badlab[label].include? l
badlab[label] << l
todo.concat g.to_optim[l].to_a
end
}
# returns the next subexpr where var is read
# returns :write if var is written before being read
# returns :split if the codepath splits with both subpath reading or codepath merges with another
# returns nil if var is never read
# idx is the index of the first cexpr at g.exprs[label] to look at
find_next_read = lambda { |label, idx, var|
find_next_read_rec[label, idx, var, [], []]
}
find_prev_read = lambda { |label, idx, var|
find_prev_read_rec[label, idx, var, []]
}
# same as find_next_read, but returns :split if there exist a path from g.start to the read without passing through label
find_next_read_bl = lambda { |label, idx, var|
build_badlabel[label]
find_next_read_rec[label, idx, var, [], badlab[label]]
}
# walk each node, optimize data accesses there
# replace no longer useful exprs with CExpr[nil, nil, nil], those are wiped later.
g.exprs.each { |label, exprs|
next if not g.block[label]
i = 0
while i < exprs.length
e = exprs[i]
i += 1
# TODO x = x + 1 => x += 1 => ++x here, move all other optimizations after (in optim_code)
# needs also int & 0xffffffff -> int, *&var etc (decomp_type? optim_type?)
if (e.op == :'++' or e.op == :'--') and v = (e.lexpr || e.rexpr) and v.kind_of? C::Variable and
scope.symbol[v.name] and not v.type.qualifier.to_a.include? :volatile
# pos records whether the merge target follows (:post) or precedes (:prev) e
next if !(pos = :post and oe = find_next_read_bl[label, i, v] and oe.kind_of? C::CExpression) and
!(pos = :prev and oe = find_prev_read[label, i-2, v] and oe.kind_of? C::CExpression)
next if oe.op == :& and not oe.lexpr # no &(++eax)
# merge pre/postincrement into next/prev var usage
# find_prev_read must fwd check when it finds something, to avoid
# while(x) x++; return x; to be converted to while(x++); return x; (return wrong value)
case oe.op
when e.op
# bla(i--); --i bla(--i); --i ++i; bla(i++) => ignore
# fixed: pos is :post/:prev, the old ':pre' comparison never matched
next if pos == :prev or oe.lexpr
# ++i; bla(++i) => bla(i += 2)
oe.lexpr = oe.rexpr
oe.op = ((oe.op == :'++') ? :'+=' : :'-=')
oe.rexpr = C::CExpression[2]
when :'++', :'--' # opposite of e.op
if (pos == :post and not oe.lexpr) or (pos == :prev and not oe.rexpr)
# ++i; bla(--i) => bla(i)
# bla(i--); ++i => bla(i)
oe.op = nil
elsif pos == :post
# ++i; bla(i--) => bla(i+1)
oe.op = ((oe.op == :'++') ? :- : :+)
oe.rexpr = C::CExpression[1]
elsif pos == :prev
# bla(--i); ++i => bla(i-1)
oe.lexpr = oe.rexpr
oe.op = ((oe.op == :'++') ? :+ : :-)
oe.rexpr = C::CExpression[1]
end
when :'+=', :'-='
# TODO i++; i += 4 => i += 5
next
when *AssignOp
next # ++i; i |= 4 => ignore
else
if pos == :post and v == oe.lexpr; oe.lexpr = C::CExpression[e.op, v]
elsif pos == :post and v == oe.rexpr; oe.rexpr = C::CExpression[e.op, v]
elsif pos == :prev and v == oe.rexpr; oe.rexpr = C::CExpression[v, e.op]
elsif pos == :prev and v == oe.lexpr; oe.lexpr = C::CExpression[v, e.op]
else raise 'foobar' # find_dir_read failed
end
end
i -= 1
exprs.delete_at(i)
e.lexpr = e.op = e.rexpr = nil
elsif e.op == :'=' and v = e.lexpr and v.kind_of? C::Variable and scope.symbol[v.name] and
not v.type.qualifier.to_a.include? :volatile and not find_next_read_ce[e.rexpr, v]
# reduce trivial static assignments
if (e.rexpr.kind_of? C::CExpression and iv = e.rexpr.reduce(@c_parser) and iv.kind_of? ::Integer) or
(e.rexpr.kind_of? C::CExpression and e.rexpr.op == :& and not e.rexpr.lexpr) or
(e.rexpr.kind_of? C::Variable and e.rexpr.type.kind_of? C::Array)
rewritten = false
readers = []
discard = [e]
g.exprs.each { |l, el|
el.each_with_index { |ce, ci|
if ce_write(ce, v) and [label, i-1] != [l, ci]
if ce == e
discard << ce
else
rewritten = true
break
end
elsif ce_read(ce, v)
if walk_ce(ce) { |_ce| break true if _ce.op == :& and not _ce.lexpr and _ce.rexpr == v }
# i = 2 ; j = &i =!> j = &2
rewritten = true
break
end
readers << ce
end
} if not rewritten
}
if not rewritten
ce_patch(readers, v, C::CExpression[iv || e.rexpr])
discard.each { |d| d.lexpr = d.op = d.rexpr = nil }
next
end
end
case nr = find_next_read[label, i, v]
when C::CExpression
# read in one place only, try to patch rexpr in there
r = e.rexpr
# must check for conflicts (x = y; y += 1; foo(x) =!> foo(y))
# XXX x = a[1]; *(a+1) = 28; foo(x)...
isfunc = false
depend_vars = []
walk_ce(C::CExpression[r]) { |ce|
# fixed: the funcall operator is :funcall (':func' never matched, so calls
# were wrongly considered pure and propagated across conflicting writes)
isfunc = true if ce.op == :funcall and (not ce.lexpr.kind_of? C::Variable or
not ce.lexpr.has_attribute('pure')) # XXX is there a C attr for func depending only on staticvars+param ?
depend_vars << ce.lexpr if ce.lexpr.kind_of? C::Variable
depend_vars << ce.rexpr if ce.rexpr.kind_of? C::Variable and (ce.lexpr or ce.op != :&) # a = &v; v = 12; func(a) => func(&v)
depend_vars << ce if ce.lvalue?
depend_vars.concat(ce.rexpr.grep(C::Variable)) if ce.rexpr.kind_of? ::Array
}
depend_vars.uniq!
# XXX x = 1; if () { x = 2; } foo(x) =!> foo(1) (find_next_read will return this)
# we'll just redo a find_next_read like
# XXX b = &a; a = 1; *b = 2; foo(a) unhandled & generate bad C
l_l = label
l_i = i
while g.exprs[l_l].to_a.each_with_index { |ce_, n_i|
next if n_i < l_i
# count occurences of read v in ce_
cnt = 0
bad = false
walk_ce(ce_) { |ce|
case ce.op
when :funcall
bad = true if isfunc
ce.rexpr.each { |a| cnt += 1 if a == v }
cnt += 1 if ce.lexpr == v
when :'='
bad = true if depend_vars.include? ce.lexpr
cnt += 1 if ce.rexpr == v
else
bad = true if (ce.op == :'++' or ce.op == :'--') and depend_vars.include? ce.rexpr
bad = true if AssignOp.include? ce.op and depend_vars.include? ce.lexpr
cnt += 1 if ce.lexpr == v
cnt += 1 if ce.rexpr == v
end
}
case cnt
when 0
break if bad
next
when 1 # good
break if e.complexity > 10 and ce_.complexity > 3 # try to keep the C readable
# x = 1; y = x; z = x; => cannot suppress x
nr = find_next_read[l_l, n_i+1, v]
break if (nr.kind_of? C::CExpression or nr == :split) and not walk_ce(ce_) { |ce| break true if ce.op == :'=' and ce.lexpr == v }
else break # a = 1; b = a + a => fail
end
# TODO XXX x = 1; y = x; z = x;
res = walk_ce(ce_, true) { |ce|
case ce.op
when :funcall
if ce.rexpr.to_a.each_with_index { |a,i_|
next if a != v
ce.rexpr[i_] = r
break :done
} == :done
break :done
elsif ce.lexpr == v
ce.lexpr = r
break :done
elsif isfunc
break :fail
end
when *AssignOp
break :fail if not ce.lexpr and depend_vars.include? ce.rexpr # ++depend
if ce.rexpr == v
ce.rexpr = r
break :done
elsif ce.lexpr == v or depend_vars.include? ce.lexpr
break :fail
end
else
break :fail if ce.op == :& and not ce.lexpr and ce.rexpr == v
if ce.lexpr == v
ce.lexpr = r
break :done
elsif ce.rexpr == v
ce_.type = r.type if not ce_.op and ce_.rexpr == v # return (int32)eax
ce.rexpr = r
break :done
end
end
}
case res
when :done
i -= 1
exprs.delete_at(i)
e.lexpr = e.op = e.rexpr = nil
break
when :fail
break
end
}
# ignore branches that will never reuse v
may_to = g.to_optim[l_l].find_all { |to| find_next_read[to, 0, v].kind_of? C::CExpression }
if may_to.length == 1 and to = may_to.first and to != l_l and g.from_optim[to] == [l_l]
l_i = 0
l_l = to
else break
end
end
when nil, :write
# useless assignment (value never read later)
# XXX foo = &bar; bar = 12; baz(*foo)
e.replace(C::CExpression[e.rexpr])
# remove sideeffectless subexprs
loop do
case e.op
when :funcall, *AssignOp
else
l = (e.lexpr.kind_of? C::CExpression and sideeffect(e.lexpr))
r = (e.rexpr.kind_of? C::CExpression and sideeffect(e.rexpr))
if l and r # could split...
elsif l
e.replace(e.lexpr)
next
elsif r
e.replace(e.rexpr)
next
else # remove the assignment altogether
i -= 1
exprs.delete_at(i)
e.lexpr = e.op = e.rexpr = nil
end
end
break
end
end
end
end
}
# wipe cexprs marked in the previous step
walk(scope) { |st|
next if not st.kind_of? C::Block
st.statements.delete_if { |e| e.kind_of? C::CExpression and not e.lexpr and not e.op and not e.rexpr }
}
# reoptimize cexprs
walk_ce(scope, true) { |ce|
# redo some simplification that may become available after variable propagation
# int8 & 255 => int8
if ce.op == :& and ce.lexpr and ce.lexpr.type.integral? and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == (1 << (8*sizeof(ce.lexpr))) - 1
ce.replace C::CExpression[ce.lexpr]
end
# int *ptr; *(ptr + 4) => ptr[4]
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :+ and var = ce.rexpr.lexpr and var.kind_of? C::Variable and var.type.pointer?
ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, :'[]', ce.rexpr.rexpr
end
# useless casts
if not ce.op and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and (ce.rexpr.rexpr.kind_of? C::CExpression or
(ce.type.pointer? and ce.rexpr.rexpr == 0))
ce.rexpr = ce.rexpr.rexpr
end
if not ce.op and ce.rexpr.kind_of? C::CExpression and (ce.type == ce.rexpr.type or (ce.type.integral? and ce.rexpr.type.integral?))
ce.replace ce.rexpr
end
# useless casts (type)*((oeua)Ptype)
if not ce.op and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :* and not ce.rexpr.lexpr and ce.rexpr.rexpr.kind_of? C::CExpression and not ce.rexpr.rexpr.op and
p = ce.rexpr.rexpr.rexpr and (p.kind_of? C::CExpression or p.kind_of? C::Variable) and p.type.pointer? and ce.type == p.type.untypedef.type
ce.op = ce.rexpr.op
ce.rexpr = ce.rexpr.rexpr.rexpr
end
# (a > 0) != 0
if ce.op == :'!=' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and ce.lexpr.kind_of? C::CExpression and
[:<, :<=, :>, :>=, :'==', :'!=', :'!'].include? ce.lexpr.op
ce.replace ce.lexpr
end
# a == 0 => !a
if ce.op == :== and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0
ce.replace C::CExpression[:'!', ce.lexpr]
end
# !(int)a => !a
if ce.op == :'!' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? C::CExpression
ce.rexpr = ce.rexpr.rexpr
end
# (int)a < (int)b => a < b TODO uint <-> int
if [:<, :<=, :>, :>=].include? ce.op and ce.rexpr.kind_of? C::CExpression and ce.lexpr.kind_of? C::CExpression and not ce.rexpr.op and not ce.lexpr.op and
ce.rexpr.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.type.pointer? and ce.lexpr.rexpr.kind_of? C::CExpression and ce.lexpr.rexpr.type.pointer?
ce.rexpr = ce.rexpr.rexpr
ce.lexpr = ce.lexpr.rexpr
end
# a & 3 & 1
while (ce.op == :& or ce.op == :|) and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer and
ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == ce.op and ce.lexpr.lexpr and
ce.lexpr.rexpr.kind_of? C::CExpression and ce.lexpr.rexpr.rexpr.kind_of? ::Integer
ce.lexpr, ce.rexpr.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr.rexpr.send(ce.op, ce.rexpr.rexpr)
end
# x = x | 4 => x |= 4
if ce.op == :'=' and ce.rexpr.kind_of? C::CExpression and [:+, :-, :*, :/, :|, :&, :^, :>>, :<<].include? ce.rexpr.op and ce.rexpr.lexpr == ce.lexpr
ce.op = (ce.rexpr.op.to_s + '=').to_sym
ce.rexpr = ce.rexpr.rexpr
end
# x += 1 => ++x
if (ce.op == :'+=' or ce.op == :'-=') and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1
ce.lexpr, ce.op, ce.rexpr = nil, {:'+=' => :'++', :'-=' => :'--'}[ce.op], ce.lexpr
end
# --x+1 => x--
if (ce.op == :+ or ce.op == :-) and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == {:+ => :'--', :- => :'++'}[ce.op] and
ce.lexpr.rexpr and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1
ce.lexpr, ce.op, ce.rexpr = ce.lexpr.rexpr, ce.lexpr.op, nil
end
}
end
# drop local variables never referenced by any expression of the scope
def remove_unreferenced_vars(scope)
referenced = {}
mark = lambda { |node| referenced[node.name] = true if node.kind_of? C::Variable }
walk_ce(scope) { |ce|
mark[ce.rexpr]
mark[ce.lexpr]
ce.rexpr.each { |a| mark[a] } if ce.rexpr.kind_of?(::Array)
}
dead = scope.symbol.keys.reject { |name| referenced[name] }
# the attribute matters: fastcall argument slots must stay marked in the prototype
dead.each { |name| scope.symbol[name].add_attribute 'unused' }
scope.statements.delete_if { |st| st.kind_of? C::Declaration and dead.include? st.var.name }
scope.symbol.delete_if { |name, _var| dead.include? name }
end
# run the whole-program optimizations once every entrypoint is decompiled
# (called from #decompile); returns true
def finalize
optimize_global
true
end
# whole-program optimization of global variables:
# - drop globals that are never referenced
# - globals only ever accessed through * are demoted to the pointed type
# - globals referenced by a single function become static locals of it
def optimize_global
# check all global vars (pointers to global data)
tl = @c_parser.toplevel
vars = tl.symbol.keys.find_all { |k| tl.symbol[k].kind_of? C::Variable and not tl.symbol[k].type.kind_of? C::Function }
countref = Hash.new(0)
walk_ce(tl) { |ce|
# XXX int foo; void bar() { int foo; } => false negative
countref[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
countref[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
}
vars.delete_if { |v| countref[v] == 0 }
countref.delete_if { |k, v| not vars.include? k }
# by default globals are C::Arrays
# if all references are *foo, dereference the var type
# TODO allow foo to appear (change to &foo) (but still disallow casts/foo+12 etc)
countderef = Hash.new(0)
walk_ce(tl) { |ce|
if ce.op == :* and not ce.lexpr
r = ce.rexpr
elsif ce.op == :'->'
r = C::CExpression[ce.lexpr]
else next
end
# compare type.type cause var is an Array and the cast is a Pointer
countderef[r.rexpr.name] += 1 if r.kind_of? C::CExpression and not r.op and r.rexpr.kind_of? C::Variable and
sizeof(nil, r.type.type) == sizeof(nil, r.rexpr.type.type) rescue nil
}
vars.each { |n|
if countref[n] == countderef[n]
# every access dereferences: rewrite *var / var->x to var / var.x
v = tl.symbol[n]
target = C::CExpression[:*, [v]]
v.type = v.type.type
v.initializer = v.initializer.first if v.initializer.kind_of? ::Array
walk_ce(tl) { |ce|
if ce.op == :'->' and C::CExpression[ce.lexpr] == C::CExpression[v]
ce.op = :'.'
elsif ce.lexpr == target
ce.lexpr = v
end
ce.rexpr = v if ce.rexpr == target
ce.lexpr, ce.op, ce.rexpr = nil, nil, v if ce == target
}
end
}
# if a global var appears only in one function, make it a static variable
tl.statements.each { |st|
next if not st.kind_of? C::Declaration or not st.var.type.kind_of? C::Function or not scope = st.var.initializer
localcountref = Hash.new(0)
walk_ce(scope) { |ce|
localcountref[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
localcountref[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
}
vars.delete_if { |n|
next if scope.symbol[n]
next if localcountref[n] != countref[n]
v = tl.symbol.delete(n)
tl.statements.delete_if { |d| d.kind_of? C::Declaration and d.var.name == n }
if countref[n] == 1 and v.initializer.kind_of? C::CExpression and v.initializer.rexpr.kind_of? String
# a string constant used exactly once: inline the literal at its use site
walk_ce(scope) { |ce|
if ce.rexpr.kind_of? C::Variable and ce.rexpr.name == n
if not ce.op
ce.replace v.initializer
else
ce.rexpr = v.initializer
end
elsif ce.lexpr.kind_of? C::Variable and ce.lexpr.name == n
ce.lexpr = v.initializer
end
}
else
v.storage = :static
scope.symbol[v.name] = v
scope.statements.unshift C::Declaration.new(v)
end
true
}
}
end
# reorder statements to put decl first, move assignments to decl, move args to func prototype
def cleanup_var_decl(scope, func)
args = func.type.args
decl = []
scope.statements.delete_if { |sm|
next if not sm.kind_of? C::Declaration
if sm.var.stackoff.to_i > 0
# positive stack offset: actually a function argument, not a local
args << sm.var
else
decl << sm
end
true
}
# move trivial affectations to initialiser
# XXX a = 1 ; b = a ; a = 2
go = true # break from delete_if does not delete..
scope.statements.delete_if { |st|
# only leading "var = <int constant>" statements qualify; the first
# non-matching statement turns go off so later ones are kept (falsy else)
if go and st.kind_of? C::CExpression and st.op == :'=' and st.rexpr.kind_of? C::CExpression and not st.rexpr.op and
st.rexpr.rexpr.kind_of? ::Integer and st.lexpr.kind_of? C::Variable and scope.symbol[st.lexpr.name]
st.lexpr.initializer = st.rexpr
else
go = false
end
}
# reorder declarations
scope.statements[0, 0] = decl.sort_by { |sm| [-sm.var.stackoff.to_i, sm.var.name] }
# ensure arglist has no hole (create&add unreferenced args)
func.type.args = []
argoff = @c_parser.typesize[:ptr]
args.sort_by { |sm| sm.stackoff.to_i }.each { |a|
# XXX misalignment ?
if not curoff = a.stackoff
func.type.args << a # __fastcall
next
end
while curoff > argoff
# fill gaps in the argument stack layout with synthetic unused int args
wantarg = C::Variable.new
wantarg.name = scope.decompdata[:stackoff_name][argoff] || stackoff_to_varname(argoff)
wantarg.type = C::BaseType.new(:int)
wantarg.attributes = ['unused']
func.type.args << wantarg
scope.symbol[wantarg.name] = wantarg
argoff += @c_parser.typesize[:ptr]
end
func.type.args << a
argoff += @c_parser.typesize[:ptr]
}
end
# rename local variables from subfunc arg names
def rename_variables(scope)
funcs = []
cntrs = []
cmpi = []
# collect subfunction calls, ++ed variables, and variables compared to an int
walk_ce(scope) { |ce|
funcs << ce if ce.op == :funcall
cntrs << (ce.lexpr || ce.rexpr) if ce.op == :'++'
cmpi << ce.lexpr if [:<, :>, :<=, :>=, :==, :'!='].include? ce.op and ce.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.kind_of? ::Integer
}
# rename local var to name (suffixed by a counter if already taken);
# only autogenerated names (var_/arg_/registers) are renamed, and never
# to another autogenerated name
rename = lambda { |var, name|
var = var.rexpr if var.kind_of? C::CExpression and not var.op
next if not var.kind_of? C::Variable or not scope.symbol[var.name] or not name
next if (var.name !~ /^(var|arg)_/ and not var.storage == :register) or not scope.symbol[var.name] or name =~ /^(var|arg)_/
s = scope.symbol_ancestors
n = name
i = 0
n = name + "#{i+=1}" while s[n]
scope.symbol[n] = scope.symbol.delete(var.name)
var.name = n
}
# 1st pass: arguments passed directly take the subfunction parameter name
funcs.each { |ce|
next if not ce.lexpr.kind_of? C::Variable or not ce.lexpr.type.kind_of? C::Function
ce.rexpr.to_a.zip(ce.lexpr.type.args.to_a).each { |a, fa| rename[a, fa.name] if fa }
}
# 2nd pass: &var passed for a 'pfoo'/'lpfoo' parameter is renamed 'foo'
funcs.each { |ce|
next if not ce.lexpr.kind_of? C::Variable or not ce.lexpr.type.kind_of? C::Function
ce.rexpr.to_a.zip(ce.lexpr.type.args.to_a).each { |a, fa|
next if not a.kind_of? C::CExpression or a.op != :& or a.lexpr
next if not fa or not fa.name
rename[a.rexpr, fa.name.sub(/^l?p/, '')]
}
}
# variables that are incremented and compared against a constant are loop counters
(cntrs & cmpi).each { |v| rename[v, 'cntr'] }
end
# depth-first visit of every CExpression reachable from ce
# yields each CExpr before its children by default, after them when post is
# true; arrays, statements and declarations are traversed transparently
# until CExprs are found; always returns nil
def walk_ce(ce, post=false, &b)
if ce.kind_of?(C::CExpression)
b.call(ce) unless post
walk_ce(ce.lexpr, post, &b)
walk_ce(ce.rexpr, post, &b)
b.call(ce) if post
elsif ce.kind_of?(::Array)
ce.each { |elem| walk_ce(elem, post, &b) }
elsif ce.kind_of?(C::Statement)
if ce.kind_of?(C::Block)
walk_ce(ce.statements, post, &b)
elsif ce.kind_of?(C::If)
walk_ce(ce.test, post, &b)
walk_ce(ce.bthen, post, &b)
walk_ce(ce.belse, post, &b) if ce.belse
elsif ce.kind_of?(C::While) or ce.kind_of?(C::DoWhile)
walk_ce(ce.test, post, &b)
walk_ce(ce.body, post, &b)
elsif ce.kind_of?(C::Return)
walk_ce(ce.value, post, &b) if ce.value
end
elsif ce.kind_of?(C::Declaration)
walk_ce(ce.var.initializer, post, &b) if ce.var.initializer
end
nil
end
# yields each statement of the tree rooted at scope (recursive)
# post selects pre-order (default) or post-order for the statement itself;
# note: If/While tests and Return values are yielded directly, not recursed
def walk(scope, post=false, &b)
if scope.kind_of?(::Array)
scope.each { |stmt| walk(stmt, post, &b) }
elsif scope.kind_of?(C::Statement)
b.call(scope) unless post
if scope.kind_of?(C::Block)
walk(scope.statements, post, &b)
elsif scope.kind_of?(C::If)
b.call(scope.test)
walk(scope.bthen, post, &b)
walk(scope.belse, post, &b) if scope.belse
elsif scope.kind_of?(C::While) or scope.kind_of?(C::DoWhile)
b.call(scope.test)
walk(scope.body, post, &b)
elsif scope.kind_of?(C::Return)
b.call(scope.value)
end
b.call(scope) if post
elsif scope.kind_of?(C::Declaration)
walk(scope.var.initializer, post, &b) if scope.var.initializer
end
end
# size in bytes of a variable or type, delegated to @c_parser
# accepts sizeof(type) as a shorthand for sizeof(nil, type); a bare
# C::Array with no backing Variable is sized as a pointer (these should
# not happen btw...); returns -1 when the parser cannot size it
def sizeof(var, type=nil)
if var.kind_of? C::Type and not type
type = var
var = nil
end
type ||= var.type
if type.kind_of? C::Array and not var.kind_of? C::Variable
return @c_parser.typesize[:ptr]
end
begin
@c_parser.sizeof(var, type)
rescue
-1
end
end
end
end
# dcmp: aoenthu  (stray non-Ruby marker, commented out to keep the file parseable)
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/main'
require 'metasm/decode'
require 'metasm/parse_c'
module Metasm
# decompiler-specific annotations grafted onto the generic classes:
# byte offset of a local variable relative to the stack frame
class C::Variable; attr_accessor :stackoff; end
# per-function decompiler metadata (:stackoff_type / :stackoff_name hashes)
class C::Block; attr_accessor :decompdata; end
class DecodedFunction; attr_accessor :decompdata; end
class CPU
# arch-specific hook called at the end of function decompilation to check
# or fix up ABI-related details; default is a no-op
def decompile_check_abi(dcmp, entry, func)
end
# returns the instruction binding adapted for forward emulation:
# self-updating registers (a <- a+i) are reverted in the other simultaneous
# assignments so each rhs is expressed in terms of pre-instruction values
def get_fwdemu_binding(di)
fdi = di.backtrace_binding ||= get_backtrace_binding(di)
# find self-updated regs & revert them in simultaneous affectations
# XXX handles only a <- a+i for now, this covers all useful cases (except imul eax, eax, 42 jz foobar)
fdi.keys.grep(::Symbol).each { |s|
val = Expression[fdi[s]]
next if val.lexpr != s or (val.op != :+ and val.op != :-) #or not val.rexpr.kind_of? ::Integer
fwd = { s => val }
inv = { s => val.dup }
inv[s].op = ((inv[s].op == :+) ? :- : :+)
nxt = {}
fdi.each { |k, v|
if k == s
nxt[k] = v
else
# rebase indirection addresses and values on the pre-update register
k = k.bind(fwd).reduce_rec if k.kind_of? Indirection
nxt[k] = Expression[Expression[v].bind(inv).reduce_rec]
end
}
fdi = nxt
}
fdi
end
end
class Decompiler
# TODO add methods to C::CExpr
# CExpression operators that write to their operand (++/-- included)
AssignOp = [:'=', :'+=', :'-=', :'*=', :'/=', :'%=', :'^=', :'&=', :'|=', :'>>=', :'<<=', :'++', :'--']
attr_accessor :dasm, :c_parser
# flags to selectively disable the optimization / readability passes
attr_accessor :forbid_optimize_dataflow, :forbid_optimize_code, :forbid_decompile_ifwhile, :forbid_decompile_types, :forbid_optimize_labels
# recursive flag: for each subfunction, recurse is decremented, when 0 only the prototype is decompiled, when <0 nothing is done
attr_accessor :recurse
def initialize(dasm, cp = dasm.c_parser)
@dasm = dasm
@recurse = 1/0.0 # Infinity
@c_parser = cp || @dasm.cpu.new_cparser
end
# decompile every entrypoint in turn (recursing into subfunctions), then run
# the whole-program optimisation pass (static vars, ...) once at the end --
# global optimizations may bring bad results if run earlier.
# use decompile_func for incremental decompilation.
# returns the c_parser holding the decompiled AST
def decompile(*entry)
entry.each do |ep|
decompile_func(ep)
end
finalize
@c_parser
end
# decompile a function, decompiling subfunctions as needed
# may return :restart, which means that the decompilation should restart from the entrypoint (and bubble up) (eg a new codepath is found which may changes dependency in blocks etc)
def decompile_func(entry)
return if @recurse < 0
entry = @dasm.normalize entry
return if not @dasm.decoded[entry]
# create a new toplevel function to hold our code
func = C::Variable.new
func.name = @dasm.auto_label_at(entry, 'func')
if f = @dasm.function[entry] and f.decompdata and f.decompdata[:return_type]
rettype = f.decompdata[:return_type]
else
rettype = C::BaseType.new(:int)
end
func.type = C::Function.new rettype, []
if @c_parser.toplevel.symbol[func.name]
# already decompiled, or currently being decompiled up the call stack
return if @recurse == 0
if not @c_parser.toplevel.statements.grep(C::Declaration).find { |decl| decl.var.name == func.name }
# recursive dependency: declare prototype
puts "function #{func.name} is recursive: predecompiling for prototype" if $VERBOSE
pre_recurse = @recurse
@recurse = 0
@c_parser.toplevel.symbol.delete func.name
decompile_func(entry)
@recurse = pre_recurse
if not dcl = @c_parser.toplevel.statements.grep(C::Declaration).find { |decl| decl.var.name == func.name }
@c_parser.toplevel.statements << C::Declaration.new(func)
end
end
return
end
@c_parser.toplevel.symbol[func.name] = func
puts "decompiling #{func.name}" if $VERBOSE
# do_decompile_func throws :restart when it discovers a new codepath that
# invalidates earlier work; retry until it completes, and remember the
# restart so it bubbles up to our own caller (retval stays nil otherwise)
while catch(:restart) { do_decompile_func(entry, func) } == :restart
retval = :restart
end
@c_parser.toplevel.symbol[func.name] = func # recursive func prototype could have overwritten us
@c_parser.toplevel.statements << C::Declaration.new(func)
puts " decompiled #{func.name}" if $VERBOSE
retval
end
# decompile a subfunction: forwards to decompile_func with the recursion
# budget decremented for the duration of the call (always restored, even
# when an exception or a :restart throw unwinds through us)
def decompile_func_rec(entry)
begin
@recurse -= 1
decompile_func(entry)
ensure
@recurse += 1
end
end
# the actual decompilation pipeline for one function (entry => func AST);
# may throw :restart (see decompile_func)
def do_decompile_func(entry, func)
# find decodedinstruction graph of the function, decompile subfuncs
myblocks = listblocks_func(entry)
# [esp+8] => [:frameptr-12]
makestackvars entry, myblocks.map { |b, to| @dasm.decoded[b].block }
# find registry dependencies between blocks
deps = @dasm.cpu.decompile_func_finddeps(self, myblocks, func)
scope = func.initializer = C::Block.new(@c_parser.toplevel)
if df = @dasm.function[entry]
scope.decompdata = df.decompdata ||= {:stackoff_type => {}, :stackoff_name => {}}
else
scope.decompdata ||= {:stackoff_type => {}, :stackoff_name => {}}
end
# di blocks => raw c statements, declare variables
@dasm.cpu.decompile_blocks(self, myblocks, deps, func)
# successive simplification / typing passes over the raw C (order matters)
simplify_goto(scope)
namestackvars(scope)
unalias_vars(scope, func)
decompile_c_types(scope)
optimize(scope)
remove_unreferenced_vars(scope)
cleanup_var_decl(scope, func)
if @recurse > 0
# readability passes, only useful when generating a full function body
decompile_controlseq(scope)
optimize_vars(scope)
optimize_ctrl(scope)
optimize_vars(scope)
remove_unreferenced_vars(scope)
simplify_varname_noalias(scope)
rename_variables(scope)
end
@dasm.cpu.decompile_check_abi(self, entry, func)
# infer the function return type from the trailing return statement
case ret = scope.statements.last
when C::CExpression; puts "no return at end of func" if $VERBOSE
when C::Return
if not ret.value
scope.statements.pop
else
v = ret.value
v = v.rexpr if v.kind_of? C::CExpression and not v.op and (v.rexpr.kind_of? C::CExpression or v.rexpr.kind_of? C::Variable)
func.type.type = v.type
end
end
if @recurse == 0
# we need only the prototype
func.initializer = nil
end
end
# creates (or returns) the toplevel C::Variable standing for the global data
# at addr, whose element type is deduced from the pointer type used to access
# it; may trigger disassembly+decompilation of a pointed function, and decodes
# an initializer from the section bytes when possible
def new_global_var(addr, type)
addr = @dasm.normalize(addr)
# TODO check overlap with alreadydefined globals
ptype = type.untypedef.type.untypedef if type.pointer?
if ptype.kind_of? C::Function
# pointer to code: disassemble & decompile the target as a subfunction
name = @dasm.auto_label_at(addr, 'sub', 'xref', 'byte', 'word', 'dword', 'unk')
if @dasm.get_section_at(addr) and @recurse > 0
@dasm.disassemble(addr) if not @dasm.decoded[addr] # TODO disassemble_fast ?
f = @dasm.function[addr] ||= DecodedFunction.new
# TODO detect thunks (__noreturn)
f.decompdata ||= { :stackoff_type => {}, :stackoff_name => {} }
if not s = @c_parser.toplevel.symbol[name] or not s.initializer or not s.type.untypedef.kind_of? C::Function
@c_parser.toplevel.symbol.delete name
@c_parser.toplevel.statements.delete_if { |ts| ts.kind_of? C::Declaration and ts.var.name == name }
# seed the target decompdata with arg types/names from the pointer type
aoff = 1
ptype.args.to_a.each { |a|
aoff = (aoff + @c_parser.typesize[:ptr] - 1) / @c_parser.typesize[:ptr] * @c_parser.typesize[:ptr]
f.decompdata[:stackoff_type][aoff] ||= a.type
f.decompdata[:stackoff_name][aoff] ||= a.name if a.name
aoff += sizeof(a) # ary ?
}
decompile_func_rec(addr)
# else redecompile with new prototye ?
end
end
end
# pick a name prefix reflecting the accessed element size
name = case (type.pointer? && tsz = sizeof(nil, ptype))
when 1; 'byte'
when 2; 'word'
when 4; 'dword'
else 'unk'
end
name = 'stru' if ptype.kind_of? C::Union
name = @dasm.auto_label_at(addr, name, 'xref', 'byte', 'word', 'dword', 'unk', 'stru')
if not var = @c_parser.toplevel.symbol[name]
var = C::Variable.new
var.name = name
var.type = type.pointer? ? C::Array.new(ptype) : type
@c_parser.toplevel.symbol[var.name] = var
@c_parser.toplevel.statements << C::Declaration.new(var)
end
if ptype.kind_of? C::Union and type.pointer? and s = @dasm.get_section_at(name) and s[0].ptr < s[0].length
# TODO struct init, array, fptrs..
elsif type.pointer? and s = @dasm.get_section_at(name) and s[0].ptr < s[0].length and [1, 2, 4].include? tsz and (not var.type.pointer? or sizeof(var.type.untypedef.type) != sizeof(type.untypedef.type) or not var.initializer)
# TODO do not overlap other statics (but labels may refer to elements of the array...)
# decode up to 257 elements of raw data as the initializer
data = (0..256).map {
v = s[0].decode_imm("u#{tsz*8}".to_sym, @dasm.cpu.endianness)
v = decompile_cexpr(v, @c_parser.toplevel) if v.kind_of? Expression # relocation
v
}
var.initializer = data.map { |v| C::CExpression[v, C::BaseType.new(:int)] } unless (data - [0]).empty?
if (tsz == 1 or tsz == 2) and eos = data.index(0) and (0..3).all? { |i| data[i] >= 0x20 and data[i] < 0x7f } # printable str
# XXX 0x80 with ruby1.9...
var.initializer = C::CExpression[data[0, eos].pack('C*'), C::Pointer.new(ptype)] rescue nil
end
# first element is an external function pointer: type the array accordingly
if var.initializer.kind_of? ::Array and i = var.initializer.first and i.kind_of? C::CExpression and not i.op and i.rexpr.kind_of? C::Variable and
i.rexpr.type.kind_of? C::Function and not @dasm.get_section_at(@dasm.normalize(i.rexpr.name)) # iat_ExternalFunc
i.type = i.rexpr.type
type = var.type = C::Array.new(C::Pointer.new(i.type))
var.initializer = [i]
end
var.initializer = nil if var.initializer.kind_of? ::Array and not type.untypedef.kind_of? C::Array
end
# TODO patch existing references to addr ? (or would they have already triggered new_global_var?)
# return the object to use to replace the raw addr
var
end
# decompiles the subfunctions called by this function, then returns the
# basic-block graph as an array of [block start address, list of addresses
# of successor blocks]
def listblocks_func(entry)
@autofuncs ||= []
blocks = []
entry = dasm.normalize entry
todo = [entry]
# worklist walk over all code reachable from entry
while a = todo.pop
next if blocks.find { |aa, at| aa == a }
next if not di = @dasm.decoded[a]
next if not di.kind_of? DecodedInstruction
blocks << [a, []]
di.block.each_to { |ta, type|
next if type == :indirect
ta = dasm.normalize ta
if type != :subfuncret and not @dasm.function[ta] and
(not @dasm.function[entry] or @autofuncs.include? entry) and
di.block.list.last.opcode.props[:saveip]
# possible noreturn function
# XXX call $+5; pop eax
@autofuncs << ta
@dasm.function[ta] = DecodedFunction.new
puts "autofunc #{Expression[ta]}" if $VERBOSE
end
if @dasm.function[ta] and type != :subfuncret
# target is a subfunction: decompile it first (unless it is ourself)
f = dasm.auto_label_at(ta, 'func')
ta = dasm.normalize($1) if f =~ /^thunk_(.*)/
ret = decompile_func_rec(ta) if (ta != entry or di.block.to_subfuncret)
# propagate a decompilation restart request up to the caller
throw :restart, :restart if ret == :restart
else
@dasm.auto_label_at(ta, 'label') if blocks.find { |aa, at| aa == ta }
blocks.last[1] |= [ta]
todo << ta
end
}
end
blocks
end
# backtraces an expression from addr
# returns an integer, a label name, or an (possibly unresolved) Expression
# XXX '(GetProcAddr("foo"))()' should not decompile to 'foo()'
def backtrace_target(expr, addr)
  bt = @dasm.backtrace(expr, addr).first
  # no result or unresolvable: keep the raw expression
  return expr if not bt
  return expr if bt == Expression::Unknown
  resolved = Expression[bt].reduce_rec
  resolved = @dasm.get_label_at(resolved) || resolved
  # strip thunk indirection from label names
  resolved = $1 if resolved.kind_of?(::String) and resolved =~ /^thunk_(.*)/
  resolved
end
# patches instruction's backtrace_binding to replace things referring to a
# static stack offset from func start by :frameptr+off
# funcstart is the function entry address, blocks the list of its basic blocks
def makestackvars(funcstart, blocks)
  blockstart = nil
  cache_di = nil
  cache = {}	# [e, i_s, pass] => backtrace result
  # resolve an expression element to :frameptr+off form when possible
  tovar = lambda { |di, e, i_s|
    case e
    when Expression; Expression[tovar[di, e.lexpr, i_s], e.op, tovar[di, e.rexpr, i_s]].reduce
    when Indirection; Indirection[tovar[di, e.target, i_s], e.len]
    when :frameptr; e
    when ::Symbol
      # the backtrace cache is only valid within a single instruction
      cache.clear if cache_di != di ; cache_di = di
      # cheap pass: backtrace only to the start of the current basic block
      vals = cache[[e, i_s, 0]] ||= @dasm.backtrace(e, di.address, :snapshot_addr => blockstart,
          :include_start => i_s, :no_check => true, :terminals => [:frameptr])
      if vals.length == 1 and ee = vals.first and ee.kind_of? Expression and (ee == Expression[:frameptr] or
          (ee.lexpr == :frameptr and ee.op == :+ and ee.rexpr.kind_of? ::Integer) or
          (not ee.lexpr and ee.op == :+ and ee.rexpr.kind_of? Indirection and eep = ee.rexpr.pointer and
          (eep == Expression[:frameptr] or (eep.lexpr == :frameptr and eep.op == :+ and eep.rexpr.kind_of? ::Integer))))
        ee
      else
        # fallback on full run (could restart from blockstart with ee, but may reevaluate addr_binding..
        vals = cache[[e, i_s, 1]] ||= @dasm.backtrace(e, di.address, :snapshot_addr => funcstart,
            :include_start => i_s, :no_check => true, :terminals => [:frameptr])
        if vals.length == 1 and ee = vals.first and (ee.kind_of? Expression and (ee == Expression[:frameptr] or
            (ee.lexpr == :frameptr and ee.op == :+ and ee.rexpr.kind_of? ::Integer)))
          ee
        else e
        end
      end
    else e
    end
  }
  # must not change bt_bindings until everything is backtracked
  repl_bind = {}	# di => new backtrace_binding
  @dasm.cpu.decompile_makestackvars(@dasm, funcstart, blocks) { |block|
    # record the current block start so tovar's first pass snapshots here
    # (was never assigned before, leaving :snapshot_addr => nil and defeating the fast pass)
    blockstart = block.address
    block.list.each { |di|
      bd = di.backtrace_binding ||= @dasm.cpu.get_backtrace_binding(di)
      newbd = repl_bind[di] = {}
      bd.each { |k, v|
        k = tovar[di, k, true] if k.kind_of? Indirection
        # bindings of the frame pointer itself are dropped
        next if k == Expression[:frameptr] or (k.kind_of? Expression and k.lexpr == :frameptr and k.op == :+ and k.rexpr.kind_of? ::Integer)
        newbd[k] = tovar[di, v, false]
      }
    }
  }
  # apply all rewritten bindings at once
  repl_bind.each { |di, bd| di.backtrace_binding = bd }
end
# gives a name to a stack offset (relative to the function start)
# e.g. with 4-byte pointers: 4 => 'arg_0', 8 => 'arg_4', 0 => 'retaddr',
# -4 => 'var_0', -8 => 'var_4'; unaligned offsets get an extra '0' prefix
def stackoff_to_varname(off)
  ptrsz = @c_parser.typesize[:ptr]
  regsz = @dasm.cpu.size/8
  if off >= ptrsz
    'arg_%X' % (off - ptrsz)
  elsif off > 0
    'arg_0%X' % off
  elsif off == 0
    'retaddr'
  elsif off <= -regsz
    'var_%X' % (-off - regsz)
  else
    'var_0%X' % -off
  end
end
# turns an Expression into a C::CExpression, creating and declaring any
# needed variables in scope; itype is a type hint for global references
def decompile_cexpr(e, scope, itype=nil)
case e
when Expression
if e.op == :'=' and e.lexpr.kind_of? ::String and e.lexpr =~ /^dummy_metasm_/
# internal dummy assignment: keep only the right-hand side
decompile_cexpr(e.rexpr, scope, itype)
elsif e.op == :+ and e.rexpr.kind_of? ::Integer and e.rexpr < 0
# a + (-n) => a - n
decompile_cexpr(Expression[e.lexpr, :-, -e.rexpr], scope, itype)
elsif e.lexpr
a = decompile_cexpr(e.lexpr, scope, itype)
C::CExpression[a, e.op, decompile_cexpr(e.rexpr, scope, itype)]
elsif e.op == :+
# unary plus is a no-op
decompile_cexpr(e.rexpr, scope, itype)
else
a = decompile_cexpr(e.rexpr, scope, itype)
C::CExpression[e.op, a]
end
when Indirection
# memory dereference: pointer to an int of the access size
itype = C::Pointer.new(C::BaseType.new("__int#{e.len*8}".to_sym))
p = decompile_cexpr(e.target, scope, itype)
p = C::CExpression[[p], itype] if not p.type.kind_of? C::Pointer
C::CExpression[:*, p]
when ::Integer
C::CExpression[e]
when C::CExpression
e
else
# leaf: a register symbol, a label string, or something unexpected
name = e.to_s
if not s = scope.symbol_ancestors[name]
s = C::Variable.new
s.type = C::BaseType.new(:__int32)
case e
when ::String # edata relocation (rel.length = size of pointer)
return @c_parser.toplevel.symbol[e] || new_global_var(e, itype || C::BaseType.new(:int))
when ::Symbol; s.storage = :register
else s.type.qualifier = [:volatile]
puts "decompile_cexpr unhandled #{e.inspect}, using #{e.to_s.inspect}" if $VERBOSE
end
s.name = name
scope.symbol[s.name] = s
scope.statements << C::Declaration.new(s)
end
s
end
end
# simplifies gotos: goto-to-goto chains are shortcut, goto-to-return becomes
# a return, duplicate trailing returns become gotos to a shared ret_label,
# and dead code between an unconditional jump and the next label is removed
def simplify_goto(scope, keepret = false)
if not keepret and scope.statements[-1].kind_of? C::Return and not scope.statements[-2].kind_of? C::Label
# ensure the final return is labeled so earlier returns can jump to it
scope.statements.insert(-2, C::Label.new("ret_label"))
end
# map label name => the Goto/Return statement that immediately follows it
jumpto = {}
walk(scope) { |s|
next if not s.kind_of? C::Block
s.statements.each_with_index { |ss, i|
case ss
when C::Goto, C::Return
# all labels stacked directly above this statement lead here
while l = s.statements[i -= 1] and l.kind_of? C::Label
jumpto[l.name] = ss
end
end
}
}
# returns a replacement statement for s, or nil to keep it
simpler = lambda { |s|
case s
when C::Goto
if jumpto[s.target]
r = jumpto[s.target].dup
r.value = C::CExpression[r.value.reduce(@c_parser)] if r.kind_of? C::Return and r.value # deep_dup
r
end
when C::Return
# a return identical to the final one becomes a goto to it
if not keepret and scope.statements[-1].kind_of? C::Return and s.value == scope.statements[-1].value and s != scope.statements[-1]
C::Goto.new(scope.statements[-2].name)
end
end
}
walk(scope) { |s|
case s
when C::Block
s.statements.each_with_index { |ss, i|
if sp = simpler[ss]
ss = s.statements[i] = sp
end
}
when C::If
if sp = simpler[s.bthen]
s.bthen = sp
end
end
}
# remove unreferenced labels
remove_labels(scope)
walk(scope) { |s|
next if not s.kind_of? C::Block
del = false
# remove dead code goto a; goto b; if (0) { z: bla; } => rm goto b
s.statements.delete_if { |st|
case st
when C::Goto, C::Return
# delete any jump directly following another jump
olddel = del
del = true
olddel
else
del = false
end
}
# if () { goto x; } x:  =>  if () {}
s.statements.each_with_index { |ss, i|
if ss.kind_of? C::If
t = ss.bthen
t = t.statements.first if t.kind_of? C::Block
if t.kind_of? C::Goto and s.statements[i+1].kind_of? C::Label and s.statements[i+1].name == t.target
ss.bthen = C::Block.new(scope)
end
end
}
}
remove_labels(scope)
end
# rewrites the flat ifgoto/goto statement list into structured
# control flow: if/else, while/do-while, and switch constructs
def decompile_controlseq(scope)
  # TODO replace all this crap by a method using the graph representation
  # two if-passes: the first may expose patterns the second can fold further
  2.times {
    scope.statements = decompile_cseq_if(scope.statements, scope)
    remove_labels(scope)
  }
  # TODO harmonize _if/_while api (if returns a replacement, while patches)
  decompile_cseq_while(scope.statements, scope)
  decompile_cseq_switch(scope)
end
# cleans up structured control flow:
#  - turns gotos targeting loop boundaries into break/continue
#  - hoists a trailing if { ...; return; } out of a while(1) loop
#  - rewrites tests like if (x+1) into if (x != -1)
#  - unwraps single-statement then/else/body blocks: if () { a; } => if () a;
#  - merges consecutive ifs sharing an identical then branch
def optimize_ctrl(scope)
  simplify_goto(scope, true)
  # break/continue
  # XXX if (foo) while (bar) goto bla; bla: should => break
  walk = lambda { |e, brk, cnt|
    case e
    when C::Block
      walk[e.statements, brk, cnt]
      e
    when ::Array
      e.each_with_index { |st, i|
        case st
        when C::While, C::DoWhile
          # a label right after the loop is its break target,
          # one right before it is its continue target
          l1 = e[i+1].name if e[i+1].kind_of? C::Label
          l2 = e[i-1].name if e[i-1].kind_of? C::Label
          e[i] = walk[st, l1, l2]
        else
          e[i] = walk[st, brk, cnt]
        end
      }
      e
    when C::If
      e.bthen = walk[e.bthen, brk, cnt] if e.bthen
      # fixed: was guarded by `if e.bthen`, which skipped the else branch
      # whenever the then branch was nil
      e.belse = walk[e.belse, brk, cnt] if e.belse
      e
    when C::While, C::DoWhile
      # a nested loop shadows the enclosing break/continue targets
      e.body = walk[e.body, nil, nil]
      e
    when C::Goto
      if e.target == brk
        C::Break.new
      elsif e.target == cnt
        C::Continue.new
      else e
      end
    else e
    end
  }
  walk[scope, nil, nil]
  remove_labels(scope)
  # while (1) { a; if(b) { c; return; }; d; } => while (1) { a; if (b) break; d; } c;
  while st = scope.statements.last and st.kind_of? C::While and st.test.kind_of? C::CExpression and
      not st.test.op and st.test.rexpr == 1 and st.body.kind_of? C::Block
    break if not i = st.body.statements.find { |ist|
      ist.kind_of? C::If and not ist.belse and ist.bthen.kind_of? C::Block and ist.bthen.statements.last.kind_of? C::Return
    }
    # reparent the hoisted statements to the outer scope
    walk(i.bthen.statements) { |sst| sst.outer = i.bthen.outer if sst.kind_of? C::Block and sst.outer == i.bthen }
    scope.statements.concat i.bthen.statements
    i.bthen = C::Break.new
  end
  # strip a leading '!', then rewrite if (a+k) => if (a != -k)
  patch_test = lambda { |ce|
    ce = ce.rexpr if ce.kind_of? C::CExpression and ce.op == :'!'
    if ce.kind_of? C::CExpression and (ce.op == :+ or ce.op == :-) and ce.rexpr.kind_of? C::CExpression and
        not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer
      ce.rexpr.rexpr = -ce.rexpr.rexpr if ce.op == :+
      ce.op = :'!='
    end
  }
  walk(scope) { |ce|
    case ce
    when C::If
      patch_test[ce.test]
      if ce.bthen.kind_of? C::Block
        case ce.bthen.statements.length
        when 1
          # if () { single; } => if () single;
          walk(ce.bthen.statements) { |sst| sst.outer = ce.bthen.outer if sst.kind_of? C::Block and sst.outer == ce.bthen }
          ce.bthen = ce.bthen.statements.first
        when 0
          # if () {} => keep only the test expression for its side effects
          if not ce.belse and i = ce.bthen.outer.statements.index(ce)
            ce.bthen.outer.statements[i] = ce.test # TODO remove sideeffectless parts
          end
        end
      end
      if ce.belse.kind_of? C::Block and ce.belse.statements.length == 1
        walk(ce.belse.statements) { |sst| sst.outer = ce.belse.outer if sst.kind_of? C::Block and sst.outer == ce.belse }
        ce.belse = ce.belse.statements.first
      end
    when C::While, C::DoWhile
      patch_test[ce.test]
      if ce.body.kind_of? C::Block
        case ce.body.statements.length
        when 1
          walk(ce.body.statements) { |sst| sst.outer = ce.body.outer if sst.kind_of? C::Block and sst.outer == ce.body }
          ce.body = ce.body.statements.first
        when 0
          # an empty do-while degenerates to a plain while
          if ce.kind_of? C::DoWhile and i = ce.body.outer.statements.index(ce)
            ce = ce.body.outer.statements[i] = C::While.new(ce.test, ce.body)
          end
          ce.body = nil
        end
      end
    end
  }
  # if (a) return x; if (b) return x; => if (a || b) return x;
  walk(scope) { |ce|
    next if not ce.kind_of? C::Block
    st = ce.statements
    st.length.times { |n|
      while st[n].kind_of? C::If and st[n+1].kind_of? C::If and not st[n].belse and not st[n+1].belse and (
          (st[n].bthen.kind_of? C::Return and st[n+1].bthen.kind_of? C::Return and st[n].bthen.value == st[n+1].bthen.value) or
          (st[n].bthen.kind_of? C::Break and st[n+1].bthen.kind_of? C::Break) or
          (st[n].bthen.kind_of? C::Continue and st[n+1].bthen.kind_of? C::Continue))
        st[n].test = C::CExpression[st[n].test, :'||', st[n+1].test]
        st.delete_at(n+1)
      end
    }
  }
end
# ifgoto => ifthen
# ary is an array of statements where we try to find if () {} [else {}]
# recurses to then/else content
# returns the array of rewritten statements (ary is consumed destructively)
def decompile_cseq_if(ary, scope)
return ary if forbid_decompile_ifwhile
# the array of decompiled statements to use as replacement
ret = []
# list of labels appearing in ary
inner_labels = ary.grep(C::Label).map { |l| l.name }
while s = ary.shift
# recurse if it's not the first run
if s.kind_of? C::If
s.bthen.statements = decompile_cseq_if(s.bthen.statements, s.bthen) if s.bthen.kind_of? C::Block
s.belse.statements = decompile_cseq_if(s.belse.statements, s.belse) if s.belse.kind_of? C::Block
end
# if (a) goto x; if (b) goto x; => if (a || b) goto x;
while s.kind_of? C::If and s.bthen.kind_of? C::Goto and not s.belse and ary.first.kind_of? C::If and ary.first.bthen.kind_of? C::Goto and
not ary.first.belse and s.bthen.target == ary.first.bthen.target
s.test = C::CExpression[s.test, :'||', ary.shift.test]
end
# if (a) goto x; b; x: => if (!a) { b; }
if s.kind_of? C::If and s.bthen.kind_of? C::Goto and l = ary.grep(C::Label).find { |l_| l_.name == s.bthen.target }
# if {goto l;} a; l: => if (!) {a;}
s.test = C::CExpression.negate s.test
s.bthen = C::Block.new(scope)
s.bthen.statements = decompile_cseq_if(ary[0..ary.index(l)], s.bthen)
s.bthen.statements.pop # remove l: from bthen, it is in ary (was needed in bthen for inner ifs)
ary[0...ary.index(l)] = []
end
# from here on, fold patterns around an if with a block/goto then-branch
if s.kind_of? C::If and (s.bthen.kind_of? C::Block or s.bthen.kind_of? C::Goto)
s.bthen = C::Block.new(scope, [s.bthen]) if s.bthen.kind_of? C::Goto
bts = s.bthen.statements
# if (a) if (b) { c; } => if (a && b) { c; }
if bts.length == 1 and bts.first.kind_of? C::If and not bts.first.belse
s.test = C::CExpression[s.test, :'&&', bts.first.test]
bts = bts.first.bthen
bts = s.bthen.statements = bts.kind_of?(C::Block) ? bts.statements : [bts]
end
# if (a) { if (b) goto c; d; } c: => if (a && !b) { d; }
if bts.first.kind_of? C::If and l = bts.first.bthen and (l = l.kind_of?(C::Block) ? l.statements.first : l) and l.kind_of? C::Goto and ary[0].kind_of? C::Label and l.target == ary[0].name
s.test = C::CExpression[s.test, :'&&', C::CExpression.negate(bts.first.test)]
if e = bts.shift.belse
bts.unshift e
end
end
# if () { goto a; } a:
if bts.last.kind_of? C::Goto and ary[0].kind_of? C::Label and bts.last.target == ary[0].name
bts.pop
end
# if { a; goto outer; } b; return; => if (!) { b; return; } a; goto outer;
if bts.last.kind_of? C::Goto and not inner_labels.include? bts.last.target and g = ary.find { |ss| ss.kind_of? C::Goto or ss.kind_of? C::Return } and g.kind_of? C::Return
s.test = C::CExpression.negate s.test
ary[0..ary.index(g)], bts[0..-1] = bts, ary[0..ary.index(g)]
end
# if { a; goto l; } b; l: => if {a;} else {b;}
if bts.last.kind_of? C::Goto and l = ary.grep(C::Label).find { |l_| l_.name == bts.last.target }
s.belse = C::Block.new(scope)
s.belse.statements = decompile_cseq_if(ary[0...ary.index(l)], s.belse)
ary[0...ary.index(l)] = []
bts.pop
end
# if { a; l: b; goto any;} c; goto l; => if { a; } else { c; } b; goto any;
if not s.belse and (bts.last.kind_of? C::Goto or bts.last.kind_of? C::Return) and g = ary.grep(C::Goto).first and l = bts.grep(C::Label).find { |l_| l_.name == g.target }
s.belse = C::Block.new(scope)
s.belse.statements = decompile_cseq_if(ary[0...ary.index(g)], s.belse)
ary[0..ary.index(g)], bts[bts.index(l)..-1] = bts[bts.index(l)..-1], []
end
# if { a; b; c; } else { d; b; c; } => if {a;} else {d;} b; c;
if s.belse
bes = s.belse.statements
# hoist the identical tails of both branches out of the if
while not bts.empty?
if bts.last.kind_of? C::Label; ary.unshift bts.pop
elsif bes.last.kind_of? C::Label; ary.unshift bes.pop
elsif bts.last.to_s == bes.last.to_s; ary.unshift bes.pop ; bts.pop
else break
end
end
# if () { a; } else { b; } => if () { a; } else b;
# if () { a; } else {} => if () { a; }
case bes.length
when 0; s.belse = nil
#when 1; s.belse = bes.first
end
end
# if () {} else { a; } => if (!) { a; }
# if () { a; } => if () a;
case bts.length
when 0; s.test, s.bthen, s.belse = C::CExpression.negate(s.test), s.belse, nil if s.belse
#when 1; s.bthen = bts.first # later (allows simpler handling in _while)
end
end
# l1: l2: if () goto l1; goto l2; => if(!) goto l2; goto l1;
if s.kind_of? C::If
ls = s.bthen
ls = ls.statements.last if ls.kind_of? C::Block
if ls.kind_of? C::Goto
if li = inner_labels.index(ls.target)
table = inner_labels
else
# target label not seen yet: order by reversed label sequence in ary
table = ary.map { |st| st.name if st.kind_of? C::Label }.compact.reverse
li = table.index(ls.target) || table.length
end
# find a goto jumping further than ours (stop searching at a return)
g = ary.find { |ss|
break if ss.kind_of? C::Return
next if not ss.kind_of? C::Goto
table.index(ss.target).to_i > li
}
if g
s.test = C::CExpression.negate s.test
if not s.bthen.kind_of? C::Block
ls = C::Block.new(scope)
ls.statements << s.bthen
s.bthen = ls
end
ary[0..ary.index(g)], s.bthen.statements = s.bthen.statements, decompile_cseq_if(ary[0..ary.index(g)], scope)
end
end
end
ret << s
end
ret
end
# label/goto patterns => while / do-while loops
# patches ary in place (and recurses into nested bodies); returns ary
def decompile_cseq_while(ary, scope)
return if forbid_decompile_ifwhile
# find the next instruction that is not a label
ni = lambda { |l| ary[ary.index(l)..-1].find { |s| not s.kind_of? C::Label } }
# TODO XXX get rid of #index
# restart the whole scan whenever ary is mutated
finished = false ; while not finished ; finished = true # 1.9 does not support 'retry'
ary.each { |s|
case s
when C::Label
if ss = ni[s] and ss.kind_of? C::If and not ss.belse and ss.bthen.kind_of? C::Block
# l: if (t) { ...; goto l; } => while (t) { ... }
if ss.bthen.statements.last.kind_of? C::Goto and ss.bthen.statements.last.target == s.name
ss.bthen.statements.pop
if l = ary[ary.index(ss)+1] and l.kind_of? C::Label
# gotos to just after the loop become breaks
ss.bthen.statements.grep(C::If).each { |i|
i.bthen = C::Break.new if i.bthen.kind_of? C::Goto and i.bthen.target == l.name
}
end
ary[ary.index(ss)] = C::While.new(ss.test, ss.bthen)
# l: if (t) { ...; return; } body; goto l; => while (!t) { body } ...
elsif ss.bthen.statements.last.kind_of? C::Return and g = ary[ary.index(s)+1..-1].reverse.find { |_s| _s.kind_of? C::Goto and _s.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(ss)+1...ary.index(g)], wb)
w = C::While.new(C::CExpression.negate(ss.test), wb)
ary[ary.index(ss)..ary.index(g)] = [w, *ss.bthen.statements]
finished = false ; break #retry
end
end
# l: body; goto l; => while (1) { body }
if g = ary[ary.index(s)..-1].reverse.find { |_s| _s.kind_of? C::Goto and _s.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(s)...ary.index(g)], wb)
w = C::While.new(C::CExpression[1], wb)
ary[ary.index(s)..ary.index(g)] = [w]
finished = false ; break #retry
end
# l: body; if (t) goto l; => do { body } while (t);
if g = ary[ary.index(s)..-1].reverse.find { |_s| _s.kind_of? C::If and not _s.belse and gt = _s.bthen and (gt = gt.kind_of?(C::Block) && gt.statements.length == 1 ? gt.statements.first : gt) and gt.kind_of? C::Goto and gt.target == s.name }
wb = C::Block.new(scope)
wb.statements = decompile_cseq_while(ary[ary.index(s)...ary.index(g)], wb)
w = C::DoWhile.new(g.test, wb)
ary[ary.index(s)..ary.index(g)] = [w]
finished = false ; break #retry
end
when C::If
decompile_cseq_while(s.bthen.statements, s.bthen) if s.bthen.kind_of? C::Block
decompile_cseq_while(s.belse.statements, s.belse) if s.belse.kind_of? C::Block
when C::While, C::DoWhile
decompile_cseq_while(s.body.statements, s.body) if s.body.kind_of? C::Block
end
}
end
ary
end
# TODO
# detects jump-table dispatch of the form
#   if (v < N) return ((void(*)())(tableaddr + ptrsize*v))();
# and forces disassembly of every table entry, then restarts decompilation
def decompile_cseq_switch(scope)
# strips no-op casts: ((t)x) => x
uncast = lambda { |e| e = e.rexpr while e.kind_of? C::CExpression and not e.op ; e }
walk(scope) { |s|
# XXX pfff...
next if not s.kind_of? C::If
# if (v < 12) return ((void(*)())(tableaddr+4*v))();
t = s.bthen
t = t.statements.first if t.kind_of? C::Block and t.statements.length == 1
next if not t.kind_of? C::Return or not t.respond_to? :from_instr
# already processed on a previous pass
next if t.from_instr.comment.to_a.include? 'switch'
next if not t.value.kind_of? C::CExpression or t.value.op != :funcall or t.value.rexpr != [] or not t.value.lexpr.kind_of? C::CExpression or t.value.lexpr.op
p = uncast[t.value.lexpr.rexpr]
next if not p.kind_of? C::CExpression or p.op != :* or p.lexpr
p = uncast[p.rexpr]
next if not p.kind_of? C::CExpression or p.op != :+
# split into table base (integer) and scaled index expression
r, l = uncast[p.rexpr], uncast[p.lexpr]
r, l = l, r if r.kind_of? C::CExpression
next if not r.kind_of? ::Integer or not l.kind_of? C::CExpression or l.op != :* or not l.lexpr
lr, ll = uncast[l.rexpr], uncast[l.lexpr]
lr, ll = ll, lr if not ll.kind_of? ::Integer
# the scale factor must be the pointer size
next if ll != sizeof(nil, C::Pointer.new(C::BaseType.new(:void)))
base, index = r, lr
if s.test.kind_of? C::CExpression and (s.test.op == :<= or s.test.op == :<) and s.test.lexpr == index and
s.test.rexpr.kind_of? C::CExpression and not s.test.rexpr.op and s.test.rexpr.rexpr.kind_of? ::Integer
# mark so we do not loop forever after the restart
t.from_instr.add_comment 'switch'
sup = s.test.rexpr.rexpr
rng = ((s.test.op == :<) ? (0...sup) : (0..sup))
from = t.from_instr.address
# seed an execution backtrace for every table slot, then disassemble them
rng.map { |i| @dasm.backtrace(Indirection[base+ll*i, ll, from], from, :type => :x, :origin => from, :maxdepth => 0) }
@dasm.disassemble
throw :restart, :restart
end
puts "unhandled switch() at #{t.from_instr}" if $VERBOSE
}
end
# removes labels no goto references, and the implicit trailing
# continue; at the end of while bodies
def remove_labels(scope)
  return if forbid_optimize_labels
  # collect every goto target still in use
  used_targets = []
  walk(scope) { |st|
    used_targets |= [st.target] if st.kind_of? C::Goto
  }
  # drop labels nobody jumps to
  walk(scope) { |st|
    if st.kind_of? C::Block
      st.statements.delete_if { |stmt|
        stmt.kind_of? C::Label and not used_targets.include? stmt.name
      }
    end
  }
  # a continue as the last statement of a while body is redundant
  walk(scope) { |st|
    if st.kind_of? C::While and st.body.kind_of? C::Block and st.body.statements.last.kind_of? C::Continue
      st.body.statements.pop
    end
  }
end
# checks whether a CExpression denotes the given variable:
# either var itself, or *&var for stack-resident vars
def isvar(ce, var)
  if var.stackoff and ce.kind_of? C::CExpression
    # stack vars are always accessed through a *& wrapper
    return unless ce.op == :* and not ce.lexpr
    inner = ce.rexpr
    inner = inner.rexpr while inner.kind_of? C::CExpression and not inner.op
    return unless inner.kind_of? C::CExpression and inner.op == :& and not inner.lexpr
    ce = inner.rexpr
  end
  ce == var
end
# checks whether an expression reads the value of var
def ce_read(ce_, var)
  return true if isvar(ce_, var)
  walk_ce(ce_) { |ce|
    if ce.op == :funcall
      # var used as the called function or as an argument
      break true if isvar(ce.lexpr, var) or ce.rexpr.find { |a| isvar(a, var) }
    elsif ce.op == :'='
      break true if isvar(ce.rexpr, var)
      # *&var = rhs: assignment target, but rhs may still read var
      break ce_read(ce.rexpr, var) if isvar(ce.lexpr, var)
    else
      break true if isvar(ce.lexpr, var) or isvar(ce.rexpr, var)
    end
  }
end
# checks whether an expression writes to var
def ce_write(ce_, var)
  walk_ce(ce_) { |ce|
    next unless AssignOp.include?(ce.op)
    break true if isvar(ce.lexpr, var)
    # ++/-- store their operand on the right-hand side
    break true if (ce.op == :'++' or ce.op == :'--') and isvar(ce.rexpr, var)
  }
end
# patches a set of expressions in place, replacing every occurrence
# of oldce by newce
def ce_patch(exprs, oldce, newce)
  walk_ce(exprs) { |ce|
    if ce.op == :funcall
      # callee on the left, argument list (an Array) on the right
      ce.lexpr = newce if ce.lexpr == oldce
      ce.rexpr.map! { |arg| arg == oldce ? newce : arg }
    else
      ce.lexpr = newce if ce.lexpr == oldce
      ce.rexpr = newce if ce.rexpr == oldce
    end
  }
end
# duplicate vars per domain value
# eg eax = 1; foo(eax); eax = 2; bar(eax); => eax = 1; foo(eax) eax_1 = 2; bar(eax_1);
# eax = 1; if (bla) eax = 2; foo(eax); => no change
def unalias_vars(scope, func)
  graph = c_to_graph(scope)
  # attempt to split the value domain of every local variable
  scope.symbol.dup.each_value { |v|
    # skip stack arguments and declared function arguments (__fastcall reg)
    next if v.stackoff.to_i > 0 or func.type.args.include? v
    unalias_var(v, scope, graph)
  }
end
# duplicates a var per domain value: finds each connected set of
# references to var (a "domain") via a flood fill over the expression
# graph, and renames each subsequent domain to a fresh var_aN variable
def unalias_var(var, scope, g = c_to_graph(scope))
# [label, index] of references to var (reading it, writing it, ro/wo it (eg eax = *eax => eax_0 = *eax_1))
read = {}
write = {}
ro = {}
wo = {}
# list of [l, i] for which domain is not known
unchecked = []
# mark all exprs of the graph
g.exprs.each { |label, exprs|
exprs.each_with_index { |ce, i|
if ce_read(ce, var)
if ce.op == :'=' and isvar(ce.lexpr, var) and not ce_write(ce.rexpr, var)
# var = f(var): the read belongs to the old domain, the write starts a new one
(ro[label] ||= []) << i
(wo[label] ||= []) << i
unchecked << [label, i, :up] << [label, i, :down]
else
(read[label] ||= []) << i
unchecked << [label, i]
end
elsif ce_write(ce, var)
(write[label] ||= []) << i
unchecked << [label, i]
end
}
}
# stuff when filling the domain (flood algorithm)
dom = dom_ro = dom_wo = todo_up = todo_down = nil
# flood by walking the graph up from [l, i] (excluded)
# marks stuff to walk down
walk_up = lambda { |l, i|
todo_w = [[l, i-1]]
done_w = []
while o = todo_w.pop
next if done_w.include? o
done_w << o
l, i = o
loop do
if read[l].to_a.include? i
# XXX not optimal (should mark only the uppest read)
todo_down |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
elsif write[l].to_a.include? i
todo_down |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
break
elsif wo[l].to_a.include? i
todo_down |= [[l, i]] if not dom_wo.include? [l, i, :down]
dom_wo |= [[l, i, :down]]
break
end
i -= 1
if i < 0
# reached the top of the block: continue in all predecessors
g.from_optim[l].to_a.each { |ll|
todo_w << [ll, g.exprs[ll].to_a.length-1]
}
break
end
end
end
}
# flood by walking the graph down from [l, i] (excluded)
# marks stuff to walk up
walk_down = lambda { |l, i|
todo_w = [[l, i+1]]
done_w = []
while o = todo_w.pop
next if done_w.include? o
done_w << o
l, i = o
loop do
if read[l].to_a.include? i
todo_up |= [[l, i]] if not dom.include? [l, i]
dom |= [[l, i]]
elsif write[l].to_a.include? i
# a full write ends the domain downwards
break
elsif ro[l].to_a.include? i
todo_up |= [[l, i]] if not dom_ro.include? [l, i, :up]
dom_ro |= [[l, i, :up]]
break
end
i += 1
if i >= g.exprs[l].to_a.length
# reached the end of the block: continue in all successors
g.to_optim[l].to_a.each { |ll|
todo_w << [ll, 0]
}
break
end
end
end
}
# check it out
while o = unchecked.shift
# reset the flood state for this new domain
dom = []
dom_ro = []
dom_wo = []
todo_up = []
todo_down = []
# init
if read[o[0]].to_a.include? o[1]
todo_up << o
todo_down << o
dom << o
elsif write[o[0]].to_a.include? o[1]
todo_down << o
dom << o
elsif o[2] == :up
todo_up << o
dom_ro << o
elsif o[2] == :down
todo_down << o
dom_wo << o
else raise
end
# loop
while todo_up.first or todo_down.first
todo_up.each { |oo| walk_up[oo[0], oo[1]] }
todo_up.clear
todo_down.each { |oo| walk_down[oo[0], oo[1]] }
todo_down.clear
end
# patch
# pick a fresh name var_aN not already visible in scope
n_i = 0
n_i += 1 while scope.symbol_ancestors[newvarname = "#{var.name}_a#{n_i}"]
nv = var.dup
nv.name = newvarname
scope.statements << C::Declaration.new(nv)
scope.symbol[nv.name] = nv
# rename every reference of this domain to the new variable
dom.each { |oo| ce_patch(g.exprs[oo[0]][oo[1]], var, nv) }
dom_ro.each { |oo|
ce = g.exprs[oo[0]][oo[1]]
if ce.rexpr.kind_of? C::CExpression
ce_patch(ce.rexpr, var, nv)
else
ce.rexpr = nv
end
}
dom_wo.each { |oo|
ce = g.exprs[oo[0]][oo[1]]
if ce.lexpr.kind_of? C::CExpression
ce_patch(ce.lexpr, var, nv)
else
ce.lexpr = nv
end
}
unchecked -= dom + dom_wo + dom_ro
end
end
# reverts the unaliasing rename of vars where no alias subsists:
# a lone foo_aN with no surviving foo / foo_aM gets its original name back
def simplify_varname_noalias(scope)
  names = scope.symbol.keys
  names.delete_if { |k|
    base = k[/^(.*)_a\d+$/, 1]
    next if not base
    conflict = names.find { |n| n != k and (n == base or n[/^(.*)_a\d+$/, 1] == base) }
    if not conflict
      # truthy result => k is removed from names as well
      scope.symbol[base] = scope.symbol.delete(k)
      scope.symbol[base].name = base
    end
  }
end
# patches scope to transform :frameptr+/-x into &var_x / &arg_x,
# declaring one variable per distinct stack offset
def namestackvars(scope)
off2var = {}
# returns the variable for offset o, creating and declaring it on first use
newvar = lambda { |o, n|
if not v = off2var[o]
v = off2var[o] = C::Variable.new
v.type = C::BaseType.new(:void)
v.name = n
v.stackoff = o
scope.symbol[v.name] = v
scope.statements << C::Declaration.new(v)
end
v
}
# user-supplied names first, so they win over the generated ones
scope.decompdata[:stackoff_name].each { |o, n| newvar[o, n] }
scope.decompdata[:stackoff_type].each { |o, t| newvar[o, stackoff_to_varname(o)] }
walk_ce(scope) { |e|
# match frameptr + cst / frameptr - cst
next if e.op != :+ and e.op != :-
next if not e.lexpr.kind_of? C::Variable or e.lexpr.name != 'frameptr'
next if not e.rexpr.kind_of? C::CExpression or e.rexpr.op or not e.rexpr.rexpr.kind_of? ::Integer
off = e.rexpr.rexpr
off = -off if e.op == :-
v = newvar[off, stackoff_to_varname(off)]
e.replace C::CExpression[:&, v]
}
end
# assign type to vars (regs, stack & global)
# types are found by subfunction argument types & indirections, and propagated through assignments etc
# TODO when updating the type of a var, update the type of all cexprs where it appears
def decompile_c_types(scope)
return if forbid_decompile_types
# TODO *(int8*)(ptr+8); *(int32*)(ptr+12) => automatic struct
# name => type
types = {}
pscopevar = lambda { |e|
e = e.rexpr while e.kind_of? C::CExpression and not e.op and e.rexpr.kind_of? C::CExpression
if e.kind_of? C::CExpression and e.op == :& and not e.lexpr and e.rexpr.kind_of? C::Variable
e.rexpr.name if scope.symbol[e.rexpr.name]
end
}
scopevar = lambda { |e|
e = e.rexpr if e.kind_of? C::CExpression and not e.op
if e.kind_of? C::Variable and scope.symbol[e.name]
e.name
elsif e.kind_of? C::CExpression and e.op == :* and not e.lexpr
pscopevar[e.rexpr]
end
}
globalvar = lambda { |e|
e = e.rexpr if e.kind_of? C::CExpression and not e.op
if e.kind_of? ::Integer and @dasm.get_section_at(e)
e
elsif e.kind_of? C::Variable and not scope.symbol[e.name] and @c_parser.toplevel.symbol[e.name] and @dasm.get_section_at(e.name)
e.name
end
}
# check if a newly found type for o is better than current type
# order: foo* > void* > foo
better_type = lambda { |t0, t1|
t1 == C::BaseType.new(:void) or (t0.pointer? and t1.kind_of? C::BaseType) or t0.untypedef.kind_of? C::Union or
(t0.kind_of? C::BaseType and t1.kind_of? C::BaseType and (@c_parser.typesize[t0.name] > @c_parser.typesize[t1.name] or (t0.name == t1.name and t0.qualifier))) or
(t0.pointer? and t1.pointer? and better_type[t0.untypedef.type, t1.untypedef.type])
}
update_global_type = lambda { |e, t|
if ne = new_global_var(e, t)
ne.type = t if better_type[t, ne.type] # TODO patch existing scopes using ne
# TODO rename (dword_xx -> byte_xx etc)
e = scope.symbol_ancestors[e] || e if e.kind_of? String # exe reloc
walk_ce(scope) { |ce|
ce.lexpr = ne if ce.lexpr == e
ce.rexpr = ne if ce.rexpr == e
if ce.op == :* and not ce.lexpr and ce.rexpr == ne and ne.type.pointer? and ne.type.untypedef.type.untypedef.kind_of? C::Union
# *struct -> struct->bla
ce.rexpr = structoffset(ne.type.untypedef.type.untypedef, ce.rexpr, 0, sizeof(ce.type))
elsif ce.lexpr == ne or ce.rexpr == ne
# set ce type according to l/r
# TODO set ce.parent type etc
ce.type = C::CExpression[ce.lexpr, ce.op, ce.rexpr].type
end
}
end
}
propagate_type = nil # fwd declaration
propagating = [] # recursion guard (x = &x)
# check if need to change the type of a var
# propagate_type if type is updated
update_type = lambda { |n, t|
next if propagating.include? n
o = scope.symbol[n].stackoff
next if not o and t.untypedef.kind_of? C::Union
next if o and scope.decompdata[:stackoff_type][o] and t != scope.decompdata[:stackoff_type][o]
next if t0 = types[n] and not better_type[t, t0]
next if o and (t.integral? or t.pointer?) and o % sizeof(t) != 0 # keep vars aligned
types[n] = t
next if t == t0
propagating << n
propagate_type[n, t]
propagating.delete n
next if not o
t = t.untypedef
if t.kind_of? C::Struct
t.members.to_a.each { |m|
mo = t.offsetof(@c_parser, m.name)
next if mo == 0
scope.symbol.each { |vn, vv|
update_type[vn, m.type] if vv.stackoff == o+mo
}
}
end
}
# try to update the type of a var from knowing the type of an expr (through dereferences etc)
known_type = lambda { |e, t|
loop do
e = e.rexpr while e.kind_of? C::CExpression and not e.op and e.type == t
if o = scopevar[e]
update_type[o, t]
elsif o = globalvar[e]
update_global_type[o, t]
elsif not e.kind_of? C::CExpression
elsif o = pscopevar[e] and t.pointer?
update_type[o, t.untypedef.type]
elsif e.op == :* and not e.lexpr
e = e.rexpr
t = C::Pointer.new(t)
next
elsif t.pointer? and e.op == :+ and e.lexpr.kind_of? C::CExpression and e.lexpr.type.integral? and e.rexpr.kind_of? C::Variable
e.lexpr, e.rexpr = e.rexpr, e.lexpr
next
elsif e.op == :+ and e.lexpr and e.rexpr.kind_of? C::CExpression
if not e.rexpr.op and e.rexpr.rexpr.kind_of? ::Integer
if t.pointer? and e.rexpr.rexpr < 0x1000 and (e.rexpr.rexpr % sizeof(t.untypedef.type)) == 0 # XXX relocatable + base=0..
e = e.lexpr # (int)*(x+2) === (int) *x
next
elsif globalvar[e.rexpr.rexpr]
known_type[e.lexpr, C::BaseType.new(:int)]
e = e.rexpr
next
end
elsif t.pointer? and (e.lexpr.kind_of? C::CExpression and e.lexpr.lexpr and [:<<, :>>, :*, :&].include? e.lexpr.op) or
(o = scopevar[e.lexpr] and types[o] and types[o].integral? and
!(o = scopevar[e.rexpr] and types[o] and types[o].integral?))
e.lexpr, e.rexpr = e.rexpr, e.lexpr # swap
e = e.lexpr
next
elsif t.pointer? and ((e.rexpr.kind_of? C::CExpression and e.rexpr.lexpr and [:<<, :>>, :*, :&].include? e.rexpr.op) or
(o = scopevar[e.rexpr] and types[o] and types[o].integral? and
!(o = scopevar[e.lexpr] and types[o] and types[o].integral?)))
e = e.lexpr
next
end
end
break
end
}
# we found a type for a var, propagate it through affectations
propagate_type = lambda { |var, type|
walk_ce(scope) { |ce|
next if ce.op != :'='
if ce.lexpr.kind_of? C::Variable and ce.lexpr.name == var
known_type[ce.rexpr, type]
next
end
if ce.rexpr.kind_of? C::Variable and ce.rexpr.name == var
known_type[ce.lexpr, type]
next
end
# int **x; y = **x => int y
t = type
l = ce.lexpr
while l.kind_of? C::CExpression and l.op == :* and not l.lexpr
if var == pscopevar[l.rexpr]
known_type[ce.rexpr, t]
break
elsif t.pointer?
l = l.rexpr
t = t.untypedef.type
else break
end
end
# int **x; **x = y => int y
t = type
r = ce.rexpr
while r.kind_of? C::CExpression and r.op == :* and not r.lexpr
if var == pscopevar[r.rexpr]
known_type[ce.lexpr, t]
break
elsif t.pointer?
r = r.rexpr
t = t.untypedef.type
else break
end
end
# TODO int *x; *x = *y; ?
}
}
# put all those macros in use
# use user-defined types first
scope.symbol.each_value { |v|
next if not v.kind_of? C::Variable or not v.stackoff or not t = scope.decompdata[:stackoff_type][v.stackoff]
known_type[v, t]
}
# try to infer types from C semantics
later = []
walk_ce(scope) { |ce|
if ce.op == :'=' and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == nil and ce.rexpr.rexpr.kind_of? ::Integer and
ce.rexpr.rexpr.abs < 0x10000 and (not ce.lexpr.kind_of? C::CExpression or ce.lexpr.op != :'*' or ce.lexpr.lexpr)
# var = int
known_type[ce.lexpr, ce.rexpr.type]
elsif ce.op == :funcall
f = ce.lexpr.type
f = f.untypedef.type if f.pointer?
next if not f.kind_of? C::Function
# cast func args to arg prototypes
f.args.to_a.zip(ce.rexpr).each_with_index { |(proto, arg), i| ce.rexpr[i] = C::CExpression[arg, proto.type] ; known_type[arg, proto.type] }
elsif ce.op == :* and not ce.lexpr
if e = ce.rexpr and e.kind_of? C::CExpression and not e.op and e = e.rexpr and e.kind_of? C::CExpression and
e.op == :& and not e.lexpr and e.rexpr.kind_of? C::Variable and e.rexpr.stackoff
# skip *(__int32*)&var_12 for now, avoid saying var12 is an int if it may be a ptr or anything
later << [ce.rexpr, C::Pointer.new(ce.type)]
next
end
known_type[ce.rexpr, C::Pointer.new(ce.type)]
elsif not ce.op and ce.type.pointer? and ce.type.untypedef.type.kind_of? C::Function
# cast to fptr: must be a fptr
known_type[ce.rexpr, ce.type]
end
}
later.each { |ce, t| known_type[ce, t] }
# offsets have types now
types.each { |v, t|
# keep var type qualifiers
q = scope.symbol[v].type.qualifier
scope.symbol[v].type = t
t.qualifier = q if q
}
# remove offsets to struct members
# XXX this defeats antialiasing
# off => [structoff, membername, membertype]
memb = {}
types.dup.each { |n, t|
v = scope.symbol[n]
next if not o = v.stackoff
t = t.untypedef
if t.kind_of? C::Struct
t.members.to_a.each { |tm|
moff = t.offsetof(@c_parser, tm.name)
next if moff == 0
types.delete_if { |vv, tt| scope.symbol[vv].stackoff == o+moff }
memb[o+moff] = [v, tm.name, tm.type]
}
end
}
# patch local variables into the CExprs, incl unknown offsets
varat = lambda { |n|
v = scope.symbol[n]
if s = memb[v.stackoff]
v = C::CExpression[s[0], :'.', s[1], s[2]]
else
v.type = types[n] || C::BaseType.new(:int)
end
v
}
maycast = lambda { |v, e|
if sizeof(v) != sizeof(e)
v = C::CExpression[:*, [[:&, v], C::Pointer.new(e.type)]]
end
v
}
maycast_p = lambda { |v, e|
if not e.type.pointer? or sizeof(v) != sizeof(nil, e.type.untypedef.type)
C::CExpression[[:&, v], e.type]
else
C::CExpression[:&, v]
end
}
walk_ce(scope, true) { |ce|
case
when ce.op == :funcall
ce.rexpr.map! { |re|
if o = scopevar[re]; C::CExpression[maycast[varat[o], re]]
elsif o = pscopevar[re]; C::CExpression[maycast_p[varat[o], re]]
else re
end
}
when o = scopevar[ce.lexpr]; ce.lexpr = maycast[varat[o], ce.lexpr]
when o = scopevar[ce.rexpr]; ce.rexpr = maycast[varat[o], ce.rexpr]
ce.rexpr = C::CExpression[ce.rexpr] if not ce.op and ce.rexpr.kind_of? C::Variable
when o = pscopevar[ce.lexpr]; ce.lexpr = maycast_p[varat[o], ce.lexpr]
when o = pscopevar[ce.rexpr]; ce.rexpr = maycast_p[varat[o], ce.rexpr]
when o = scopevar[ce]; ce.replace C::CExpression[maycast[varat[o], ce]]
when o = pscopevar[ce]; ce.replace C::CExpression[maycast_p[varat[o], ce]]
end
}
fix_type_overlap(scope)
fix_pointer_arithmetic(scope)
# if int32 var_4 is always var_4 & 255, change type to int8
varuse = Hash.new(0)
varandff = Hash.new(0)
varandffff = Hash.new(0)
walk_ce(scope) { |ce|
if ce.op == :& and ce.lexpr.kind_of? C::Variable and ce.lexpr.type.integral? and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer
case ce.rexpr.rexpr
when 0xff; varandff[ce.lexpr.name] += 1
when 0xffff; varandffff[ce.lexpr.name] += 1
end
end
varuse[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
varuse[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
}
varandff.each { |k, v|
scope.symbol[k].type = C::BaseType.new(:__int8, :unsigned) if varuse[k] == v
}
varandffff.each { |k, v|
scope.symbol[k].type = C::BaseType.new(:__int16, :unsigned) if varuse[k] == v
}
# propagate types to cexprs
walk_ce(scope, true) { |ce|
if ce.op
ce.type = C::CExpression[ce.lexpr, ce.op, ce.rexpr].type rescue next
if ce.op == :'=' and ce.rexpr and ce.rexpr.type != ce.type and (not ce.rexpr.type.integral? or not ce.type.integral?)
ce.rexpr = C::CExpression[[ce.rexpr], ce.type]
end
end
}
end
# struct foo { int i; int j; struct { int k; int l; } m; }; bla+12 => &bla->m.l
# st is a struct, ptr is an expr pointing to a struct, off is a numeric offset from ptr, msz is the size of the pointed member (nil ignored)
def structoffset(st, ptr, off, msz)
tabidx = off / sizeof(st)
off -= tabidx * sizeof(st)
ptr = C::CExpression[[ptr], C::Pointer.new(st)] if ptr.type.untypedef.type.untypedef != st
ptr = C::CExpression[:&, [ptr, :'[]', [tabidx]]] if tabidx != 0 or ptr.type.untypedef.kind_of? C::Array
m_ptr = lambda { |m|
if ptr.kind_of? C::CExpression and ptr.op == :& and not ptr.lexpr
C::CExpression[ptr.rexpr, :'.', m.name]
else
C::CExpression[ptr, :'->', m.name]
end
}
submemb = lambda { |sm| sm.name ? sm : sm.type.kind_of?(C::Union) ? sm.type.members.to_a.map { |ssm| submemb[ssm] } : nil }
mbs = st.members.to_a.map { |m| submemb[m] }.flatten.compact
mo = mbs.inject({}) { |h, m| h.update m => st.offsetof(@c_parser, m.name) }
if sm = mbs.find { |m| mo[m] == off and (not msz or sizeof(m) == msz) } ||
mbs.find { |m| mo[m] <= off and mo[m]+sizeof(m) > off }
off -= mo[sm]
sst = sm.type.untypedef
return ptr if mo[sm] == 0 and sst.pointer? and sst.type.untypedef == st # TODO fix infinite recursion on mutually recursive ptrs
ptr = C::CExpression[:&, m_ptr[sm]]
if sst.pointer? and sst.type.untypedef.kind_of? C::Union
structoffset(sst.type.untypedef, ptr, off, msz)
elsif off != 0
C::CExpression[[ptr, C::Pointer.new(C::BaseType.new(:__int8))], :+, [off]]
else
ptr
end
elsif off != 0
C::CExpression[[[ptr], C::Pointer.new(C::BaseType.new(:__int8))], :+, [off]]
else
ptr
end
end
# fix pointer arithmetic (eg int foo += 4 => int* foo += 1)
# use struct member access (eg *(structptr+8) => structptr->bla)
# must be run only once, right after type setting
def fix_pointer_arithmetic(scope)
walk_ce(scope, true) { |ce|
if ce.lexpr and ce.lexpr.type.pointer? and [:&, :>>, :<<].include? ce.op
ce.lexpr = C::CExpression[[ce.lexpr], C::BaseType.new(:int)]
end
if ce.op == :+ and ce.lexpr and ce.lexpr.type.integral? and ce.rexpr.type.pointer?
ce.rexpr, ce.lexpr = ce.lexpr, ce.rexpr
end
if ce.op == :* and not ce.lexpr and ce.rexpr.type.pointer? and ce.rexpr.type.untypedef.type.untypedef.kind_of? C::Struct
s = ce.rexpr.type.untypedef.type.untypedef
m = s.members.to_a.find { |m_| s.offsetof(@c_parser, m_.name) == 0 }
if sizeof(m) != sizeof(ce)
ce.rexpr = C::CExpression[[ce.rexpr, C::Pointer.new(s)], C::Pointer.new(ce.type)]
next
end
# *structptr => structptr->member
ce.lexpr = ce.rexpr
ce.op = :'->'
ce.rexpr = m.name
ce.type = m.type
next
elsif ce.op == :'=' and ce.lexpr.type.untypedef.kind_of? C::Struct
s = ce.lexpr.type.untypedef
m = s.members.to_a.find { |m_| s.offsetof(@c_parser, m_.name) == 0 }
ce.lexpr = C::CExpression.new(ce.lexpr, :'.', m.name, m.type)
ce.type = m.type
next
end
if ce.op == :+ and ce.lexpr and ce.lexpr.type.pointer? and not ce.type.pointer?
ce.type = ce.lexpr.type
end
if ce.op == :& and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :* and not ce.rexpr.lexpr
ce.replace C::CExpression[ce.rexpr.rexpr]
end
next if not ce.lexpr or not ce.lexpr.type.pointer?
if ce.op == :+ and (s = ce.lexpr.type.untypedef.type.untypedef).kind_of? C::Union and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and
ce.rexpr.rexpr.kind_of? ::Integer and o = ce.rexpr.rexpr
# structptr + 4 => &structptr->member
ce.replace structoffset(s, ce.lexpr, o, nil)
elsif [:+, :-, :'+=', :'-='].include? ce.op and ce.rexpr.kind_of? C::CExpression and ((not ce.rexpr.op and i = ce.rexpr.rexpr) or
(ce.rexpr.op == :* and i = ce.rexpr.lexpr and ((i.kind_of? C::CExpression and not i.op and i = i.rexpr) or true))) and
i.kind_of? ::Integer and psz = sizeof(nil, ce.lexpr.type.untypedef.type) and i % psz == 0
# ptr += 4 => ptr += 1
if not ce.rexpr.op
ce.rexpr.rexpr /= psz
else
ce.rexpr.lexpr.rexpr /= psz
if ce.rexpr.lexpr.rexpr == 1
ce.rexpr = ce.rexpr.rexpr
end
end
ce.type = ce.lexpr.type
elsif (ce.op == :+ or ce.op == :-) and sizeof(nil, ce.lexpr.type.untypedef.type) != 1
# ptr+x => (ptrtype*)(((__int8*)ptr)+x)
# XXX create struct ?
ce.rexpr = C::CExpression[ce.rexpr, C::BaseType.new(:int)] if not ce.rexpr.type.integral?
if sizeof(nil, ce.lexpr.type.untypedef.type) != 1
ptype = ce.lexpr.type
p = C::CExpression[[ce.lexpr], C::Pointer.new(C::BaseType.new(:__int8))]
ce.replace C::CExpression[[p, ce.op, ce.rexpr, p.type], ptype]
end
end
}
end
# handling of var overlapping (eg __int32 var_10; __int8 var_F => replace all var_F by *(&var_10 + 1))
# must be done before fix_pointer_arithmetic
def fix_type_overlap(scope)
varinfo = {}
scope.symbol.each_value { |var|
next if not off = var.stackoff
len = sizeof(var)
varinfo[var] = [off, len]
}
varinfo.each { |v1, (o1, l1)|
next if not v1.type.integral?
varinfo.each { |v2, (o2, l2)|
# XXX o1 may overlap o2 AND another (int32 v_10; int32 v_E; int32 v_C;)
# TODO should check stuff with aliasing domains
next if v1.name == v2.name or o1 >= o2+l2 or o1+l1 <= o2 or l1 > l2 or (l2 == l1 and o2 >= o1)
# v1 => *(&v2+delta)
p = C::CExpression[:&, v2]
p = C::CExpression[p, :+, [o1-o2]]
p = C::CExpression[p, C::Pointer.new(v1.type)] if v1.type != p.type.type
p = C::CExpression[:*, p]
walk_ce(scope) { |ce|
ce.lexpr = p if ce.lexpr == v1
ce.rexpr = p if ce.rexpr == v1
}
}
}
end
# to be run with scope = function body with only CExpr/Decl/Label/Goto/IfGoto/Return, with correct variables types
# will transform += 1 to ++, inline them to prev/next statement ('++x; if (x)..' => 'if (++x)..')
# remove useless variables ('int i;', i never used or 'i = 1; j = i;', i never read after => 'j = 1;')
# remove useless casts ('(int)i' with 'int i;' => 'i')
def optimize(scope)
optimize_code(scope)
optimize_vars(scope)
optimize_vars(scope) # 1st run may transform i = i+1 into i++ which second run may coalesce into if(i)
end
# simplify cexpressions (char & 255, redundant casts, etc)
def optimize_code(scope)
return if forbid_optimize_code
sametype = lambda { |t1, t2|
t1 = t1.untypedef
t2 = t2.untypedef
t1 == t2 or
(t1.kind_of? C::BaseType and t1.integral? and t2.kind_of? C::BaseType and t2.integral? and sizeof(nil, t1) == sizeof(nil, t2)) or
(t1.pointer? and t2.pointer? and sametype[t1.type, t2.type])
}
# most of this is a CExpr#reduce
future_array = []
walk_ce(scope, true) { |ce|
# *&bla => bla if types ok
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :& and not ce.rexpr.lexpr and sametype[ce.rexpr.type.untypedef.type, ce.rexpr.rexpr.type]
ce.replace C::CExpression[ce.rexpr.rexpr]
end
# int x + 0xffffffff -> x-1
if ce.lexpr and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and [:+, :-, :'+=', :'-=', :'!=', :==, :>, :<, :>=, :<=].include? ce.op and
ce.rexpr.rexpr == (1 << (8*sizeof(ce.lexpr)))-1
ce.op = {:+ => :-, :- => :+, :'+=' => :'-=', :'-=' => :'+='}[ce.op]
ce.rexpr.rexpr = 1
end
# int *ptr; *(ptr + 4) => ptr[4]
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :+ and var = ce.rexpr.lexpr and var.kind_of? C::Variable and var.type.pointer?
ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, :'[]', ce.rexpr.rexpr
future_array << var.name
end
# char x; x & 255 => x
if ce.op == :& and ce.lexpr and (ce.lexpr.type.integral? or ce.lexpr.type.pointer?) and ce.rexpr.kind_of? C::CExpression and
not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer and m = (1 << (8*sizeof(ce.lexpr))) - 1 and
ce.rexpr.rexpr & m == m
ce.replace C::CExpression[ce.lexpr]
end
# a + -b => a - b
if ce.op == :+ and ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :- and not ce.rexpr.lexpr
ce.op, ce.rexpr = :-, ce.rexpr.rexpr
end
# (((int) i >> 31) & 1) => i < 0
if ce.op == :& and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1 and
ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :>> and ce.lexpr.rexpr.kind_of? C::CExpression and
not ce.lexpr.rexpr.op and ce.lexpr.rexpr.rexpr == sizeof(ce.lexpr.lexpr) * 8 - 1
ce.replace C::CExpression[ce.lexpr.lexpr, :<, [0]]
end
# a-b == 0 => a == b
if ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and [:==, :'!=', :<, :>, :<=, :>=].include? ce.op and
ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :- and ce.lexpr.lexpr
ce.lexpr, ce.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr
end
# (a > 0) != 0
if ce.op == :'!=' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and ce.lexpr.kind_of? C::CExpression and
[:<, :<=, :>, :>=, :'==', :'!=', :'!'].include? ce.lexpr.op
ce.replace ce.lexpr
end
# (a < b) != ( [(a < 0) == !(b < 0)] && [(a < 0) != (a < b)] ) => jl
# a<b => true if !r => a<0 == b<0 or a>=0 => a>=0 or b>=0
# a>=b => true if r => a<0 == b>=0 and a<0 => a<0 and b>=0
# x != (a && (b != x)) => [x && (!a || b)] || [!x && !(!a || b)]
if ce.op == :'!=' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :< and ce.rexpr.kind_of? C::CExpression and
ce.rexpr.op == :'&&' and ce.rexpr.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.op == :'!=' and
ce.rexpr.rexpr.rexpr == ce.lexpr and not walk_ce(ce) { |ce_| break true if ce_.op == :funcall }
x, a, b = ce.lexpr, ce.rexpr.lexpr, ce.rexpr.rexpr.lexpr
ce.replace C::CExpression[ [x, :'&&', [[:'!',a],:'||',b]] , :'||', [[:'!', x], :'&&', [:'!', [[:'!',a],:'||',b]]] ]
optimize_code(ce)
end
# (a != b) || a => a || b
if ce.op == :'||' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :'!=' and ce.lexpr.lexpr == ce.rexpr and not walk_ce(ce) { |ce_| break true if ce_.op == :funcall }
ce.lexpr, ce.rexpr = ce.rexpr, ce.lexpr.rexpr
optimize_code(ce)
end
# (a<b) && !(a>=0 && b<0) || (a>=b) && (a>=0 && b<0) => (signed)a < (signed)b
if ce.op == :'||' and ce.lexpr.kind_of? C::CExpression and ce.rexpr.kind_of? C::CExpression and ce.lexpr.op == :'&&' and ce.rexpr.op == :'&&' and
ce.lexpr.lexpr.kind_of? C::CExpression and ce.lexpr.lexpr.op == :<
a, b = ce.lexpr.lexpr.lexpr, ce.lexpr.lexpr.rexpr
if ce.lexpr.rexpr === C::CExpression[[a, :'>=', [0]], :'&&', [b, :'<', [0]]].negate and
ce.rexpr.lexpr === ce.lexpr.lexpr.negate and ce.rexpr.rexpr === ce.lexpr.rexpr.negate
ce.replace C::CExpression[a, :'<', b]
end
end
# (a < b) | (a == b) => a <= b
if ce.op == :| and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :== and ce.lexpr.kind_of? C::CExpression and
(ce.lexpr.op == :< or ce.lexpr.op == :>) and ce.lexpr.lexpr == ce.rexpr.lexpr and ce.lexpr.rexpr == ce.rexpr.rexpr
ce.op = {:< => :<=, :> => :>=}[ce.lexpr.op]
ce.lexpr, ce.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr
end
# a == 0 => !a
if ce.op == :== and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0
ce.lexpr, ce.op, ce.rexpr = nil, :'!', ce.lexpr
end
# !(bool) => bool
if ce.op == :'!' and ce.rexpr.kind_of? C::CExpression and [:'==', :'!=', :<, :>, :<=, :>=, :'||', :'&&', :'!'].include? ce.rexpr.op
ce.replace ce.rexpr.negate
end
# (foo)(bar)x => (foo)x
if not ce.op and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? C::CExpression
ce.rexpr = ce.rexpr.rexpr
end
# (foo)bla => bla if bla of type foo
if not ce.op and ce.rexpr.kind_of? C::CExpression and sametype[ce.type, ce.rexpr.type]
ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, ce.rexpr.op, ce.rexpr.rexpr
end
if ce.lexpr.kind_of? C::CExpression and not ce.lexpr.op and ce.lexpr.rexpr.kind_of? C::Variable and ce.lexpr.type == ce.lexpr.rexpr.type
ce.lexpr = ce.lexpr.rexpr
end
# &struct.1stmember => &struct
if ce.op == :& and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :'.' and s = ce.rexpr.lexpr.type and
s.kind_of? C::Struct and s.offsetof(@c_parser, ce.rexpr.rexpr) == 0
ce.rexpr = ce.rexpr.lexpr
ce.type = C::Pointer.new(ce.rexpr.type)
end
# (1stmember*)structptr => &structptr->1stmember
if not ce.op and ce.type.pointer? and (ce.rexpr.kind_of? C::CExpression or ce.rexpr.kind_of? C::Variable) and ce.rexpr.type.pointer? and
s = ce.rexpr.type.untypedef.type.untypedef and s.kind_of? C::Union and ce.type.untypedef.type.untypedef != s
ce.replace C::CExpression[structoffset(s, ce.rexpr, 0, sizeof(ce.type.untypedef.type))]
end
# (&foo)->bar => foo.bar
if ce.op == :'->' and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == :& and not ce.lexpr.lexpr
ce.lexpr = ce.lexpr.rexpr
ce.op = :'.'
end
}
# if there is a ptr[4], change all *ptr to ptr[0] for consistency
# do this after the first pass, which may change &*ptr to ptr
walk_ce(scope) { |ce|
if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::Variable and future_array.include? ce.rexpr.name
ce.lexpr, ce.op, ce.rexpr = ce.rexpr, :'[]', C::CExpression[0]
end
} if not future_array.empty?
# if (x != 0) => if (x)
walk(scope) { |st|
if st.kind_of? C::If and st.test.kind_of? C::CExpression and st.test.op == :'!=' and
st.test.rexpr.kind_of? C::CExpression and not st.test.rexpr.op and st.test.rexpr.rexpr == 0
st.test = C::CExpression[st.test.lexpr]
end
}
end
# checks if an expr has sideeffects (funcall, var assignment, mem dereference, use var out of scope if specified)
def sideeffect(exp, scope=nil)
case exp
when nil, ::Numeric, ::String; false
when ::Array; exp.any? { |_e| sideeffect _e, scope }
when C::Variable; (scope and not scope.symbol[exp.name]) or exp.type.qualifier.to_a.include? :volatile
when C::CExpression; (exp.op == :* and not exp.lexpr) or exp.op == :funcall or AssignOp.include?(exp.op) or
sideeffect(exp.lexpr, scope) or sideeffect(exp.rexpr, scope)
else true # failsafe
end
end
# converts C code to a graph of cexprs (nodes = cexprs, edges = codepaths)
# returns a CGraph
class CGraph
# exprs: label => [exprs], to: label => [labels], block: label => are exprs standalone (vs If#test), start: 1st label
attr_accessor :exprs, :to, :block, :start, :to_optim, :from_optim
end
def c_to_graph(st)
g = CGraph.new
g.exprs = {} # label => [exprs]
g.to = {} # label => [labels]
g.block = {} # label => is label in a block? (vs If#test)
anon_label = 0 # when no label is there, use anon_label++
# converts C code to a graph of codepath of cexprs
to_graph = lambda { |stmt, l_cur, l_after, l_cont, l_break|
case stmt
when C::Label; g.to[l_cur] = [stmt.name] ; g.to[stmt.name] = [l_after]
when C::Goto; g.to[l_cur] = [stmt.target]
when C::Continue; g.to[l_cur] = [l_cont]
when C::Break; g.to[l_cur] = [l_break]
when C::CExpression
g.exprs[l_cur] = [stmt]
g.to[l_cur] = [l_after]
when C::Return
g.exprs[l_cur] = [stmt.value] if stmt.value
g.to[l_cur] = []
when C::Block
to_graph[stmt.statements, l_cur, l_after, l_cont, l_break]
when ::Array
g.exprs[l_cur] = []
g.block[l_cur] = true
stmt.each_with_index { |s, i|
case s
when C::Declaration
when C::CExpression
g.exprs[l_cur] << s
else
l = anon_label += 1
ll = anon_label += 1
g.to[l_cur] = [l]
g.block[l_cur] = true
to_graph[stmt[i], l, ll, l_cont, l_break]
l_cur = ll
g.exprs[l_cur] = []
end
}
g.to[l_cur] = [l_after].compact
when C::If
g.exprs[l_cur] = [stmt.test]
lt = anon_label += 1
to_graph[stmt.bthen, lt, l_after, l_cont, l_break]
le = anon_label += 1
to_graph[stmt.belse, le, l_after, l_cont, l_break]
g.to[l_cur] = [lt, le]
when C::While, C::DoWhile
la = anon_label += 1
if stmt.kind_of? C::DoWhile
lt, lb = la, l_cur
else
lt, lb = l_cur, la
end
g.exprs[lt] = [stmt.test]
g.to[lt] = [lb, l_after]
to_graph[stmt.body, lb, lt, lt, l_after]
when C::Asm, nil; g.to[l_cur] = [l_after]
else puts "to_graph unhandled #{stmt.class}: #{stmt}" if $VERBOSE
end
}
g.start = anon_label
to_graph[st, g.start, nil, nil, nil]
# optimize graph
g.to_optim = {}
g.to.each { |k, v| g.to_optim[k] = v.uniq }
g.exprs.delete_if { |k, v| v == [] }
g.to_optim.delete_if { |k, v|
if v.length == 1 and not g.exprs[k] and v != [k]
g.to_optim.each_value { |t| if i = t.index(k) ; t[i] = v.first ; end }
true
elsif v.length == 0 and not g.exprs[k]
g.to_optim.each_value { |t| t.delete k }
true
end
}
g.from_optim = {}
g.to_optim.each { |k, v| v.each { |t| (g.from_optim[t] ||= []) << k } }
g
end
# dataflow optimization
# condenses expressions (++x; if (x) => if (++x))
# remove local var assignment (x = 1; f(x); x = 2; g(x); => f(1); g(2); etc)
def optimize_vars(scope)
  # user may disable dataflow optimization entirely
  return if forbid_optimize_dataflow
  # build the basic-block graph: g.exprs (label => [cexprs]), g.to_optim / g.from_optim (edges)
  g = c_to_graph(scope)
  # walks a cexpr in evaluation order (not strictly, but this is not strictly defined anyway..)
  # returns the first subexpr to read var in ce
  # returns :write if var is rewritten
  # returns nil if var not read
  # may return a cexpr var += 2
  find_next_read_ce = lambda { |ce_, var|
    walk_ce(ce_, true) { |ce|
      case ce.op
      when :funcall
        break ce if ce.lexpr == var or ce.rexpr.find { |a| a == var }
      when :'='
        # a=a / a=a+1 => yield a, not :write
        break ce if ce.rexpr == var
        break :write if ce.lexpr == var
      else
        break ce if ce.lexpr == var or ce.rexpr == var
      end
    }
  }
  # badlabels is a list of labels that may be reached without passing through the first invocation block
  find_next_read_rec = lambda { |label, idx, var, done, badlabels|
    next if done.include? label
    done << label if idx == 0
    # scan the block's exprs forward until one reads or writes var
    idx += 1 while ce = g.exprs[label].to_a[idx] and not ret = find_next_read_ce[ce, var]
    next ret if ret
    # nothing found in this block: recurse into the successors
    to = g.to_optim[label].to_a.map { |t|
      break [:split] if badlabels.include? t
      find_next_read_rec[t, 0, var, done, badlabels]
    }.compact
    tw = to - [:write]
    if to.include? :split or tw.length > 1
      :split
    elsif tw.length == 1
      tw.first
    elsif to.include? :write
      :write
    end
  }
  # return the previous subexpr reading var with no fwd path to another reading (otherwise split), see loop comment for reason
  find_next_read = nil
  find_prev_read_rec = lambda { |label, idx, var, done|
    next if done.include? label
    done << label if idx == g.exprs[label].length-1
    # scan the block's exprs backward until one reads or writes var
    idx -= 1 while idx >= 0 and ce = g.exprs[label].to_a[idx] and not ret = find_next_read_ce[ce, var]
    if ret.kind_of? C::CExpression
      # forward check: make sure nothing after us reads var too (see comment above)
      fwchk = find_next_read[label, idx+1, var]
      ret = fwchk if not fwchk.kind_of? C::CExpression
    end
    next ret if ret
    # nothing found: recurse into predecessors
    from = g.from_optim[label].to_a.map { |f|
      find_prev_read_rec[f, g.exprs[f].to_a.length-1, var, done]
    }.compact
    next :split if from.include? :split
    fw = from - [:write]
    if fw.length == 1
      fw.first
    elsif fw.length > 1
      :split
    elsif from.include? :write
      :write
    end
  }
  # list of labels reachable without using a label
  badlab = {}
  build_badlabel = lambda { |label|
    next if badlab[label]
    badlab[label] = []
    # breadth-ish walk from the graph start, never crossing 'label'
    todo = [g.start]
    while l = todo.pop
      next if l == label or badlab[label].include? l
      badlab[label] << l
      todo.concat g.to_optim[l].to_a
    end
  }
  # returns the next subexpr where var is read
  # returns :write if var is written before being read
  # returns :split if the codepath splits with both subpath reading or codepath merges with another
  # returns nil if var is never read
  # idx is the index of the first cexpr at g.exprs[label] to look at
  find_next_read = lambda { |label, idx, var|
    find_next_read_rec[label, idx, var, [], []]
  }
  find_prev_read = lambda { |label, idx, var|
    find_prev_read_rec[label, idx, var, []]
  }
  # same as find_next_read, but returns :split if there exist a path from g.start to the read without passing through label
  find_next_read_bl = lambda { |label, idx, var|
    build_badlabel[label]
    find_next_read_rec[label, idx, var, [], badlab[label]]
  }
  # walk each node, optimize data accesses there
  # replace no longer useful exprs with CExpr[nil, nil, nil], those are wiped later.
  g.exprs.each { |label, exprs|
    next if not g.block[label]
    i = 0
    while i < exprs.length
      e = exprs[i]
      i += 1
      # TODO x = x + 1 => x += 1 => ++x here, move all other optimizations after (in optim_code)
      # needs also int & 0xffffffff -> int, *&var etc (decomp_type? optim_type?)
      if (e.op == :'++' or e.op == :'--') and v = (e.lexpr || e.rexpr) and v.kind_of? C::Variable and
          scope.symbol[v.name] and not v.type.qualifier.to_a.include? :volatile
        # NOTE(review): pos is only ever assigned :post or :prev here, yet several
        # branches below test pos == :pre and thus look unreachable — confirm
        # against upstream Metasm whether :prev should read :pre before relying on them
        next if !(pos = :post and oe = find_next_read_bl[label, i, v] and oe.kind_of? C::CExpression) and
          !(pos = :prev and oe = find_prev_read[label, i-2, v] and oe.kind_of? C::CExpression)
        next if oe.op == :& and not oe.lexpr # no &(++eax)
        # merge pre/postincrement into next/prev var usage
        # find_prev_read must fwd check when it finds something, to avoid
        # while(x) x++; return x; to be converted to while(x++); return x; (return wrong value)
        case oe.op
        when e.op
          # bla(i--); --i bla(--i); --i ++i; bla(i++) => ignore
          next if pos == :pre or oe.lexpr
          # ++i; bla(++i) => bla(i += 2)
          oe.lexpr = oe.rexpr
          oe.op = ((oe.op == :'++') ? :'+=' : :'-=')
          oe.rexpr = C::CExpression[2]
        when :'++', :'--' # opposite of e.op
          if (pos == :post and not oe.lexpr) or (pos == :pre and not oe.rexpr)
            # ++i; bla(--i) => bla(i)
            # bla(i--); ++i => bla(i)
            oe.op = nil
          elsif pos == :post
            # ++i; bla(i--) => bla(i+1)
            oe.op = ((oe.op == :'++') ? :- : :+)
            oe.rexpr = C::CExpression[1]
          elsif pos == :pre
            # bla(--i); ++i => bla(i-1)
            oe.lexpr = oe.rexpr
            oe.op = ((oe.op == :'++') ? :+ : :-)
            oe.rexpr = C::CExpression[1]
          end
        when :'+=', :'-='
          # TODO i++; i += 4 => i += 5
          next
        when *AssignOp
          next # ++i; i |= 4 => ignore
        else
          # plain usage of v: fold the pre/post-increment into the expression itself
          if pos == :post and v == oe.lexpr; oe.lexpr = C::CExpression[e.op, v]
          elsif pos == :post and v == oe.rexpr; oe.rexpr = C::CExpression[e.op, v]
          elsif pos == :prev and v == oe.rexpr; oe.rexpr = C::CExpression[v, e.op]
          elsif pos == :prev and v == oe.lexpr; oe.lexpr = C::CExpression[v, e.op]
          else raise 'foobar' # find_dir_read failed
          end
        end
        # increment merged into oe: drop e from the block and blank it
        i -= 1
        exprs.delete_at(i)
        e.lexpr = e.op = e.rexpr = nil
      elsif e.op == :'=' and v = e.lexpr and v.kind_of? C::Variable and scope.symbol[v.name] and
          not v.type.qualifier.to_a.include? :volatile and not find_next_read_ce[e.rexpr, v]
        # reduce trivial static assignments
        if (e.rexpr.kind_of? C::CExpression and iv = e.rexpr.reduce(@c_parser) and iv.kind_of? ::Integer) or
            (e.rexpr.kind_of? C::CExpression and e.rexpr.op == :& and not e.rexpr.lexpr) or
            (e.rexpr.kind_of? C::Variable and e.rexpr.type.kind_of? C::Array)
          # v is assigned a constant / address: if v is never written elsewhere,
          # propagate the constant into every reader and discard the assignment(s)
          rewritten = false
          readers = []
          discard = [e]
          g.exprs.each { |l, el|
            el.each_with_index { |ce, ci|
              if ce_write(ce, v) and [label, i-1] != [l, ci]
                if ce == e
                  discard << ce
                else
                  rewritten = true
                  break
                end
              elsif ce_read(ce, v)
                if walk_ce(ce) { |_ce| break true if _ce.op == :& and not _ce.lexpr and _ce.rexpr == v }
                  # i = 2 ; j = &i =!> j = &2
                  rewritten = true
                  break
                end
                readers << ce
              end
            } if not rewritten
          }
          if not rewritten
            ce_patch(readers, v, C::CExpression[iv || e.rexpr])
            discard.each { |d| d.lexpr = d.op = d.rexpr = nil }
            next
          end
        end
        case nr = find_next_read[label, i, v]
        when C::CExpression
          # read in one place only, try to patch rexpr in there
          r = e.rexpr
          # must check for conflicts (x = y; y += 1; foo(x) =!> foo(y))
          # XXX x = a[1]; *(a+1) = 28; foo(x)...
          isfunc = false
          depend_vars = []
          walk_ce(C::CExpression[r]) { |ce|
            isfunc = true if ce.op == :func and (not ce.lexpr.kind_of? C::Variable or
              not ce.lexpr.has_attribute('pure')) # XXX is there a C attr for func depending only on staticvars+param ?
            depend_vars << ce.lexpr if ce.lexpr.kind_of? C::Variable
            depend_vars << ce.rexpr if ce.rexpr.kind_of? C::Variable and (ce.lexpr or ce.op != :&) # a = &v; v = 12; func(a) => func(&v)
            depend_vars << ce if ce.lvalue?
            depend_vars.concat(ce.rexpr.grep(C::Variable)) if ce.rexpr.kind_of? ::Array
          }
          depend_vars.uniq!
          # XXX x = 1; if () { x = 2; } foo(x) =!> foo(1) (find_next_read will return this)
          # we'll just redo a find_next_read like
          # XXX b = &a; a = 1; *b = 2; foo(a) unhandled & generate bad C
          l_l = label
          l_i = i
          # note: 'while <each_with_index block>' loops as long as the iteration
          # is NOT interrupted by a break inside the block (break => nil => exit)
          while g.exprs[l_l].to_a.each_with_index { |ce_, n_i|
            next if n_i < l_i
            # count occurences of read v in ce_
            cnt = 0
            bad = false
            walk_ce(ce_) { |ce|
              case ce.op
              when :funcall
                bad = true if isfunc
                ce.rexpr.each { |a| cnt += 1 if a == v }
                cnt += 1 if ce.lexpr == v
              when :'='
                bad = true if depend_vars.include? ce.lexpr
                cnt += 1 if ce.rexpr == v
              else
                bad = true if (ce.op == :'++' or ce.op == :'--') and depend_vars.include? ce.rexpr
                bad = true if AssignOp.include? ce.op and depend_vars.include? ce.lexpr
                cnt += 1 if ce.lexpr == v
                cnt += 1 if ce.rexpr == v
              end
            }
            case cnt
            when 0
              break if bad
              next
            when 1 # good
              break if e.complexity > 10 and ce_.complexity > 3 # try to keep the C readable
              # x = 1; y = x; z = x; => cannot suppress x
              nr = find_next_read[l_l, n_i+1, v]
              break if (nr.kind_of? C::CExpression or nr == :split) and not walk_ce(ce_) { |ce| break true if ce.op == :'=' and ce.lexpr == v }
            else break # a = 1; b = a + a => fail
            end
            # TODO XXX x = 1; y = x; z = x;
            # substitute r for the single occurence of v inside ce_
            res = walk_ce(ce_, true) { |ce|
              case ce.op
              when :funcall
                if ce.rexpr.to_a.each_with_index { |a,i_|
                  next if a != v
                  ce.rexpr[i_] = r
                  break :done
                } == :done
                  break :done
                elsif ce.lexpr == v
                  ce.lexpr = r
                  break :done
                elsif isfunc
                  break :fail
                end
              when *AssignOp
                break :fail if not ce.lexpr and depend_vars.include? ce.rexpr # ++depend
                if ce.rexpr == v
                  ce.rexpr = r
                  break :done
                elsif ce.lexpr == v or depend_vars.include? ce.lexpr
                  break :fail
                end
              else
                break :fail if ce.op == :& and not ce.lexpr and ce.rexpr == v
                if ce.lexpr == v
                  ce.lexpr = r
                  break :done
                elsif ce.rexpr == v
                  ce_.type = r.type if not ce_.op and ce_.rexpr == v # return (int32)eax
                  ce.rexpr = r
                  break :done
                end
              end
            }
            case res
            when :done
              # substitution succeeded: delete the original assignment
              i -= 1
              exprs.delete_at(i)
              e.lexpr = e.op = e.rexpr = nil
              break
            when :fail
              break
            end
          }
            # ignore branches that will never reuse v
            may_to = g.to_optim[l_l].find_all { |to| find_next_read[to, 0, v].kind_of? C::CExpression }
            if may_to.length == 1 and to = may_to.first and to != l_l and g.from_optim[to] == [l_l]
              # single linear successor reading v: continue the scan there
              l_i = 0
              l_l = to
            else break
            end
          end
        when nil, :write
          # useless assignment (value never read later)
          # XXX foo = &bar; bar = 12; baz(*foo)
          e.replace(C::CExpression[e.rexpr])
          # remove sideeffectless subexprs
          loop do
            case e.op
            when :funcall, *AssignOp
            else
              l = (e.lexpr.kind_of? C::CExpression and sideeffect(e.lexpr))
              r = (e.rexpr.kind_of? C::CExpression and sideeffect(e.rexpr))
              if l and r # could split...
              elsif l
                e.replace(e.lexpr)
                next
              elsif r
                e.replace(e.rexpr)
                next
              else # remove the assignment altogether
                i -= 1
                exprs.delete_at(i)
                e.lexpr = e.op = e.rexpr = nil
              end
            end
            break
          end
        end
      end
    end
  }
  # wipe cexprs marked in the previous step
  walk(scope) { |st|
    next if not st.kind_of? C::Block
    st.statements.delete_if { |e| e.kind_of? C::CExpression and not e.lexpr and not e.op and not e.rexpr }
  }
  # reoptimize cexprs
  walk_ce(scope, true) { |ce|
    # redo some simplification that may become available after variable propagation
    # int8 & 255 => int8
    if ce.op == :& and ce.lexpr and ce.lexpr.type.integral? and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == (1 << (8*sizeof(ce.lexpr))) - 1
      ce.replace C::CExpression[ce.lexpr]
    end
    # int *ptr; *(ptr + 4) => ptr[4]
    if ce.op == :* and not ce.lexpr and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :+ and var = ce.rexpr.lexpr and var.kind_of? C::Variable and var.type.pointer?
      ce.lexpr, ce.op, ce.rexpr = ce.rexpr.lexpr, :'[]', ce.rexpr.rexpr
    end
    # useless casts
    if not ce.op and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and (ce.rexpr.rexpr.kind_of? C::CExpression or
        (ce.type.pointer? and ce.rexpr.rexpr == 0))
      ce.rexpr = ce.rexpr.rexpr
    end
    if not ce.op and ce.rexpr.kind_of? C::CExpression and (ce.type == ce.rexpr.type or (ce.type.integral? and ce.rexpr.type.integral?))
      ce.replace ce.rexpr
    end
    # useless casts (type)*((oeua)Ptype)
    if not ce.op and ce.rexpr.kind_of? C::CExpression and ce.rexpr.op == :* and not ce.rexpr.lexpr and ce.rexpr.rexpr.kind_of? C::CExpression and not ce.rexpr.rexpr.op and
        p = ce.rexpr.rexpr.rexpr and (p.kind_of? C::CExpression or p.kind_of? C::Variable) and p.type.pointer? and ce.type == p.type.untypedef.type
      ce.op = ce.rexpr.op
      ce.rexpr = ce.rexpr.rexpr.rexpr
    end
    # (a > 0) != 0
    if ce.op == :'!=' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0 and ce.lexpr.kind_of? C::CExpression and
        [:<, :<=, :>, :>=, :'==', :'!=', :'!'].include? ce.lexpr.op
      ce.replace ce.lexpr
    end
    # a == 0 => !a
    if ce.op == :== and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 0
      ce.replace C::CExpression[:'!', ce.lexpr]
    end
    # !(int)a => !a
    if ce.op == :'!' and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? C::CExpression
      ce.rexpr = ce.rexpr.rexpr
    end
    # (int)a < (int)b => a < b TODO uint <-> int
    if [:<, :<=, :>, :>=].include? ce.op and ce.rexpr.kind_of? C::CExpression and ce.lexpr.kind_of? C::CExpression and not ce.rexpr.op and not ce.lexpr.op and
        ce.rexpr.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.type.pointer? and ce.lexpr.rexpr.kind_of? C::CExpression and ce.lexpr.rexpr.type.pointer?
      ce.rexpr = ce.rexpr.rexpr
      ce.lexpr = ce.lexpr.rexpr
    end
    # a & 3 & 1
    while (ce.op == :& or ce.op == :|) and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr.kind_of? ::Integer and
        ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == ce.op and ce.lexpr.lexpr and
        ce.lexpr.rexpr.kind_of? C::CExpression and ce.lexpr.rexpr.rexpr.kind_of? ::Integer
      ce.lexpr, ce.rexpr.rexpr = ce.lexpr.lexpr, ce.lexpr.rexpr.rexpr.send(ce.op, ce.rexpr.rexpr)
    end
    # x = x | 4 => x |= 4
    if ce.op == :'=' and ce.rexpr.kind_of? C::CExpression and [:+, :-, :*, :/, :|, :&, :^, :>>, :<<].include? ce.rexpr.op and ce.rexpr.lexpr == ce.lexpr
      ce.op = (ce.rexpr.op.to_s + '=').to_sym
      ce.rexpr = ce.rexpr.rexpr
    end
    # x += 1 => ++x
    if (ce.op == :'+=' or ce.op == :'-=') and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1
      ce.lexpr, ce.op, ce.rexpr = nil, {:'+=' => :'++', :'-=' => :'--'}[ce.op], ce.lexpr
    end
    # --x+1 => x--
    if (ce.op == :+ or ce.op == :-) and ce.lexpr.kind_of? C::CExpression and ce.lexpr.op == {:+ => :'--', :- => :'++'}[ce.op] and
        ce.lexpr.rexpr and ce.rexpr.kind_of? C::CExpression and not ce.rexpr.op and ce.rexpr.rexpr == 1
      ce.lexpr, ce.op, ce.rexpr = ce.lexpr.rexpr, ce.lexpr.op, nil
    end
  }
end
# drop local variables that are never referenced anywhere in the scope:
# tag them with the 'unused' attribute (fastcall args need it), then remove
# their declaration statements and their symbol-table entries
def remove_unreferenced_vars(scope)
  used = {}
  mark = lambda { |node| used[node.name] = true if node.kind_of? C::Variable }
  walk_ce(scope) { |ce|
    mark[ce.lexpr]
    mark[ce.rexpr]
    ce.rexpr.each { |a| mark[a] } if ce.rexpr.kind_of?(::Array)
  }
  unused = scope.symbol.keys.reject { |name| used[name] }
  unused.each { |name| scope.symbol[name].add_attribute 'unused' } # fastcall args need it
  scope.statements.delete_if { |sm| sm.kind_of?(C::Declaration) and unused.include?(sm.var.name) }
  scope.symbol.delete_if { |name, var| unused.include? name }
end
# last pass of the decompiler: run the whole-program optimizations.
# always returns true
def finalize
  optimize_global
  true
end
# whole-program cleanup, run once all functions are decompiled:
# - if a global is only ever accessed as *var, demote its type to the pointed type
# - if a global is referenced by a single function, make it static to that function
#   (or inline its string initializer when referenced exactly once)
def optimize_global
  # check all global vars (pointers to global data)
  tl = @c_parser.toplevel
  vars = tl.symbol.keys.find_all { |k| tl.symbol[k].kind_of? C::Variable and not tl.symbol[k].type.kind_of? C::Function }
  countref = Hash.new(0)
  walk_ce(tl) { |ce|
    # XXX int foo; void bar() { int foo; } => false negative
    countref[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
    countref[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
  }
  vars.delete_if { |v| countref[v] == 0 }
  countref.delete_if { |k, v| not vars.include? k }
  # by default globals are C::Arrays
  # if all references are *foo, dereference the var type
  # TODO allow foo to appear (change to &foo) (but still disallow casts/foo+12 etc)
  countderef = Hash.new(0)
  walk_ce(tl) { |ce|
    if ce.op == :* and not ce.lexpr
      r = ce.rexpr
    elsif ce.op == :'->'
      r = C::CExpression[ce.lexpr]
    else next
    end
    # compare type.type cause var is an Array and the cast is a Pointer
    countderef[r.rexpr.name] += 1 if r.kind_of? C::CExpression and not r.op and r.rexpr.kind_of? C::Variable and
      sizeof(nil, r.type.type) == sizeof(nil, r.rexpr.type.type) rescue nil
  }
  vars.each { |n|
    if countref[n] == countderef[n]
      # every reference dereferences: rewrite *var / var->f into var / var.f
      v = tl.symbol[n]
      target = C::CExpression[:*, [v]]
      v.type = v.type.type
      v.initializer = v.initializer.first if v.initializer.kind_of? ::Array
      walk_ce(tl) { |ce|
        if ce.op == :'->' and C::CExpression[ce.lexpr] == C::CExpression[v]
          ce.op = :'.'
        elsif ce.lexpr == target
          ce.lexpr = v
        end
        ce.rexpr = v if ce.rexpr == target
        ce.lexpr, ce.op, ce.rexpr = nil, nil, v if ce == target
      }
    end
  }
  # if a global var appears only in one function, make it a static variable
  tl.statements.each { |st|
    next if not st.kind_of? C::Declaration or not st.var.type.kind_of? C::Function or not scope = st.var.initializer
    localcountref = Hash.new(0)
    walk_ce(scope) { |ce|
      localcountref[ce.rexpr.name] += 1 if ce.rexpr.kind_of? C::Variable
      localcountref[ce.lexpr.name] += 1 if ce.lexpr.kind_of? C::Variable
    }
    vars.delete_if { |n|
      next if scope.symbol[n]
      next if localcountref[n] != countref[n]
      # all references live in this function: pull the global inside
      v = tl.symbol.delete(n)
      tl.statements.delete_if { |d| d.kind_of? C::Declaration and d.var.name == n }
      if countref[n] == 1 and v.initializer.kind_of? C::CExpression and v.initializer.rexpr.kind_of? String
        # referenced once and initialized with a string: inline the literal
        walk_ce(scope) { |ce|
          if ce.rexpr.kind_of? C::Variable and ce.rexpr.name == n
            if not ce.op
              ce.replace v.initializer
            else
              ce.rexpr = v.initializer
            end
          elsif ce.lexpr.kind_of? C::Variable and ce.lexpr.name == n
            ce.lexpr = v.initializer
          end
        }
      else
        v.storage = :static
        scope.symbol[v.name] = v
        scope.statements.unshift C::Declaration.new(v)
      end
      true
    }
  }
end
# reorder statements to put decl first, move assignments to decl, move args to func prototype
def cleanup_var_decl(scope, func)
  args = func.type.args
  decl = []
  # pull declarations out of the body: positive stack offsets are
  # caller-pushed args, everything else is a local declaration
  scope.statements.delete_if { |sm|
    next if not sm.kind_of? C::Declaration
    if sm.var.stackoff.to_i > 0
      args << sm.var
    else
      decl << sm
    end
    true
  }
  # move trivial affectations to initialiser
  # XXX a = 1 ; b = a ; a = 2
  go = true # break from delete_if does not delete..
  scope.statements.delete_if { |st|
    # only leading 'var = <int constant>' statements qualify; the truthy
    # initializer assignment doubles as the delete_if return value,
    # while the else branch returns false (go = false) and stops the run
    if go and st.kind_of? C::CExpression and st.op == :'=' and st.rexpr.kind_of? C::CExpression and not st.rexpr.op and
        st.rexpr.rexpr.kind_of? ::Integer and st.lexpr.kind_of? C::Variable and scope.symbol[st.lexpr.name]
      st.lexpr.initializer = st.rexpr
    else
      go = false
    end
  }
  # reorder declarations
  scope.statements[0, 0] = decl.sort_by { |sm| [-sm.var.stackoff.to_i, sm.var.name] }
  # ensure arglist has no hole (create&add unreferenced args)
  func.type.args = []
  argoff = @c_parser.typesize[:ptr]
  args.sort_by { |sm| sm.stackoff.to_i }.each { |a|
    # XXX misalignment ?
    if not curoff = a.stackoff
      func.type.args << a # __fastcall
      next
    end
    # fill gaps in the argument stack layout with synthetic 'unused' int args
    while curoff > argoff
      wantarg = C::Variable.new
      wantarg.name = scope.decompdata[:stackoff_name][argoff] || stackoff_to_varname(argoff)
      wantarg.type = C::BaseType.new(:int)
      wantarg.attributes = ['unused']
      func.type.args << wantarg
      scope.symbol[wantarg.name] = wantarg
      argoff += @c_parser.typesize[:ptr]
    end
    func.type.args << a
    argoff += @c_parser.typesize[:ptr]
  }
end
# rename local variables from subfunc arg names
def rename_variables(scope)
  funcs = []  # every function call expression in the scope
  cntrs = []  # variables that get incremented (++)
  cmpi  = []  # variables compared against an integer constant
  walk_ce(scope) { |ce|
    funcs << ce if ce.op == :funcall
    cntrs << (ce.lexpr || ce.rexpr) if ce.op == :'++'
    cmpi << ce.lexpr if [:<, :>, :<=, :>=, :==, :'!='].include? ce.op and ce.rexpr.kind_of? C::CExpression and ce.rexpr.rexpr.kind_of? ::Integer
  }
  # rename var to name (with a numeric suffix to keep it unique); only vars that
  # still carry an autogenerated var_/arg_ name or live in a register are renamed,
  # and never to another autogenerated name
  rename = lambda { |var, name|
    var = var.rexpr if var.kind_of? C::CExpression and not var.op
    next if not var.kind_of? C::Variable or not scope.symbol[var.name] or not name
    next if (var.name !~ /^(var|arg)_/ and not var.storage == :register) or not scope.symbol[var.name] or name =~ /^(var|arg)_/
    s = scope.symbol_ancestors
    n = name
    i = 0
    n = name + "#{i+=1}" while s[n]
    scope.symbol[n] = scope.symbol.delete(var.name)
    var.name = n
  }
  # 1st pass: a var passed to a subfunction takes the formal argument's name
  funcs.each { |ce|
    next if not ce.lexpr.kind_of? C::Variable or not ce.lexpr.type.kind_of? C::Function
    ce.rexpr.to_a.zip(ce.lexpr.type.args.to_a).each { |a, fa| rename[a, fa.name] if fa }
  }
  # 2nd pass: a var whose address is passed takes the argument's name minus its 'p'/'lp' prefix
  funcs.each { |ce|
    next if not ce.lexpr.kind_of? C::Variable or not ce.lexpr.type.kind_of? C::Function
    ce.rexpr.to_a.zip(ce.lexpr.type.args.to_a).each { |a, fa|
      next if not a.kind_of? C::CExpression or a.op != :& or a.lexpr
      next if not fa or not fa.name
      rename[a.rexpr, fa.name.sub(/^l?p/, '')]
    }
  }
  # vars both incremented and compared to a constant look like loop counters
  (cntrs & cmpi).each { |v| rename[v, 'cntr'] }
end
# yield each CExpr member (recursive, allows arrays, order: self(!post), lexpr, rexpr, self(post))
# if given a non-CExpr, walks it until it finds a CExpr to yield
def walk_ce(ce, post=false, &b)
case ce
when C::CExpression
yield ce if not post
walk_ce(ce.lexpr, post, &b)
walk_ce(ce.rexpr, post, &b)
yield ce if post
when ::Array
ce.each { |ce_| walk_ce(ce_, post, &b) }
when C::Statement
case ce
when C::Block; walk_ce(ce.statements, post, &b)
when C::If
walk_ce(ce.test, post, &b)
walk_ce(ce.bthen, post, &b)
walk_ce(ce.belse, post, &b) if ce.belse
when C::While, C::DoWhile
walk_ce(ce.test, post, &b)
walk_ce(ce.body, post, &b)
when C::Return
walk_ce(ce.value, post, &b) if ce.value
end
when C::Declaration
walk_ce(ce.var.initializer, post, &b) if ce.var.initializer
end
nil
end
# yields each statement (recursive)
# yields each statement (recursive)
def walk(scope, post=false, &b)
  if scope.kind_of?(::Array)
    scope.each { |st| walk(st, post, &b) }
  elsif scope.kind_of?(C::Statement)
    b.call(scope) unless post
    if scope.kind_of?(C::Block)
      walk(scope.statements, post, &b)
    elsif scope.kind_of?(C::If)
      b.call(scope.test)
      walk(scope.bthen, post, &b)
      walk(scope.belse, post, &b) if scope.belse
    elsif scope.kind_of?(C::While) or scope.kind_of?(C::DoWhile)
      b.call(scope.test)
      walk(scope.body, post, &b)
    elsif scope.kind_of?(C::Return)
      b.call(scope.value)
    end
    b.call(scope) if post
  elsif scope.kind_of?(C::Declaration)
    walk(scope.var.initializer, post, &b) if scope.var.initializer
  end
end
# forwards to @c_parser, handles cast to Array (these should not happen btw...)
# forwards to @c_parser, handles cast to Array (these should not happen btw...)
# callable either as sizeof(var), sizeof(var, type) or sizeof(type)
def sizeof(var, type=nil)
  # allow calling with a single C::Type argument
  if type.nil? and var.kind_of?(C::Type)
    type = var
    var = nil
  end
  type ||= var.type
  # a bare Array type (no backing Variable) decays to a pointer
  if type.kind_of?(C::Array) and not var.kind_of?(C::Variable)
    return @c_parser.typesize[:ptr]
  end
  begin
    @c_parser.sizeof(var, type)
  rescue
    -1
  end
end
end
end
|
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/gui/dasm_main'
module Metasm
module Gui
# TODO invalidate dbg.disassembler on selfmodifying code
# TODO handle multiple threads, reattach, etc
# TODO customize child widgets (listing: persistent hilight of current instr, show/set breakpoints, ...)
# TODO handle debugee fork()
# main debugger widget: stacks the register view, memory dump, code listing
# and command console, and coordinates them around the Debugger instance
class DbgWidget < ContainerVBoxWidget
  attr_accessor :dbg, :console, :regs, :code, :mem, :win
  attr_accessor :watchpoint
  attr_accessor :parent_widget, :keyboard_callback, :keyboard_callback_ctrl
  # build the sub-widgets and focus the code view on the current pc
  def initialize_widget(dbg)
    @dbg = dbg
    @keyboard_callback = {}
    @keyboard_callback_ctrl = {}
    @parent_widget = nil
    @regs = DbgRegWidget.new(dbg, self)
    @mem = DisasmWidget.new(dbg.disassembler)
    @code = DisasmWidget.new(dbg.disassembler) # after mem so that dasm.gui == @code
    @console = DbgConsoleWidget.new(dbg, self)
    @code.parent_widget = self
    @mem.parent_widget = self
    @dbg.disassembler.disassemble_fast(@dbg.pc)
    # highlight the current pc in the code view, chaining any previous callback
    oldcb = @code.bg_color_callback
    @code.bg_color_callback = lambda { |a|
      if a == @dbg.pc
        'f88'
      # TODO breakpoints & stuff
      elsif oldcb; oldcb[a]
      end
    }
    # TODO popup menu, set bp, goto here, show arg in memdump..
    @children = [@code, @mem, @regs]
    add @regs, 'expand' => false # XXX
    add @mem
    add @code
    add @console
    # watchpoint: per-child expression re-resolved and re-focused after each run
    @watchpoint = { @code => @dbg.register_pc }
    @code.focus_addr(@dbg.resolve_expr(@watchpoint[@code]), :graph)
    @mem.focus_addr(0, :hex)
  end
  # refresh all views after the debugger switched to another thread
  def swapin_tid
    @regs.swapin_tid
    @dbg.disassembler.disassemble_fast(@dbg.pc)
    @children.each { |c|
      if wp = @watchpoint[c]
        c.focus_addr @dbg.resolve_expr(wp), nil, true
      end
    }
    redraw
  end
  # refresh all views after the debugger switched to another process
  def swapin_pid
    @mem.dasm = @dbg.disassembler
    @code.dasm = @dbg.disassembler
    swapin_tid
    gui_update
  end
  # keyboard shortcuts: f5 continue, f10 step over, f11 single step, f12 step out,
  # '.' focuses the console; user callbacks in @keyboard_callback take precedence
  def keypress(key)
    return true if @keyboard_callback[key] and @keyboard_callback[key][key]
    case key
    when :f5; protect { dbg_continue }
    when :f10; protect { dbg_stepover }
    when :f11; protect { dbg_singlestep }
    when :f12; protect { dbg_stepout }
    when ?.; @console.grab_focus
    else return @parent_widget ? @parent_widget.keypress(key) : false
    end
    true
  end
  # ctrl+f5: pass the current exception to the debuggee and continue
  def keypress_ctrl(key)
    return true if @keyboard_callback_ctrl[key] and @keyboard_callback_ctrl[key][key]
    case key
    when :f5; protect { @dbg.pass_current_exception ; dbg.continue }
    # NOTE(review): unlike the :f5 handler in keypress, this calls dbg.continue
    # directly instead of dbg_continue, skipping wrap_run (pre/post_dbg_run) —
    # confirm whether the missing gui-refresh polling is intentional
    else return @parent_widget ? @parent_widget.keypress_ctrl(key) : false
    end
    true
  end
  def pre_dbg_run
    @regs.pre_dbg_run
  end
  # TODO check_target always, incl when :stopped
  # poll the target state from the gui idle loop until it stops, then refresh
  def post_dbg_run
    want_redraw = true
    return if @idle_checking ||= nil # load only one bg proc
    @idle_checking = true
    Gui.idle_add {
      @dbg.check_target
      if @dbg.state == :running
        redraw if want_redraw # redraw once if the target is running (less flicker with singlestep)
        want_redraw = false
        next true
      end
      @idle_checking = false
      @dbg.dasm_invalidate
      @mem.gui_update
      @dbg.disassembler.sections.clear if @dbg.state == :dead
      @dbg.disassembler.disassemble_fast(@dbg.pc)
      @children.each { |c|
        if wp = @watchpoint[c]
          c.focus_addr @dbg.resolve_expr(wp), nil, true
        end
      }
      redraw
      false
    }
  end
  # wrap a debugger command with the pre/post run hooks
  def wrap_run
    pre_dbg_run
    yield
    post_dbg_run
  end
  def dbg_continue(*a) wrap_run { @dbg.continue(*a) } end
  def dbg_singlestep(*a) wrap_run { @dbg.singlestep(*a) } end
  def dbg_stepover(*a) wrap_run { @dbg.stepover(*a) } end
  def dbg_stepout(*a) wrap_run { @dbg.stepout(*a) } end # TODO idle_add etc
  def redraw
    super
    @console.redraw
    @regs.gui_update
    @children.each { |c| c.redraw }
  end
  def gui_update
    @console.redraw
    @children.each { |c| c.gui_update }
  end
end
# a widget that displays values of registers of a Debugger
# also controls the Debugger and commands slave windows (showing listing & memory)
# a widget that displays values of registers of a Debugger
# values are editable in place (hex nibbles); edits are buffered in
# @write_pending and only committed to the debugger on :enter
class DbgRegWidget < DrawableWidget
  attr_accessor :dbg
  def initialize_widget(dbg, parent_widget)
    @dbg = dbg
    @parent_widget = parent_widget
    @caret_x = @caret_reg = 0
    @oldcaret_x = @oldcaret_reg = 42
    # per-[pid, tid] cached state, see swapin_tid
    @tid_stuff = {}
    swapin_tid
    @reg_pos = [] # list of x y w h vx of the reg drawing on widget, vx is x of value
    @default_color_association = { :label => :black, :data => :blue, :write_pending => :darkred,
      :changed => :darkgreen, :caret => :black, :background => :white,
      :inactive => :palegrey }
  end
  # load (or lazily create) the register cache for the current thread
  def swapin_tid
    stf = @tid_stuff[[@dbg.pid, @dbg.tid]] ||= {}
    return if not @dbg.cpu
    @write_pending = stf[:write_pending] ||= {} # addr -> newvalue (bytewise)
    @registers = stf[:registers] ||= @dbg.register_list
    @flags = stf[:flags] ||= @dbg.flag_list
    # register display width in hex digits (register_size is in bits)
    @register_size = stf[:reg_sz] ||= @registers.inject(Hash.new(1)) { |h, r| h.update r => @dbg.register_size[r]/4 }
    @reg_cache = stf[:reg_cache] ||= Hash.new(0)
    @reg_cache_old = stf[:reg_cache_old] ||= {}
  end
  def initialize_visible
    gui_update
  end
  # move the caret to the register value nibble under the mouse
  def click(ex, ey)
    if p = @reg_pos.find { |x, y, w, h, vx| x <= ex and x+w >= ex and y <= ey and y+h >= ey }
      @caret_reg = @reg_pos.index(p)
      @caret_x = ((ex - p[4]) / @font_width).to_i
      rs = @register_size[@registers[@caret_reg]]
      @caret_x = rs-1 if @caret_x > rs-1
      @caret_x = 0 if @caret_x < 0
      update_caret
    end
  end
  def rightclick(x, y)
    doubleclick(x, y) # XXX
  end
  def doubleclick(x, y)
    gui_update # XXX
  end
  # render registers then flags, wrapping lines to the widget width;
  # colors: pending write > changed since last run > plain data (grey when running)
  def paint
    curaddr = 0
    x = 1
    y = 0
    w_w = width
    render = lambda { |str, color|
      draw_string_color(color, x, y, str)
      x += str.length * @font_width
    }
    @reg_pos = []
    running = (@dbg.state != :stopped)
    @registers.each { |reg|
      strlen = reg.to_s.length + 1 + @register_size[reg]
      if x + strlen*@font_width >= w_w
        x = 1
        y += @font_height
      end
      @reg_pos << [x, y, (strlen+1)*@font_width, @font_height, x+(reg.to_s.length+1)*@font_width]
      render["#{reg}=", :label]
      v = @write_pending[reg] || @reg_cache[reg]
      col = running ? :inactive : @write_pending[reg] ? :write_pending : @reg_cache_old.fetch(reg, v) != v ? :changed : :data
      render["%0#{@register_size[reg]}x " % v, col]
      x += @font_width # space
    }
    @flags.each { |reg|
      if x + @font_width >= w_w # XXX nowrap flags ?
        x = 1
        y += @font_height
      end
      @reg_pos << [x, y, @font_width, @font_height, x]
      v = @write_pending[reg] || @reg_cache[reg]
      col = running ? :inactive : @write_pending[reg] ? :write_pending : @reg_cache_old.fetch(reg, v) != v ? :changed : :data
      # flags are shown as a single letter, uppercase when set
      v = v == 0 ? reg.to_s.downcase : reg.to_s.upcase
      render[v, col]
      x += @font_width # space
    }
    if focus?
      # draw caret
      cx = @reg_pos[@caret_reg][4] + @caret_x*@font_width
      cy = @reg_pos[@caret_reg][1]
      draw_line_color(:caret, cx, cy, cx, cy+@font_height-1)
    end
    @oldcaret_x, @oldcaret_reg = @caret_x, @caret_reg
    @parent_widget.resize_child(self, width, y+@font_height)
  end
  # keyboard binding
  # basic navigation (arrows, pgup etc)
  def keypress(key)
    case key
    when :left
      if @caret_x > 0
        @caret_x -= 1
        update_caret
      end
    when :right
      if @caret_x < @register_size[@registers[@caret_reg]]-1
        @caret_x += 1
        update_caret
      end
    when :up
      if @caret_reg > 0
        @caret_reg -= 1
      else
        @caret_reg = @registers.length+@flags.length-1
      end
      @caret_x = 0
      update_caret
    when :down
      if @caret_reg < @registers.length+@flags.length-1
        @caret_reg += 1
      else
        @caret_reg = 0
      end
      @caret_x = 0
      update_caret
    when :home
      @caret_x = 0
      update_caret
    when :end
      @caret_x = @register_size[@registers[@caret_reg]]-1
      update_caret
    when :tab
      if @caret_reg < @registers.length+@flags.length-1
        @caret_reg += 1
      else
        @caret_reg = 0
      end
      @caret_x = 0
      update_caret
    when :backspace
      # TODO
    when :enter
      commit_writes
      redraw
    when :esc
      # discard all pending edits
      @write_pending.clear
      redraw
    when ?\x20..?\x7e
      # hex digit entered: patch the nibble under the caret (space = keep current)
      # the first branch handles ruby 1.9+ (?a is a String), the second ruby 1.8 (?a is an Integer)
      if ?a.kind_of?(String)
        v = key.ord
        case key
        when ?\x20; v = nil # keep current value
        when ?0..?9; v -= ?0.ord
        when ?a..?f; v -= ?a.ord-10
        when ?A..?F; v -= ?A.ord-10
        else return false
        end
      else
        case v = key
        when ?\x20; v = nil
        when ?0..?9; v -= ?0
        when ?a..?f; v -= ?a-10
        when ?A..?F; v -= ?A-10
        else return false
        end
      end
      reg = @registers[@caret_reg] || @flags[@caret_reg-@registers.length]
      rsz = @register_size[reg]
      if v and rsz != 1
        # splice the nibble into the pending value at the caret position
        oo = 4*(rsz-@caret_x-1)
        ov = @write_pending[reg] || @reg_cache[reg]
        ov &= ~(0xf << oo)
        ov |= v << oo
        @write_pending[reg] = ov
      elsif v and (v == 0 or v == 1) # TODO change z flag by typing 'z' or 'Z'
        @write_pending[reg] = v
        rsz = 1
      end
      # advance the caret (next register for 1-bit flags, next nibble otherwise)
      if rsz == 1
        @caret_reg += 1
        @caret_reg = @registers.length if @caret_reg >= @registers.length + @flags.length
      elsif @caret_x < rsz-1
        @caret_x += 1
      else
        @caret_x = 0
      end
      redraw
    else return false
    end
    true
  end
  # snapshot register values before a run, to highlight changes afterwards
  def pre_dbg_run
    @reg_cache_old.replace @reg_cache if @reg_cache
  end
  # push all pending edits to the debugger
  def commit_writes
    @write_pending.each { |k, v|
      if @registers.index(k)
        @dbg.set_reg_value(k, v)
      else
        @dbg.set_flag_value(k, v)
      end
      @reg_cache[k] = v
    }
    @write_pending.clear
  end
  # re-read every register/flag value from the debugger
  def gui_update
    @reg_cache.replace @registers.inject({}) { |h, r| h.update r => @dbg.get_reg_value(r) }
    @flags.each { |f| @reg_cache[f] = @dbg.get_flag_value(f) }
    redraw
  end
  # hint that the caret moved
  def update_caret
    return if @oldcaret_x == @caret_x and @oldcaret_reg == @caret_reg
    invalidate_caret(@oldcaret_x, 0, *@reg_pos[@oldcaret_reg].values_at(4, 1))
    invalidate_caret(@caret_x, 0, *@reg_pos[@caret_reg].values_at(4, 1))
    @oldcaret_x, @oldcaret_reg = @caret_x, @caret_reg
  end
end
# a widget that displays logs of the debugger, and a cli interface to the dbg
class DbgConsoleWidget < DrawableWidget
attr_accessor :dbg, :cmd_history, :log, :statusline, :commands, :cmd_help
# set up the console state and register ourselves as the debugger gui/log sink
def initialize_widget(dbg, parent_widget)
  @dbg = dbg
  @parent_widget = parent_widget
  @dbg.gui = self
  @log = []
  @log_length = 4000
  @log_offset = 0  # scrollback offset, in wrapped lines from the bottom
  @curline = ''
  @statusline = 'type \'help\' for help'
  @cmd_history = ['']
  @cmd_history_length = 200 # number of past commands to remember
  @cmd_histptr = nil
  @dbg.set_log_proc { |l| add_log l }
  @default_color_association = { :log => :palegrey, :curline => :white, :caret => :yellow,
    :background => :black, :status => :black, :status_bg => '088' }
  init_commands
end
# when first shown: take keyboard focus and refresh the display
def initialize_visible
  grab_focus
  gui_update
end
# forward thread-switch notification to the main debugger widget
def swapin_tid
  @parent_widget.swapin_tid
end
# forward process-switch notification to the main debugger widget
def swapin_pid
  @parent_widget.swapin_pid
end
# move the caret to the character under the mouse, clamped to the command line
def click(x, y)
  pos = (x - 1).to_i / @font_width - 1
  pos = 0 if pos < 0
  pos = @curline.length if pos > @curline.length
  @caret_x = pos
  update_caret
end
# TODO real copy/paste
# for now, copy the line under the dblclick to the clipboard
def doubleclick(x, y)
  y -= height % @font_height
  row = y.to_i / @font_height
  nrows = height / @font_height
  txt = if row == nrows - 1
    @statusline
  elsif row == nrows - 2
    @curline
  else
    @log.reverse[@log_offset + nrows - row - 3].to_s
  end
  clipboard_copy(txt)
end
# scroll the log by 3 lines per wheel notch
def mouse_wheel(dir, x, y)
  if dir == :up
    @log_offset += 3
  elsif dir == :down
    @log_offset -= 3
  end
  redraw
end
# draw, bottom to top: status bar, command line (with caret), then the
# scrollback log wrapped to the widget width
def paint
  y = height
  render = lambda { |str, color|
    draw_string_color(color, 1, y, str)
    y -= @font_height
  }
  w_w = width
  # status bar: pid:tid + state on the right, statusline on the left
  y -= @font_height
  draw_rectangle_color(:status_bg, 0, y, w_w, @font_height)
  str = "#{@dbg.pid}:#{@dbg.tid} #{@dbg.state} #{@dbg.info}"
  draw_string_color(:status, w_w-str.length*@font_width-1, y, str)
  draw_string_color(:status, 1+@font_width, y, @statusline)
  y -= @font_height
  w_w_c = w_w/@font_width
  @caret_y = y
  # command line: show a scrolled window ('~' prefix) when the caret overflows the width
  if @caret_x < w_w_c-1
    render[':' + @curline, :curline]
  else
    render['~' + @curline[@caret_x-w_w_c+2, w_w_c], :curline]
  end
  # log, newest line at the bottom, each entry wrapped to the widget width
  l_nr = -1
  lastline = nil
  @log_offset = 0 if @log_offset < 0
  @log.reverse.each { |l|
    l.scan(/.{1,#{w_w/@font_width}}/).reverse_each { |l_|
      lastline = l_
      l_nr += 1
      next if l_nr < @log_offset  # skip lines scrolled below the view
      render[l_, :log]
    }
    break if y < 0
  }
  # scrolled past the top of the log: pin the offset to the last line
  if lastline and l_nr < @log_offset
    render[lastline, :log]
    @log_offset = l_nr-1
  end
  if focus?
    cx = [@caret_x+1, w_w_c-1].min*@font_width+1
    cy = @caret_y
    draw_line_color(:caret, cx, cy, cx, cy+@font_height-1)
  end
  @oldcaret_x = @caret_x
end
# command-line editing: caret movement, command history (up/down), log
# scrolling (pgup/pgdown), tab completion, and plain character insertion
def keypress(key)
  case key
  when :left
    if @caret_x > 0
      @caret_x -= 1
      update_caret
    end
  when :right
    if @caret_x < @curline.length
      @caret_x += 1
      update_caret
    end
  when :up
    # walk back in history; a non-empty current line is stashed first
    if not @cmd_histptr
      if @curline != ''
        @cmd_history << @curline
        @cmd_histptr = 2
      else
        @cmd_histptr = 1
      end
    else
      @cmd_histptr += 1
      @cmd_histptr = 1 if @cmd_histptr > @cmd_history.length
    end
    @curline = @cmd_history[-@cmd_histptr].dup
    @caret_x = @curline.length
    update_status_cmd
    redraw
  when :down
    # walk forward in history (wraps around)
    if not @cmd_histptr
      @cmd_history << @curline if @curline != ''
      @cmd_histptr = @cmd_history.length
    else
      @cmd_histptr -= 1
      @cmd_histptr = @cmd_history.length if @cmd_histptr < 1
    end
    @curline = @cmd_history[-@cmd_histptr].dup
    @caret_x = @curline.length
    update_status_cmd
    redraw
  when :home
    @caret_x = 0
    update_caret
  when :end
    @caret_x = @curline.length
    update_caret
  when :pgup
    @log_offset += height/@font_height - 3
    redraw
  when :pgdown
    @log_offset -= height/@font_height - 3
    redraw
  when :tab
    # autocomplete: extend the command word with the longest unambiguous prefix
    if @caret_x > 0 and not @curline[0, @caret_x].index(?\ ) and st = @curline[0, @caret_x] and not @commands[st]
      keys = @commands.keys.find_all { |k| k[0, st.length] == st }
      while st.length < keys.first.to_s.length and keys.all? { |k| k[0, st.length+1] == keys.first[0, st.length+1] }
        st << keys.first[st.length]
        @curline[@caret_x, 0] = st[-1, 1]
        @caret_x += 1
      end
      update_status_cmd
      redraw
    end
  when :enter
    @cmd_histptr = nil
    handle_command
    update_status_cmd
  when :esc
  when :delete
    if @caret_x < @curline.length
      @curline[@caret_x, 1] = ''
      update_status_cmd
      redraw
    end
  when :backspace
    if @caret_x > 0
      @caret_x -= 1
      @curline[@caret_x, 1] = ''
      update_status_cmd
      redraw
    end
  when :insert
    # shift+insert pastes the clipboard at the caret
    if keyboard_state(:shift)
      txt = clipboard_paste.to_s
      @curline[@caret_x, 0] = txt
      @caret_x += txt.length
      update_status_cmd
      redraw
    end
  when Symbol; return false # avoid :shift cannot coerce to Int warning
  when ?\x20..?\x7e
    # printable char: insert at the caret
    @curline[@caret_x, 0] = key.chr
    @caret_x += 1
    update_status_cmd
    redraw
  else return false
  end
  true
end
def keypress_ctrl(key)
case key
when ?v
txt = clipboard_paste.to_s
@curline[@caret_x, 0] = txt
@caret_x += txt.length
update_status_cmd
redraw
else return false
end
true
end
def update_status_cmd
st = @curline.split.first
if @commands[st]
@statusline = "#{st}: #{@cmd_help[st]}"
else
keys = @commands.keys.find_all { |k| k[0, st.length] == st } if st
if keys and not keys.empty?
@statusline = keys.sort.join(' ')
else
@statusline = 'type \'help\' for help'
end
end
end
def new_command(*cmd, &b)
hlp = cmd.pop if cmd.last.include? ' '
cmd.each { |c|
@cmd_help[c] = hlp || 'nodoc'
@commands[c] = lambda { |*a| protect { b.call(*a) } }
}
end
# arg str -> expr value, with special codeptr/dataptr = code/data.curaddr
def parse_expr(arg)
parse_expr!(arg.dup)
end
def parse_expr!(arg)
@dbg.parse_expr!(arg) { |e|
case e.downcase
when 'code_addr', 'codeptr'; @parent_widget.code.curaddr
when 'data_addr', 'dataptr'; @parent_widget.mem.curaddr
end
}
end
def solve_expr(arg)
solve_expr!(arg.dup)
end
def solve_expr!(arg)
return if not e = parse_expr!(arg)
@dbg.resolve_expr(e)
end
def init_commands
@commands = {}
@cmd_help = {}
p = @parent_widget
new_command('help') { add_log @commands.keys.sort.join(' ') } # TODO help <subject>
new_command('d', 'focus data window on an address') { |arg| p.mem.focus_addr(solve_expr(arg)) }
new_command('db', 'display bytes in data window') { |arg| p.mem.curview.data_size = 1 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('dw', 'display bytes in data window') { |arg| p.mem.curview.data_size = 2 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('dd', 'display bytes in data window') { |arg| p.mem.curview.data_size = 4 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('u', 'focus code window on an address') { |arg| p.code.focus_addr(solve_expr(arg)) }
new_command('.', 'focus code window on current address') { p.code.focus_addr(solve_expr(@dbg.register_pc.to_s)) }
new_command('wc', 'set code window height') { |arg|
if arg == ''
p.code.curview.grab_focus
else
p.resize_child(p.code, width, arg.to_i*@font_height)
end
}
new_command('wd', 'set data window height') { |arg|
if arg == ''
p.mem.curview.grab_focus
else
p.resize_child(p.mem, width, arg.to_i*@font_height)
end
}
new_command('wp', 'set console window height') { |arg|
if arg == ''
grab_focus
else
p.resize_child(self, width, arg.to_i*@font_height)
end
}
new_command('width', 'set window width (chars)') { |arg|
if a = solve_expr(arg); p.win.width = a*@font_width
else add_log "width #{p.win.width/@font_width}"
end
}
new_command('height', 'set window height (chars)') { |arg|
if a = solve_expr(arg); p.win.height = a*@font_height
else add_log "height #{p.win.height/@font_height}"
end
}
new_command('continue', 'run', 'let the target run until something occurs') { p.dbg_continue }
new_command('stepinto', 'singlestep', 'run a single instruction of the target') { p.dbg_singlestep }
new_command('stepover', 'run a single instruction of the target, do not enter into subfunctions') { p.dbg_stepover }
new_command('stepout', 'stepover until getting out of the current function') { p.dbg_stepout }
new_command('bpx', 'set a breakpoint') { |arg|
arg =~ /^(.*?)( once)?(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, o, c, a = $1, $2, ($3 || $5), $4
o = o ? true : false
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.bpx(solve_expr(e), o, cd, &cb)
}
new_command('hwbp', 'set a hardware breakpoint') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.hwbp(solve_expr(e), :x, 1, false, cd, &cb)
}
new_command('bpm', 'set a hardware memory breakpoint: bpm r 0x4800ff 16') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
raise 'bad syntax: bpm r|w|x addr [len]' unless e =~ /^([rwx]) (.*)/i
mode = $1.downcase.to_sym
e = $2
exp = solve_expr!(e)
len = solve_expr(e) if e != ''
len ||= 1
@dbg.hwbp(exp, mode, len, false, cd, &cb)
}
new_command('g', 'wait until target reaches the specified address') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.bpx(solve_expr(e), true, cd, &cb) if arg
p.dbg_continue
}
new_command('refresh', 'redraw', 'update', 'update the target memory/register cache') {
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('bl', 'list breakpoints') {
i = -1
@dbg.breakpoint.sort.each { |a, b|
add_log "#{i+=1} #{@dbg.addrname!(a)} #{b.type} #{b.state}#{" if #{b.condition}" if b.condition}#{' do {}' if b.action}"
}
}
new_command('bc', 'clear breakpoints') { |arg|
if arg == '*'
@dbg.breakpoint.keys.each { |i| @dbg.remove_breakpoint(i) }
else
next if not i = solve_expr(arg)
i = @dbg.breakpoint.sort[i][0] if i < @dbg.breakpoint.length
@dbg.remove_breakpoint(i)
end
}
new_command('break', 'interrupt a running target') { |arg| @dbg.break ; p.post_dbg_run }
new_command('kill', 'kill the target') { |arg| @dbg.kill(arg) ; p.post_dbg_run }
new_command('detach', 'detach from the target') { @dbg.detach ; p.post_dbg_run }
new_command('r', 'read/write the content of a register') { |arg|
reg, val = arg.split(/\s+/, 2)
if reg == 'fl'
@dbg.toggle_flag(val.to_sym)
elsif not reg
@dbg.register_list.each { |r|
add_log "#{r} = #{Expression[@dbg.get_reg_value(r)]}"
}
elsif not val
add_log "#{reg} = #{Expression[@dbg.get_reg_value(reg.to_sym)]}"
else
@dbg.set_reg_value(reg.to_sym, solve_expr(val))
end
p.regs.gui_update
}
new_command('m', 'memory_dump', 'dump memory - m <addr> <len>') { |arg|
next if not addr = solve_expr!(arg)
len = solve_expr(arg) || 16
mem = @dbg.memory[addr, len]
mem.scan(/.{1,16}/m).each { |l|
hex = l.unpack('C*').map { |c| '%02x' % c }.join(' ')
asc = l.gsub(/[^\x20-\x7e]/, '.')
add_log "#{Expression[addr]} #{hex.ljust(3*16)} #{asc}"
addr += l.length
}
}
new_command('ma', 'memory_ascii', 'write memory (ascii) - ma <addr> foo bar') { |arg|
next if not addr = solve_expr!(arg)
data = arg.strip
@dbg.memory[addr, data.length] = data
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('mx', 'memory_hex', 'write memory (hex) - mx <addr> 0011223344') { |arg|
next if not addr = solve_expr!(arg)
data = [arg.delete(' ')].pack('H*')
@dbg.memory[addr, data.length] = data
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('?', 'display a value') { |arg|
next if not v = solve_expr(arg)
add_log "#{v} 0x#{v.to_s(16)} #{[v & 0xffff_ffff].pack('L').inspect} #{@dbg.addrname!(v)}"
}
new_command('exit', 'quit', 'quit the debugger interface') { p.win.destroy }
new_command('ruby', 'execute arbitrary ruby code') { |arg|
case ret = eval(arg)
when nil, true, false, Symbol; add_log ret.inspect
when String; add_log ret[0, 64].inspect
when Integer, Expression; add_log Expression[ret].to_s
else add_log "#<#{ret.class}>"
end
}
new_command('loadsyms', 'load symbols from a mapped module') { |arg|
if not arg.empty? and arg = (solve_expr(arg) rescue arg)
@dbg.loadsyms(arg)
else
@dbg.loadallsyms { |a|
@statusline = "loading symbols from #{Expression[a]}"
redraw
Gui.main_iter
}
end
p.gui_update
}
new_command('scansyms', 'scan target memory for loaded modules') {
if defined? @scan_addr and @scan_addr
add_log 'scanning @%08x' % @scan_addr
next
end
@scan_addr = 0
Gui.idle_add {
if @scan_addr <= 0xffff_f000 # cpu.size?
protect { @dbg.loadsyms(@scan_addr) }
@scan_addr += 0x1000
true
else
add_log 'scansyms finished'
@scan_addr = nil
p.gui_update
nil
end
}
}
new_command('symbol', 'display information on symbols') { |arg|
arg = arg.to_s.downcase
@dbg.symbols.map { |k, v| an = @dbg.addrname(k) ; [k, an] if an.downcase.include? arg }.compact.sort_by { |k, v| v.downcase }.each { |k, v|
add_log "#{Expression[k]} #{@dbg.addrname(k)}"
}
}
new_command('maps', 'show file mappings from parsed modules') { |arg|
want = arg.to_s.downcase
want = nil if want == ''
@dbg.modulemap.map { |n, (a_b, a_e)|
[a_b, "#{Expression[a_b]}-#{Expression[a_e]} #{n}"] if not want or n.downcase.include?(want)
}.compact.sort.each { |s1, s2|
add_log s2
}
}
new_command('rawmaps', 'show OS file mappings') { |arg|
# XXX listwindow
@dbg.mappings.sort.each { |a, l, *i|
foo = i*' '
next if arg.to_s != '' and foo !~ /#{arg}/i
add_log "%08x %06x %s" % [a, l, i*' ']
}
}
new_command('add_symbol', 'add a symbol name') { |arg|
name, val = arg.to_s.split(/\s+/, 2)
val = solve_expr(val)
if val.kind_of? Integer
@dbg.symbols[val] = name
@dbg.disassembler.set_label_at(val, name)
p.gui_update
end
}
new_command('bt', 'backtrace', 'stacktrace', 'bt [limit] - show a stack trace from current pc') { |arg|
arg = solve_expr(arg) if arg
arg = 500 if not arg.kind_of? ::Integer
@dbg.stacktrace(arg) { |a, s| add_log "#{Expression[a]} #{s}" }
}
new_command('dasm', 'disassemble_fast', 'disassembles from an address') { |arg|
addr = solve_expr(arg)
dasm = @dbg.disassembler
dasm.disassemble_fast(addr)
dasm.function_blocks(addr).keys.sort.each { |a|
next if not di = dasm.di_at(a)
dasm.dump_block(di.block) { |l| add_log l }
}
p.gui_update
}
new_command('save_hist', 'save the command buffer to a file') { |arg|
File.open(arg, 'w') { |fd| fd.puts @log }
}
new_command('watch', 'follow an expression in the data view (none to delete)') { |arg|
if arg == 'nil' or arg == 'none' or arg == 'delete'
p.watchpoint.delete p.mem
else
e = parse_expr(arg)
p.watchpoint[p.mem] = e
end
}
new_command('list_pid', 'list pids currently debugged') { |arg|
add_log @dbg.list_debug_pids.sort.map { |pp| pp == @dbg.pid ? "*#{pp}" : pp }.join(' ')
}
new_command('list_tid', 'list tids currently debugged') { |arg|
add_log @dbg.list_debug_tids.sort.map { |tt| tt == @dbg.tid ? "*#{tt}" : tt }.join(' ')
}
new_command('list_processes', 'list processes available for debugging') { |arg|
@dbg.list_processes.each { |pp|
add_log "#{pp.pid} #{pp.path}"
}
}
new_command('list_threads', 'list thread ids of the current process') { |arg|
@dbg.list_threads.each { |t|
stf = { :state => @dbg.state, :info => @dbg.info } if t == @dbg.tid
stf ||= @dbg.tid_stuff[t]
stf ||= {}
add_log "#{t} #{stf[:state]} #{stf[:info]}"
}
}
new_command('pid', 'select a pid') { |arg|
if pid = solve_expr(arg)
@dbg.pid = pid
else
add_log "pid #{@dbg.pid}"
end
}
new_command('tid', 'select a tid') { |arg|
if tid = solve_expr(arg)
@dbg.tid = tid
else
add_log "tid #{@dbg.tid} #{@dbg.state} #{@dbg.info}"
end
}
new_command('exception_pass', 'pass the exception unhandled to the target on next continue') {
@dbg.pass_current_exception
}
new_command('exception_handle', 'handle the exception, hide it from the target on next continue') {
@dbg.pass_current_exception false
}
new_command('exception_pass_all', 'ignore all target exceptions') {
@dbg.pass_all_exceptions = true
}
new_command('exception_handle_all', 'break on target exceptions') {
@dbg.pass_all_exceptions = false
}
new_command('thread_events_break', 'break on thread creation/termination') {
@dbg.ignore_newthread = false
@dbg.ignore_endthread = false
}
new_command('thread_event_ignore', 'ignore thread creation/termination') {
@dbg.ignore_newthread = true
@dbg.ignore_endthread = true
}
new_command('trace_children', 'trace children of debuggee (0|1)') { |arg|
arg = case arg.to_s.strip.downcase
when '0', 'no', 'false'; false
else true
end
add_log "trace children #{arg ? 'active' : 'inactive'}"
# update the flag for future debugee
@dbg.trace_children = arg
# change current debugee setting if supported
@dbg.do_trace_children if @dbg.respond_to?(:do_trace_children)
}
new_command('attach', 'attach to a running process') { |arg|
if pr = @dbg.list_processes.find { |pp| pp.path.to_s.downcase.include?(arg.downcase) }
pid = pr.pid
else
pid = solve_expr(arg)
end
@dbg.attach(pid)
}
new_command('create_process', 'create a new process and debug it') { |arg|
@dbg.create_process(arg)
}
@dbg.ui_command_setup(self) if @dbg.respond_to? :ui_command_setup
end
def wrap_run(&b) @parent_widget.wrap_run(&b) end
def keyboard_callback; @parent_widget.keyboard_callback end
def keyboard_callback_ctrl; @parent_widget.keyboard_callback_ctrl end
def handle_command
add_log(":#@curline")
return if @curline == ''
@cmd_history << @curline
@cmd_history.shift if @cmd_history.length > @cmd_history_length
@log_offset = 0
cmd = @curline
@curline = ''
@caret_x = 0
run_command(cmd)
end
def run_command(cmd)
cn = cmd.split.first
if not @commands[cn]
a = @commands.keys.find_all { |k| k[0, cn.length] == cn }
cn = a.first if a.length == 1
end
if pc = @commands[cn]
pc[cmd.split(/\s+/, 2)[1].to_s]
else
add_log 'unknown command'
end
end
def add_log(l)
@log << l.to_s
@log.shift if log.length > @log_length
redraw
end
def gui_update
redraw
end
# hint that the caret moved
def update_caret
return if @oldcaret_x == @caret_x
w_w = width - @font_width
x1 = (@oldcaret_x+1) * @font_width + 1
x2 = (@caret_x+1) * @font_width + 1
y = @caret_y
if x1 > w_w or x2 > w_w
invalidate(0, y, 100000, @font_height)
else
invalidate(x1-1, y, 2, @font_height)
invalidate(x2-1, y, 2, @font_height)
end
@oldcaret_x = @caret_x
end
end
class DbgWindow < Window
attr_accessor :dbg_widget
def initialize_window(dbg = nil, title='metasm debugger')
self.title = title
display(dbg) if dbg
end
# show a new DbgWidget
def display(dbg)
@dbg_widget = DbgWidget.new(dbg)
@dbg_widget.win = self
self.widget = @dbg_widget
@dbg_widget
end
def build_menu
dbgmenu = new_menu
addsubmenu(dbgmenu, 'continue', '<f5>') { @dbg_widget.dbg_continue }
addsubmenu(dbgmenu, 'step over', '<f10>') { @dbg_widget.dbg_stepover }
addsubmenu(dbgmenu, 'step into', '<f11>') { @dbg_widget.dbg_singlestep }
addsubmenu(dbgmenu, 'kill target') { @dbg_widget.dbg.kill } # destroy ?
addsubmenu(dbgmenu, 'detach target') { @dbg_widget.dbg.detach } # destroy ?
addsubmenu(dbgmenu)
addsubmenu(dbgmenu, 'QUIT') { destroy }
addsubmenu(@menu, dbgmenu, '_Actions')
end
end
end
end
# gui/dbg: avoid graph view on too big graph, align regs
# This file is part of Metasm, the Ruby assembly manipulation suite
# Copyright (C) 2006-2009 Yoann GUILLOT
#
# Licence is LGPL, see LICENCE in the top-level directory
require 'metasm/gui/dasm_main'
module Metasm
module Gui
# TODO invalidate dbg.disassembler on selfmodifying code
# main debugger widget: stacks the register, disassembly, memory-dump and
# console views, and drives the underlying Debugger object
class DbgWidget < ContainerVBoxWidget
	attr_accessor :dbg, :console, :regs, :code, :mem, :win
	attr_accessor :watchpoint
	attr_accessor :parent_widget, :keyboard_callback, :keyboard_callback_ctrl
	def initialize_widget(dbg)
		@dbg = dbg
		# key => lambda tables, checked before the built-in key bindings
		@keyboard_callback = {}
		@keyboard_callback_ctrl = {}
		@parent_widget = nil
		@regs = DbgRegWidget.new(dbg, self)
		@mem = DisasmWidget.new(dbg.disassembler)
		@code = DisasmWidget.new(dbg.disassembler) # after mem so that dasm.gui == @code
		@console = DbgConsoleWidget.new(dbg, self)
		@code.parent_widget = self
		@mem.parent_widget = self
		@dbg.disassembler.disassemble_fast(@dbg.pc)
		# highlight the current pc in the code view, chaining to any previous callback
		oldcb = @code.bg_color_callback
		@code.bg_color_callback = lambda { |a|
			if a == @dbg.pc
				'f88'
			# TODO breakpoints & stuff
			elsif oldcb; oldcb[a]
			end
		}
		# TODO popup menu, set bp, goto here, show arg in memdump..
		@children = [@code, @mem, @regs]
		add @regs, 'expand' => false # XXX
		add @mem
		add @code
		add @console
		# expression per child view, re-resolved and re-focused after each target stop
		@watchpoint = { @code => @dbg.register_pc }
		pc = @dbg.resolve_expr(@watchpoint[@code])
		# use the graph view only when the function is reasonably small
		graph = :graph if @dbg.disassembler.function_blocks(pc).to_a.length < 100
		@code.focus_addr(pc, graph)
		@mem.focus_addr(0, :hex)
	end
	# the debugger switched to another thread: refresh all thread-dependent views
	def swapin_tid
		@regs.swapin_tid
		@dbg.disassembler.disassemble_fast(@dbg.pc)
		@children.each { |c|
			if wp = @watchpoint[c]
				c.focus_addr @dbg.resolve_expr(wp), nil, true
			end
		}
		redraw
	end
	# the debugger switched to another process: rebind the disassembler and refresh
	def swapin_pid
		@mem.dasm = @dbg.disassembler
		@code.dasm = @dbg.disassembler
		swapin_tid
		gui_update
	end
	def keypress(key)
		return true if @keyboard_callback[key] and @keyboard_callback[key][key]
		case key
		when :f5; protect { dbg_continue }
		when :f10; protect { dbg_stepover }
		when :f11; protect { dbg_singlestep }
		when :f12; protect { dbg_stepout }
		when ?.; @console.grab_focus
		else return @parent_widget ? @parent_widget.keypress(key) : false
		end
		true
	end
	def keypress_ctrl(key)
		return true if @keyboard_callback_ctrl[key] and @keyboard_callback_ctrl[key][key]
		case key
		when :f5; protect { @dbg.pass_current_exception ; dbg.continue }
		else return @parent_widget ? @parent_widget.keypress_ctrl(key) : false
		end
		true
	end
	def pre_dbg_run
		@regs.pre_dbg_run
	end
	# TODO check_target always, incl when :stopped
	# called after the target was told to run: poll it from the gui idle loop
	# until it stops, then invalidate caches and refresh every view
	def post_dbg_run
		want_redraw = true
		return if @idle_checking ||= nil # load only one bg proc
		@idle_checking = true
		Gui.idle_add {
			@dbg.check_target
			if @dbg.state == :running
				redraw if want_redraw # redraw once if the target is running (less flicker with singlestep)
				want_redraw = false
				next true
			end
			@idle_checking = false
			@dbg.dasm_invalidate
			@mem.gui_update
			@dbg.disassembler.sections.clear if @dbg.state == :dead
			@dbg.disassembler.disassemble_fast(@dbg.pc)
			@children.each { |c|
				if wp = @watchpoint[c]
					c.focus_addr @dbg.resolve_expr(wp), nil, true
				end
			}
			redraw
			false
		}
	end
	# run the block between pre_dbg_run and post_dbg_run
	def wrap_run
		pre_dbg_run
		yield
		post_dbg_run
	end
	def dbg_continue(*a) wrap_run { @dbg.continue(*a) } end
	def dbg_singlestep(*a) wrap_run { @dbg.singlestep(*a) } end
	def dbg_stepover(*a) wrap_run { @dbg.stepover(*a) } end
	def dbg_stepout(*a) wrap_run { @dbg.stepout(*a) } end # TODO idle_add etc
	def redraw
		super
		@console.redraw
		@regs.gui_update
		@children.each { |c| c.redraw }
	end
	def gui_update
		@console.redraw
		@children.each { |c| c.gui_update }
	end
end
# a widget that displays values of registers of a Debugger
# also controls the Debugger and commands slave windows (showing listing & memory)
class DbgRegWidget < DrawableWidget
	attr_accessor :dbg
	def initialize_widget(dbg, parent_widget)
		@dbg = dbg
		@parent_widget = parent_widget
		@caret_x = @caret_reg = 0
		@oldcaret_x = @oldcaret_reg = 42
		# per-[pid, tid] cached register display state
		@tid_stuff = {}
		swapin_tid
		@reg_pos = [] # list of x y w h vx of the reg drawing on widget, vx is x of value
		@default_color_association = { :label => :black, :data => :blue, :write_pending => :darkred,
			:changed => :darkgreen, :caret => :black, :background => :white,
			:inactive => :palegrey }
	end
	# bind the widget state to the current thread's cached register/flag values
	def swapin_tid
		stf = @tid_stuff[[@dbg.pid, @dbg.tid]] ||= {}
		return if not @dbg.cpu
		@write_pending = stf[:write_pending] ||= {} # addr -> newvalue (bytewise)
		@registers = stf[:registers] ||= @dbg.register_list
		@flags = stf[:flags] ||= @dbg.flag_list
		# display width of each register in hex digits (default 1, used for flags)
		@register_size = stf[:reg_sz] ||= @registers.inject(Hash.new(1)) { |h, r| h.update r => @dbg.register_size[r]/4 }
		@reg_cache = stf[:reg_cache] ||= Hash.new(0)
		@reg_cache_old = stf[:reg_cache_old] ||= {}
	end
	def initialize_visible
		gui_update
	end
	# place the caret on the clicked hex digit of the clicked register value
	def click(ex, ey)
		if p = @reg_pos.find { |x, y, w, h, vx| x <= ex and x+w >= ex and y <= ey and y+h >= ey }
			@caret_reg = @reg_pos.index(p)
			@caret_x = ((ex - p[4]) / @font_width).to_i
			rs = @register_size[@registers[@caret_reg]]
			@caret_x = rs-1 if @caret_x > rs-1
			@caret_x = 0 if @caret_x < 0
			update_caret
		end
	end
	def rightclick(x, y)
		doubleclick(x, y) # XXX
	end
	def doubleclick(x, y)
		gui_update # XXX
	end
	# draw the register names and values, wrapping to new lines as needed,
	# and record each register's screen rectangle in @reg_pos for click handling
	def paint
		curaddr = 0
		x = 1
		y = 0
		w_w = width
		render = lambda { |str, color|
			draw_string_color(color, x, y, str)
			x += str.length * @font_width
		}
		@reg_pos = []
		running = (@dbg.state != :stopped)
		# pad every register name to the longest one so values line up
		regstrlen = @registers.map { |reg| reg.to_s.length + 1 }.max
		@registers.each { |reg|
			strlen = regstrlen + @register_size[reg]
			# wrap to the next line when the entry would overflow the widget width
			if x + strlen*@font_width >= w_w
				x = 1
				y += @font_height
			end
			@reg_pos << [x, y, (strlen+1)*@font_width, @font_height, x+regstrlen*@font_width]
			render["#{reg}=".ljust(regstrlen), :label]
			v = @write_pending[reg] || @reg_cache[reg]
			# color shows pending edits and values changed since the last run
			col = running ? :inactive : @write_pending[reg] ? :write_pending : @reg_cache_old.fetch(reg, v) != v ? :changed : :data
			render["%0#{@register_size[reg]}x " % v, col]
			x += @font_width # space
		}
		@flags.each { |reg|
			if x + @font_width >= w_w # XXX nowrap flags ?
				x = 1
				y += @font_height
			end
			@reg_pos << [x, y, @font_width, @font_height, x]
			v = @write_pending[reg] || @reg_cache[reg]
			col = running ? :inactive : @write_pending[reg] ? :write_pending : @reg_cache_old.fetch(reg, v) != v ? :changed : :data
			# flags render as their name: uppercase when set, lowercase when clear
			v = v == 0 ? reg.to_s.downcase : reg.to_s.upcase
			render[v, col]
			x += @font_width # space
		}
		if focus?
			# draw caret
			cx = @reg_pos[@caret_reg][4] + @caret_x*@font_width
			cy = @reg_pos[@caret_reg][1]
			draw_line_color(:caret, cx, cy, cx, cy+@font_height-1)
		end
		@oldcaret_x, @oldcaret_reg = @caret_x, @caret_reg
		@parent_widget.resize_child(self, width, y+@font_height)
	end
	# keyboard binding
	# basic navigation (arrows, pgup etc)
	def keypress(key)
		case key
		when :left
			if @caret_x > 0
				@caret_x -= 1
				update_caret
			end
		when :right
			if @caret_x < @register_size[@registers[@caret_reg]]-1
				@caret_x += 1
				update_caret
			end
		when :up
			if @caret_reg > 0
				@caret_reg -= 1
			else
				@caret_reg = @registers.length+@flags.length-1
			end
			@caret_x = 0
			update_caret
		when :down
			if @caret_reg < @registers.length+@flags.length-1
				@caret_reg += 1
			else
				@caret_reg = 0
			end
			@caret_x = 0
			update_caret
		when :home
			@caret_x = 0
			update_caret
		when :end
			@caret_x = @register_size[@registers[@caret_reg]]-1
			update_caret
		when :tab
			if @caret_reg < @registers.length+@flags.length-1
				@caret_reg += 1
			else
				@caret_reg = 0
			end
			@caret_x = 0
			update_caret
		when :backspace
			# TODO
		when :enter
			commit_writes
			redraw
		when :esc
			# discard pending register edits
			@write_pending.clear
			redraw
		when ?\x20..?\x7e
			# hex digit entry: patch one nibble of the selected register
			if ?a.kind_of?(String)
				# ruby 1.9+: key is a 1-char String
				v = key.ord
				case key
				when ?\x20; v = nil # keep current value
				when ?0..?9; v -= ?0.ord
				when ?a..?f; v -= ?a.ord-10
				when ?A..?F; v -= ?A.ord-10
				else return false
				end
			else
				# ruby 1.8: key is an Integer char code
				case v = key
				when ?\x20; v = nil
				when ?0..?9; v -= ?0
				when ?a..?f; v -= ?a-10
				when ?A..?F; v -= ?A-10
				else return false
				end
			end
			reg = @registers[@caret_reg] || @flags[@caret_reg-@registers.length]
			rsz = @register_size[reg]
			if v and rsz != 1
				# replace the nibble under the caret in the pending value
				oo = 4*(rsz-@caret_x-1)
				ov = @write_pending[reg] || @reg_cache[reg]
				ov &= ~(0xf << oo)
				ov |= v << oo
				@write_pending[reg] = ov
			elsif v and (v == 0 or v == 1) # TODO change z flag by typing 'z' or 'Z'
				@write_pending[reg] = v
				rsz = 1
			end
			# advance the caret: next flag for 1-digit entries, next nibble otherwise
			if rsz == 1
				@caret_reg += 1
				@caret_reg = @registers.length if @caret_reg >= @registers.length + @flags.length
			elsif @caret_x < rsz-1
				@caret_x += 1
			else
				@caret_x = 0
			end
			redraw
		else return false
		end
		true
	end
	def pre_dbg_run
		# snapshot register values, so that changes can be highlighted after the run
		@reg_cache_old.replace @reg_cache if @reg_cache
	end
	# push all pending register/flag edits to the debugger backend
	def commit_writes
		@write_pending.each { |k, v|
			if @registers.index(k)
				@dbg.set_reg_value(k, v)
			else
				@dbg.set_flag_value(k, v)
			end
			@reg_cache[k] = v
		}
		@write_pending.clear
	end
	def gui_update
		# re-read every register and flag value from the debugger
		@reg_cache.replace @registers.inject({}) { |h, r| h.update r => @dbg.get_reg_value(r) }
		@flags.each { |f| @reg_cache[f] = @dbg.get_flag_value(f) }
		redraw
	end
	# hint that the caret moved
	def update_caret
		return if @oldcaret_x == @caret_x and @oldcaret_reg == @caret_reg
		invalidate_caret(@oldcaret_x, 0, *@reg_pos[@oldcaret_reg].values_at(4, 1))
		invalidate_caret(@caret_x, 0, *@reg_pos[@caret_reg].values_at(4, 1))
		@oldcaret_x, @oldcaret_reg = @caret_x, @caret_reg
	end
end
# a widget that displays logs of the debugger, and a cli interface to the dbg
class DbgConsoleWidget < DrawableWidget
attr_accessor :dbg, :cmd_history, :log, :statusline, :commands, :cmd_help
def initialize_widget(dbg, parent_widget)
@dbg = dbg
@parent_widget = parent_widget
@dbg.gui = self
@log = []
@log_length = 4000
@log_offset = 0
@curline = ''
@statusline = 'type \'help\' for help'
@cmd_history = ['']
@cmd_history_length = 200 # number of past commands to remember
@cmd_histptr = nil
@dbg.set_log_proc { |l| add_log l }
@default_color_association = { :log => :palegrey, :curline => :white, :caret => :yellow,
:background => :black, :status => :black, :status_bg => '088' }
init_commands
end
def initialize_visible
grab_focus
gui_update
end
def swapin_tid
@parent_widget.swapin_tid
end
def swapin_pid
@parent_widget.swapin_pid
end
def click(x, y)
@caret_x = (x-1).to_i / @font_width - 1
@caret_x = [[@caret_x, 0].max, @curline.length].min
update_caret
end
def doubleclick(x, y)
# TODO real copy/paste
# for now, copy the line under the dblclick
y -= height % @font_height
y = y.to_i / @font_height
hc = height / @font_height
if y == hc - 1
txt = @statusline
elsif y == hc - 2
txt = @curline
else
txt = @log.reverse[@log_offset + hc - y - 3].to_s
end
clipboard_copy(txt)
end
def mouse_wheel(dir, x, y)
case dir
when :up; @log_offset += 3
when :down; @log_offset -= 3
end
redraw
end
def paint
y = height
render = lambda { |str, color|
draw_string_color(color, 1, y, str)
y -= @font_height
}
w_w = width
y -= @font_height
draw_rectangle_color(:status_bg, 0, y, w_w, @font_height)
str = "#{@dbg.pid}:#{@dbg.tid} #{@dbg.state} #{@dbg.info}"
draw_string_color(:status, w_w-str.length*@font_width-1, y, str)
draw_string_color(:status, 1+@font_width, y, @statusline)
y -= @font_height
w_w_c = w_w/@font_width
@caret_y = y
if @caret_x < w_w_c-1
render[':' + @curline, :curline]
else
render['~' + @curline[@caret_x-w_w_c+2, w_w_c], :curline]
end
l_nr = -1
lastline = nil
@log_offset = 0 if @log_offset < 0
@log.reverse.each { |l|
l.scan(/.{1,#{w_w/@font_width}}/).reverse_each { |l_|
lastline = l_
l_nr += 1
next if l_nr < @log_offset
render[l_, :log]
}
break if y < 0
}
if lastline and l_nr < @log_offset
render[lastline, :log]
@log_offset = l_nr-1
end
if focus?
cx = [@caret_x+1, w_w_c-1].min*@font_width+1
cy = @caret_y
draw_line_color(:caret, cx, cy, cx, cy+@font_height-1)
end
@oldcaret_x = @caret_x
end
def keypress(key)
case key
when :left
if @caret_x > 0
@caret_x -= 1
update_caret
end
when :right
if @caret_x < @curline.length
@caret_x += 1
update_caret
end
when :up
if not @cmd_histptr
if @curline != ''
@cmd_history << @curline
@cmd_histptr = 2
else
@cmd_histptr = 1
end
else
@cmd_histptr += 1
@cmd_histptr = 1 if @cmd_histptr > @cmd_history.length
end
@curline = @cmd_history[-@cmd_histptr].dup
@caret_x = @curline.length
update_status_cmd
redraw
when :down
if not @cmd_histptr
@cmd_history << @curline if @curline != ''
@cmd_histptr = @cmd_history.length
else
@cmd_histptr -= 1
@cmd_histptr = @cmd_history.length if @cmd_histptr < 1
end
@curline = @cmd_history[-@cmd_histptr].dup
@caret_x = @curline.length
update_status_cmd
redraw
when :home
@caret_x = 0
update_caret
when :end
@caret_x = @curline.length
update_caret
when :pgup
@log_offset += height/@font_height - 3
redraw
when :pgdown
@log_offset -= height/@font_height - 3
redraw
when :tab
# autocomplete
if @caret_x > 0 and not @curline[0, @caret_x].index(?\ ) and st = @curline[0, @caret_x] and not @commands[st]
keys = @commands.keys.find_all { |k| k[0, st.length] == st }
while st.length < keys.first.to_s.length and keys.all? { |k| k[0, st.length+1] == keys.first[0, st.length+1] }
st << keys.first[st.length]
@curline[@caret_x, 0] = st[-1, 1]
@caret_x += 1
end
update_status_cmd
redraw
end
when :enter
@cmd_histptr = nil
handle_command
update_status_cmd
when :esc
when :delete
if @caret_x < @curline.length
@curline[@caret_x, 1] = ''
update_status_cmd
redraw
end
when :backspace
if @caret_x > 0
@caret_x -= 1
@curline[@caret_x, 1] = ''
update_status_cmd
redraw
end
when :insert
if keyboard_state(:shift)
txt = clipboard_paste.to_s
@curline[@caret_x, 0] = txt
@caret_x += txt.length
update_status_cmd
redraw
end
when Symbol; return false # avoid :shift cannot coerce to Int warning
when ?\x20..?\x7e
@curline[@caret_x, 0] = key.chr
@caret_x += 1
update_status_cmd
redraw
else return false
end
true
end
def keypress_ctrl(key)
case key
when ?v
txt = clipboard_paste.to_s
@curline[@caret_x, 0] = txt
@caret_x += txt.length
update_status_cmd
redraw
else return false
end
true
end
def update_status_cmd
st = @curline.split.first
if @commands[st]
@statusline = "#{st}: #{@cmd_help[st]}"
else
keys = @commands.keys.find_all { |k| k[0, st.length] == st } if st
if keys and not keys.empty?
@statusline = keys.sort.join(' ')
else
@statusline = 'type \'help\' for help'
end
end
end
def new_command(*cmd, &b)
hlp = cmd.pop if cmd.last.include? ' '
cmd.each { |c|
@cmd_help[c] = hlp || 'nodoc'
@commands[c] = lambda { |*a| protect { b.call(*a) } }
}
end
# arg str -> expr value, with special codeptr/dataptr = code/data.curaddr
def parse_expr(arg)
parse_expr!(arg.dup)
end
def parse_expr!(arg)
@dbg.parse_expr!(arg) { |e|
case e.downcase
when 'code_addr', 'codeptr'; @parent_widget.code.curaddr
when 'data_addr', 'dataptr'; @parent_widget.mem.curaddr
end
}
end
def solve_expr(arg)
solve_expr!(arg.dup)
end
def solve_expr!(arg)
return if not e = parse_expr!(arg)
@dbg.resolve_expr(e)
end
def init_commands
@commands = {}
@cmd_help = {}
p = @parent_widget
new_command('help') { add_log @commands.keys.sort.join(' ') } # TODO help <subject>
new_command('d', 'focus data window on an address') { |arg| p.mem.focus_addr(solve_expr(arg)) }
new_command('db', 'display bytes in data window') { |arg| p.mem.curview.data_size = 1 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('dw', 'display bytes in data window') { |arg| p.mem.curview.data_size = 2 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('dd', 'display bytes in data window') { |arg| p.mem.curview.data_size = 4 ; p.mem.gui_update ; @commands['d'][arg] }
new_command('u', 'focus code window on an address') { |arg| p.code.focus_addr(solve_expr(arg)) }
new_command('.', 'focus code window on current address') { p.code.focus_addr(solve_expr(@dbg.register_pc.to_s)) }
new_command('wc', 'set code window height') { |arg|
if arg == ''
p.code.curview.grab_focus
else
p.resize_child(p.code, width, arg.to_i*@font_height)
end
}
new_command('wd', 'set data window height') { |arg|
if arg == ''
p.mem.curview.grab_focus
else
p.resize_child(p.mem, width, arg.to_i*@font_height)
end
}
new_command('wp', 'set console window height') { |arg|
if arg == ''
grab_focus
else
p.resize_child(self, width, arg.to_i*@font_height)
end
}
new_command('width', 'set window width (chars)') { |arg|
if a = solve_expr(arg); p.win.width = a*@font_width
else add_log "width #{p.win.width/@font_width}"
end
}
new_command('height', 'set window height (chars)') { |arg|
if a = solve_expr(arg); p.win.height = a*@font_height
else add_log "height #{p.win.height/@font_height}"
end
}
new_command('continue', 'run', 'let the target run until something occurs') { p.dbg_continue }
new_command('stepinto', 'singlestep', 'run a single instruction of the target') { p.dbg_singlestep }
new_command('stepover', 'run a single instruction of the target, do not enter into subfunctions') { p.dbg_stepover }
new_command('stepout', 'stepover until getting out of the current function') { p.dbg_stepout }
new_command('bpx', 'set a breakpoint') { |arg|
arg =~ /^(.*?)( once)?(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, o, c, a = $1, $2, ($3 || $5), $4
o = o ? true : false
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.bpx(solve_expr(e), o, cd, &cb)
}
new_command('hwbp', 'set a hardware breakpoint') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.hwbp(solve_expr(e), :x, 1, false, cd, &cb)
}
new_command('bpm', 'set a hardware memory breakpoint: bpm r 0x4800ff 16') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
raise 'bad syntax: bpm r|w|x addr [len]' unless e =~ /^([rwx]) (.*)/i
mode = $1.downcase.to_sym
e = $2
exp = solve_expr!(e)
len = solve_expr(e) if e != ''
len ||= 1
@dbg.hwbp(exp, mode, len, false, cd, &cb)
}
new_command('g', 'wait until target reaches the specified address') { |arg|
arg =~ /^(.*?)(?: if (.*?))?(?: do (.*?))?(?: if (.*?))?$/i
e, c, a = $1, ($2 || $4), $3
cd = parse_expr(c) if c
cb = lambda { a.split(';').each { |aaa| run_command(aaa) } } if a
@dbg.bpx(solve_expr(e), true, cd, &cb) if arg
p.dbg_continue
}
new_command('refresh', 'redraw', 'update', 'update the target memory/register cache') {
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('bl', 'list breakpoints') {
i = -1
@dbg.breakpoint.sort.each { |a, b|
add_log "#{i+=1} #{@dbg.addrname!(a)} #{b.type} #{b.state}#{" if #{b.condition}" if b.condition}#{' do {}' if b.action}"
}
}
new_command('bc', 'clear breakpoints') { |arg|
if arg == '*'
@dbg.breakpoint.keys.each { |i| @dbg.remove_breakpoint(i) }
else
next if not i = solve_expr(arg)
i = @dbg.breakpoint.sort[i][0] if i < @dbg.breakpoint.length
@dbg.remove_breakpoint(i)
end
}
new_command('break', 'interrupt a running target') { |arg| @dbg.break ; p.post_dbg_run }
new_command('kill', 'kill the target') { |arg| @dbg.kill(arg) ; p.post_dbg_run }
new_command('detach', 'detach from the target') { @dbg.detach ; p.post_dbg_run }
new_command('r', 'read/write the content of a register') { |arg|
reg, val = arg.split(/\s+/, 2)
if reg == 'fl'
@dbg.toggle_flag(val.to_sym)
elsif not reg
@dbg.register_list.each { |r|
add_log "#{r} = #{Expression[@dbg.get_reg_value(r)]}"
}
elsif not val
add_log "#{reg} = #{Expression[@dbg.get_reg_value(reg.to_sym)]}"
else
@dbg.set_reg_value(reg.to_sym, solve_expr(val))
end
p.regs.gui_update
}
new_command('m', 'memory_dump', 'dump memory - m <addr> <len>') { |arg|
next if not addr = solve_expr!(arg)
len = solve_expr(arg) || 16
mem = @dbg.memory[addr, len]
mem.scan(/.{1,16}/m).each { |l|
hex = l.unpack('C*').map { |c| '%02x' % c }.join(' ')
asc = l.gsub(/[^\x20-\x7e]/, '.')
add_log "#{Expression[addr]} #{hex.ljust(3*16)} #{asc}"
addr += l.length
}
}
new_command('ma', 'memory_ascii', 'write memory (ascii) - ma <addr> foo bar') { |arg|
next if not addr = solve_expr!(arg)
data = arg.strip
@dbg.memory[addr, data.length] = data
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('mx', 'memory_hex', 'write memory (hex) - mx <addr> 0011223344') { |arg|
next if not addr = solve_expr!(arg)
data = [arg.delete(' ')].pack('H*')
@dbg.memory[addr, data.length] = data
@dbg.invalidate
@dbg.dasm_invalidate
p.gui_update
}
new_command('?', 'display a value') { |arg|
next if not v = solve_expr(arg)
add_log "#{v} 0x#{v.to_s(16)} #{[v & 0xffff_ffff].pack('L').inspect} #{@dbg.addrname!(v)}"
}
new_command('exit', 'quit', 'quit the debugger interface') { p.win.destroy }
new_command('ruby', 'execute arbitrary ruby code') { |arg|
case ret = eval(arg)
when nil, true, false, Symbol; add_log ret.inspect
when String; add_log ret[0, 64].inspect
when Integer, Expression; add_log Expression[ret].to_s
else add_log "#<#{ret.class}>"
end
}
new_command('loadsyms', 'load symbols from a mapped module') { |arg|
if not arg.empty? and arg = (solve_expr(arg) rescue arg)
@dbg.loadsyms(arg)
else
@dbg.loadallsyms { |a|
@statusline = "loading symbols from #{Expression[a]}"
redraw
Gui.main_iter
}
end
p.gui_update
}
new_command('scansyms', 'scan target memory for loaded modules') {
if defined? @scan_addr and @scan_addr
add_log 'scanning @%08x' % @scan_addr
next
end
@scan_addr = 0
Gui.idle_add {
if @scan_addr <= 0xffff_f000 # cpu.size?
protect { @dbg.loadsyms(@scan_addr) }
@scan_addr += 0x1000
true
else
add_log 'scansyms finished'
@scan_addr = nil
p.gui_update
nil
end
}
}
new_command('symbol', 'display information on symbols') { |arg|
arg = arg.to_s.downcase
@dbg.symbols.map { |k, v| an = @dbg.addrname(k) ; [k, an] if an.downcase.include? arg }.compact.sort_by { |k, v| v.downcase }.each { |k, v|
add_log "#{Expression[k]} #{@dbg.addrname(k)}"
}
}
new_command('maps', 'show file mappings from parsed modules') { |arg|
want = arg.to_s.downcase
want = nil if want == ''
@dbg.modulemap.map { |n, (a_b, a_e)|
[a_b, "#{Expression[a_b]}-#{Expression[a_e]} #{n}"] if not want or n.downcase.include?(want)
}.compact.sort.each { |s1, s2|
add_log s2
}
}
new_command('rawmaps', 'show OS file mappings') { |arg|
# XXX listwindow
@dbg.mappings.sort.each { |a, l, *i|
foo = i*' '
next if arg.to_s != '' and foo !~ /#{arg}/i
add_log "%08x %06x %s" % [a, l, i*' ']
}
}
new_command('add_symbol', 'add a symbol name') { |arg|
name, val = arg.to_s.split(/\s+/, 2)
val = solve_expr(val)
if val.kind_of? Integer
@dbg.symbols[val] = name
@dbg.disassembler.set_label_at(val, name)
p.gui_update
end
}
new_command('bt', 'backtrace', 'stacktrace', 'bt [limit] - show a stack trace from current pc') { |arg|
arg = solve_expr(arg) if arg
arg = 500 if not arg.kind_of? ::Integer
@dbg.stacktrace(arg) { |a, s| add_log "#{Expression[a]} #{s}" }
}
new_command('dasm', 'disassemble_fast', 'disassembles from an address') { |arg|
addr = solve_expr(arg)
dasm = @dbg.disassembler
dasm.disassemble_fast(addr)
dasm.function_blocks(addr).keys.sort.each { |a|
next if not di = dasm.di_at(a)
dasm.dump_block(di.block) { |l| add_log l }
}
p.gui_update
}
new_command('save_hist', 'save the command buffer to a file') { |arg|
File.open(arg, 'w') { |fd| fd.puts @log }
}
new_command('watch', 'follow an expression in the data view (none to delete)') { |arg|
if arg == 'nil' or arg == 'none' or arg == 'delete'
p.watchpoint.delete p.mem
else
e = parse_expr(arg)
p.watchpoint[p.mem] = e
end
}
new_command('list_pid', 'list pids currently debugged') { |arg|
add_log @dbg.list_debug_pids.sort.map { |pp| pp == @dbg.pid ? "*#{pp}" : pp }.join(' ')
}
new_command('list_tid', 'list tids currently debugged') { |arg|
add_log @dbg.list_debug_tids.sort.map { |tt| tt == @dbg.tid ? "*#{tt}" : tt }.join(' ')
}
new_command('list_processes', 'list processes available for debugging') { |arg|
@dbg.list_processes.each { |pp|
add_log "#{pp.pid} #{pp.path}"
}
}
new_command('list_threads', 'list thread ids of the current process') { |arg|
@dbg.list_threads.each { |t|
stf = { :state => @dbg.state, :info => @dbg.info } if t == @dbg.tid
stf ||= @dbg.tid_stuff[t]
stf ||= {}
add_log "#{t} #{stf[:state]} #{stf[:info]}"
}
}
new_command('pid', 'select a pid') { |arg|
if pid = solve_expr(arg)
@dbg.pid = pid
else
add_log "pid #{@dbg.pid}"
end
}
new_command('tid', 'select a tid') { |arg|
if tid = solve_expr(arg)
@dbg.tid = tid
else
add_log "tid #{@dbg.tid} #{@dbg.state} #{@dbg.info}"
end
}
new_command('exception_pass', 'pass the exception unhandled to the target on next continue') {
@dbg.pass_current_exception
}
new_command('exception_handle', 'handle the exception, hide it from the target on next continue') {
@dbg.pass_current_exception false
}
new_command('exception_pass_all', 'ignore all target exceptions') {
@dbg.pass_all_exceptions = true
}
new_command('exception_handle_all', 'break on target exceptions') {
@dbg.pass_all_exceptions = false
}
new_command('thread_events_break', 'break on thread creation/termination') {
@dbg.ignore_newthread = false
@dbg.ignore_endthread = false
}
new_command('thread_event_ignore', 'ignore thread creation/termination') {
@dbg.ignore_newthread = true
@dbg.ignore_endthread = true
}
new_command('trace_children', 'trace children of debuggee (0|1)') { |arg|
arg = case arg.to_s.strip.downcase
when '0', 'no', 'false'; false
else true
end
add_log "trace children #{arg ? 'active' : 'inactive'}"
# update the flag for future debugee
@dbg.trace_children = arg
# change current debugee setting if supported
@dbg.do_trace_children if @dbg.respond_to?(:do_trace_children)
}
new_command('attach', 'attach to a running process') { |arg|
if pr = @dbg.list_processes.find { |pp| pp.path.to_s.downcase.include?(arg.downcase) }
pid = pr.pid
else
pid = solve_expr(arg)
end
@dbg.attach(pid)
}
new_command('create_process', 'create a new process and debug it') { |arg|
@dbg.create_process(arg)
}
@dbg.ui_command_setup(self) if @dbg.respond_to? :ui_command_setup
end
# Run a long operation through the parent DbgWidget (which owns the
# spinner / target-running state).
def wrap_run(&b) @parent_widget.wrap_run(&b) end
# Keyboard shortcut tables live on the parent widget; expose them here
# so the console shares the same bindings.
def keyboard_callback; @parent_widget.keyboard_callback end
def keyboard_callback_ctrl; @parent_widget.keyboard_callback_ctrl end
# Validate and run the line currently typed at the console prompt:
# echo it to the log, append it to the bounded command history, clear
# the prompt/caret, then dispatch it through run_command.
def handle_command
  add_log(":#{@curline}")
  return if @curline == ''
  @cmd_history.push(@curline)
  @cmd_history.shift while @cmd_history.length > @cmd_history_length
  @log_offset = 0
  typed = @curline
  @curline = ''
  @caret_x = 0
  run_command(typed)
end
# Dispatch one console command line. The first word selects the handler;
# an unambiguous prefix of a command name is accepted (e.g. "cont" for
# "continue"). The handler receives the rest of the line as its argument.
def run_command(cmd)
  name = cmd.split.first
  unless @commands[name]
    candidates = @commands.keys.select { |k| k[0, name.length] == name }
    name = candidates.first if candidates.length == 1
  end
  handler = @commands[name]
  if handler
    handler[cmd.split(/\s+/, 2)[1].to_s]
  else
    add_log 'unknown command'
  end
end
# Append a line to the console log, trim the oldest entries so the log
# never grows past @log_length, then schedule a repaint.
def add_log(l)
  @log << l.to_s
  # BUGFIX: was `log.length` — no `log` reader exists on this widget, so
  # this raised NameError as soon as the log filled up; the bound is on
  # the @log buffer itself.
  @log.shift if @log.length > @log_length
  redraw
end
# Called when the debugger state changed: simply repaint the console.
def gui_update
  redraw
end
# hint that the caret moved: invalidate just the old and new caret
# columns, or the whole line when either lies past the right edge.
def update_caret
  return if @oldcaret_x == @caret_x
  right_edge = width - @font_width
  old_px = (@oldcaret_x + 1) * @font_width + 1
  new_px = (@caret_x + 1) * @font_width + 1
  y = @caret_y
  if old_px > right_edge or new_px > right_edge
    invalidate(0, y, 100000, @font_height)
  else
    invalidate(old_px - 1, y, 2, @font_height)
    invalidate(new_px - 1, y, 2, @font_height)
  end
  @oldcaret_x = @caret_x
end
end
# Toplevel window hosting the full debugger UI (code/data/register views).
class DbgWindow < Window
  # The DbgWidget displayed inside this window.
  attr_accessor :dbg_widget
  # Sets the window title and, when a debugger instance is supplied,
  # immediately builds and shows its widget.
  def initialize_window(dbg = nil, title='metasm debugger')
    self.title = title
    display(dbg) if dbg
  end
  # show a new DbgWidget
  def display(dbg)
    @dbg_widget = DbgWidget.new(dbg)
    @dbg_widget.win = self
    self.widget = @dbg_widget
    @dbg_widget
  end
  # Builds the 'Actions' menu: run control (f5/f10/f11 accelerators),
  # target kill/detach, and quit.
  def build_menu
    dbgmenu = new_menu
    addsubmenu(dbgmenu, 'continue', '<f5>') { @dbg_widget.dbg_continue }
    addsubmenu(dbgmenu, 'step over', '<f10>') { @dbg_widget.dbg_stepover }
    addsubmenu(dbgmenu, 'step into', '<f11>') { @dbg_widget.dbg_singlestep }
    addsubmenu(dbgmenu, 'kill target') { @dbg_widget.dbg.kill } # destroy ?
    addsubmenu(dbgmenu, 'detach target') { @dbg_widget.dbg.detach } # destroy ?
    addsubmenu(dbgmenu) # separator
    addsubmenu(dbgmenu, 'QUIT') { destroy }
    addsubmenu(@menu, dbgmenu, '_Actions')
  end
end
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for mharris_ext 1.6.0 (generated by jeweler — see header above).
Gem::Specification.new do |s|
  s.name = "mharris_ext"
  s.version = "1.6.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Mike Harris"]
  s.date = "2012-11-13"
  s.description = "mharris717 utlity methods"
  s.email = "mharris717@gmail.com"
  # Extra files fed to rdoc in addition to the sources.
  s.extra_rdoc_files = [
    "LICENSE",
    "README"
  ]
  # Every file shipped inside the packaged gem.
  s.files = [
    "LICENSE",
    "README",
    "Rakefile",
    "VERSION.yml",
    "features/mharris_ext.feature",
    "features/steps/mharris_ext_steps.rb",
    "features/support/env.rb",
    "lib/mharris_ext.rb",
    "lib/mharris_ext/accessor.rb",
    "lib/mharris_ext/benchmark.rb",
    "lib/mharris_ext/cmd.rb",
    "lib/mharris_ext/enumerable.rb",
    "lib/mharris_ext/file.rb",
    "lib/mharris_ext/fileutils.rb",
    "lib/mharris_ext/from_hash.rb",
    "lib/mharris_ext/gems.rb",
    "lib/mharris_ext/methods.rb",
    "lib/mharris_ext/object.rb",
    "lib/mharris_ext/regexp.rb",
    "lib/mharris_ext/string.rb",
    "lib/mharris_ext/time.rb",
    "mharris_ext.gemspec",
    "test/mharris_ext_test.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/GFunk911/mharris_ext"
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.24"
  s.summary = "mharris717 utility methods"
  # Declare the fattr/facets dependencies through whichever API the
  # running rubygems version supports.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<fattr>, [">= 0"])
      s.add_runtime_dependency(%q<facets>, [">= 0"])
    else
      s.add_dependency(%q<fattr>, [">= 0"])
      s.add_dependency(%q<facets>, [">= 0"])
    end
  else
    s.add_dependency(%q<fattr>, [">= 0"])
    s.add_dependency(%q<facets>, [">= 0"])
  end
end
gemspec
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for mharris_ext 1.7.0 (generated by jeweler — see header above).
Gem::Specification.new do |s|
  s.name = "mharris_ext"
  s.version = "1.7.0"
  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Mike Harris"]
  s.date = "2013-04-12"
  s.description = "mharris717 utlity methods"
  s.email = "mharris717@gmail.com"
  # Extra files fed to rdoc in addition to the sources.
  s.extra_rdoc_files = [
    "LICENSE",
    "README"
  ]
  # Every file shipped inside the packaged gem (adds present.rb and
  # trace.rb relative to 1.6.0).
  s.files = [
    "LICENSE",
    "README",
    "Rakefile",
    "VERSION.yml",
    "features/mharris_ext.feature",
    "features/steps/mharris_ext_steps.rb",
    "features/support/env.rb",
    "lib/mharris_ext.rb",
    "lib/mharris_ext/accessor.rb",
    "lib/mharris_ext/benchmark.rb",
    "lib/mharris_ext/cmd.rb",
    "lib/mharris_ext/enumerable.rb",
    "lib/mharris_ext/file.rb",
    "lib/mharris_ext/fileutils.rb",
    "lib/mharris_ext/from_hash.rb",
    "lib/mharris_ext/gems.rb",
    "lib/mharris_ext/methods.rb",
    "lib/mharris_ext/object.rb",
    "lib/mharris_ext/present.rb",
    "lib/mharris_ext/regexp.rb",
    "lib/mharris_ext/string.rb",
    "lib/mharris_ext/time.rb",
    "lib/mharris_ext/trace.rb",
    "mharris_ext.gemspec",
    "test/mharris_ext_test.rb",
    "test/test_helper.rb"
  ]
  s.homepage = "http://github.com/GFunk911/mharris_ext"
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.23"
  s.summary = "mharris717 utility methods"
  # Declare the fattr/facets dependencies through whichever API the
  # running rubygems version supports.
  if s.respond_to? :specification_version then
    s.specification_version = 3
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<fattr>, [">= 0"])
      s.add_runtime_dependency(%q<facets>, [">= 0"])
    else
      s.add_dependency(%q<fattr>, [">= 0"])
      s.add_dependency(%q<facets>, [">= 0"])
    end
  else
    s.add_dependency(%q<fattr>, [">= 0"])
    s.add_dependency(%q<facets>, [">= 0"])
  end
end
|
#!/usr/bin/ruby
class String
  # Drop "#"-style comments from every line, then join all lines of the
  # string into one space-separated line.
  def trim
    split("\n").map { |line| line.sub(/#.*/) {}.chomp }.join(" ")
  end
end
# Compute the MTrk length field: byte count of the hex text in +d+ plus
# a 4-byte (8 hex digit) allowance, rendered as exactly 8 hex digits.
def sizemake d
  hexdigits = d.trim.split.join
  nbytes = (hexdigits.size + 8) / 2
  ("00000000" + nbytes.to_s(16))[-8..-1]
end
# MIDI variable-length quantity, 7 bits per group: an Integer below 0x80
# is returned as-is, otherwise a nested [higher-groups, low-7-bits] array
# (the continuation bits are set later, in varlenHex).
def varlen(v)
  return v if v < 0x80
  low7 = v & 0b01111111
  [varlen((v - low7) >> 7), low7]
end
# Render varlen(v) as a hex string: every byte except the last gets its
# continuation (top) bit set, then all bytes are packed big-endian.
def varlenHex(v)
  bytes = [varlen(v)].flatten
  marked = bytes[0..-2].map { |b| b | 0x80 } + [bytes[-1]]
  value = marked.reduce(0) { |acc, byte| acc * 0x100 + byte }
  format("%0#{bytes.size * 2}x", value)
end
# Hex-encode a text string; returns [space-separated hex bytes,
# varlen-encoded length in hex].
def txt2hex t
  bytes = t.chars.map { |c| format("%02x", c.ord) }
  [bytes.join(" "), varlenHex(bytes.size)]
end
# Emits MIDI chunks/events as commented hex-text strings; the script
# later strips the "#" comments (String#trim) and packs the hex digits
# into bytes.
module Mid
  # MThd header chunk: file format and track count (one byte each) and
  # the time resolution in ticks per beat (two bytes), clamped to width.
  def self.header format,track,size
    format=[format,0xff].min
    track=[track,0xff].min
    size=[size,0xffff].min
    format=format("%02x",format)
    track=format("%02x",track)
    size=format("%04x",size)
    "
4D 54 68 64 # ヘッダ
00 00 00 06 # データ長:6[byte]
00 #{format} # フォーマット
00 #{track} # トラック数
#{size} # 1 拍の分解能
"
  end
  # One note: note-on at delta 0, note-off +len+ ticks later.
  # len: ticks, key: MIDI note number, velocity: 0..0x7f, ch: 0..15.
  def self.oneNote len=480,key=0x3C,velocity=40,ch=0
    ch=[ch,0x0f].min
    velocity=[velocity,0x7f].min
    key=[key,0x7f].min
    key=format("%02x",key)
    ch=format("%01x",ch)
    velocity=format("%02x",velocity)
    delta=varlenHex(len)
    # BUGFIX: the note-off status byte was hardcoded to "80" (channel 0),
    # so notes on any other channel — e.g. the rhythm channel 9 used by
    # notekey — were never released. Emit the note-off on the same
    # channel as the note-on, as the later revision of this script does.
    str="
00 9#{ch} #{key} #{velocity} # 0拍後, soundオン...
#{delta} 8#{ch} #{key} 00 # delta後, soundオフ
"
  end
  # Map a note spec (Integer semitone offset, or [offset, channel] pair)
  # to one note event relative to the octave base @basekey.
  def self.notekey key
    @set||=[480,40,0]
    len,velocity,ch=@set
    # Fixnum was removed in Ruby 3.2 (NameError here); test for Integer
    # instead — identical behaviour on older rubies.
    if key.kind_of?(Integer)
      self.oneNote(len,@basekey+key,velocity,ch)
    else
      key,ch=key
      self.oneNote(len,@basekey+key,velocity,ch)
    end
  end
  # Note-name lookup: lower case = natural, upper case = sharp;
  # t/s/u are percussion notes routed to the rhythm channel (9).
  def self.notes c
    @rythmtrack||=9
    @notes||={
      "c"=>0,
      "C"=>1,
      "d"=>2,
      "D"=>3,
      "e"=>4,
      "f"=>5,
      "F"=>6,
      "g"=>7,
      "G"=>8,
      "a"=>9,
      "A"=>10,
      "b"=>11,
      "t"=>[0,@rythmtrack],
      "s"=>[3,@rythmtrack],
      "u"=>[6,@rythmtrack]
    }
    notekey(@notes[c])
  end
  # One beat (default) of silence, emitted as a dummy note-off on ch 10.
  def self.rest len=480
    delta=varlenHex(len)
    "
#{delta} 89 3C 00 # 1拍後, オフ:ch10, key:3C
"
  end
  # Expand a phrase string into joined events:
  # a..g/A..G = notes, +/- = octave up/down, r = rest,
  # N (digits) = repeat the previous event N-1 more times.
  def self.makefraze rundata
    r=[]
    @basekey||=0x3C
    rundata.scan(/[0-9]+|[-+a-zA-Z]/).each{|i|
      case i
      when /-/
        @basekey-=12
      when /\+/
        @basekey+=12
      when /[0-9]+/
        (i.to_i-1).times{r<<r[-1]}
      when "r"
        r<<self.rest
      when " " # dead branch: the scan pattern never yields a space
      else
        r<<self.notes(i)
      end
    }
    r*"\n# onoff ==== \n"
  end
end
array = []
# MThd: format 1, one track, 480 ticks per beat.
d_head=Mid.header(1,1,480)
delta=varlenHex(480)
p "deltaTime: 0x#{delta}"
# MTrk chunk marker for track 1.
d_start="
4D 54 72 6B # トラック 1 開始
"
d_dsize=""
comment="by midi-simple-make.rb"
commenthex,len=txt2hex(comment)
# Meta event FF 01: embed the comment text.
d_comment="
00 FF 01 #{len} #{commenthex}
"
# Placeholder tempo (bpm=120); recomputed from ARGV further below.
d_tempo="
00 FF 51 03 07 A1 20 #bpm=120, 四分音符の長さをマイクロ秒で3byte
"
# Final event: dummy note-off one beat after the last note.
d_last=
"
#{delta} 89 3C 00 # 1拍後, オフ:ch10, key:3C
"
# Meta event FF 2F: end of track.
d_trackend="
00 FF 2F 00 # トラック 1 終了
"
# Print command-line usage for this script to stdout.
def hint
  usage = "usage: #{$0} 'dddd dr3 dddd r4 drdrdrdr dddd dr3' outfile.mid bpm"
  legend = " abcdefg=sound, +-=octave change, r=rest, num=length, blank ignored"
  puts usage
  puts legend
end
# Quarter-note duration in microseconds for +bpm+, as exactly 6 hex
# digits (the 3-byte payload of the FF 51 tempo meta event).
def makebpm bpm
  usec = (60_000_000 / bpm.to_f).to_i
  ("000000" + usec.to_s(16))[-6..-1]
end
rundata,ofile,bpm = ARGV
# No phrase given: print usage and quit.
(hint;exit) if ! rundata
bpm=120 if ! bpm
d_bpm=makebpm(bpm)
# Meta event FF 51: quarter-note length in microseconds (3 bytes).
d_tempo="
00 FF 51 03 #{d_bpm} # 四分音符の長さをマイクロ秒で3byte
"
d_data = d_comment + d_tempo + Mid.makefraze(rundata) + d_last
d_dsize=sizemake(d_data)
#p d_dsize
# Assemble all chunks, strip the comments, and pack the hex into bytes.
alla=[d_head,d_start,d_dsize,d_data,d_trackend]
all=alla.map(&:trim)*""
array=[all.split.join]
#puts alla,all,array
binary = array.pack( "H*" )
#p binary.unpack("H*")
# Only write the .mid file when an output path was supplied.
exit if ! ofile
open(ofile,"wb"){|f|f.write binary}
midi tempo up-down
#!/usr/bin/ruby
class String
  # Remove trailing "#" comments from each line and collapse the whole
  # string into a single space-separated line.
  def trim
    lines = split("\n")
    lines.map! { |ln| ln.sub(/#.*/) {}.chomp }
    lines * " "
  end
end
# Compute the MTrk length field: byte count of the hex text in +d+ plus
# a 4-byte (8 hex digit) allowance, rendered as exactly 8 hex digits.
def sizemake d
  hexdigits = d.trim.split.join
  bytes = (hexdigits.size + 8) / 2
  bytes.to_s(16).rjust(8, "0")[-8..-1]
end
# Variable-length quantity encoding:
# split the value into 7-bit groups; every byte except the last gets its
# top (continuation) bit set later, in varlenHex.
def varlen(v)
  if v < 0x80
    v
  else
    low7 = v & 0x7f
    rest = (v - low7) >> 7
    [varlen(rest), low7]
  end
end
# Render varlen(v) as hex: set the continuation bit on every group but
# the last, then pack the groups big-endian into a hex string.
def varlenHex(v)
  groups = [varlen(v)].flatten
  with_cont = groups[0..-2].map { |g| g | 0x80 }
  packed = (with_cont + [groups.last]).inject(0) { |n, byte| n * 0x100 + byte }
  format("%0#{groups.size * 2}x", packed)
end
# Hex-encode a text string; returns [space-separated hex bytes,
# varlen-encoded length in hex].
def txt2hex t
  encoded = []
  t.each_char { |c| encoded << format("%02x", c.ord) }
  [encoded.join(" "), varlenHex(encoded.size)]
end
# Emits MIDI chunks/events as commented hex-text strings; the script
# later strips the "#" comments (String#trim) and packs the hex digits
# into bytes. This revision adds tempo changes (< and >) to makefraze.
module Mid
  # MThd header: format, track count, and ticks-per-beat resolution,
  # which is memoized in @tbase as the default note/rest length below.
  def self.header format,track,tbase=480
    format=[format,0xff].min
    track=[track,0xff].min
    tbase=[tbase,0x7fff].min
    @tbase=tbase
    format=format("%02x",format)
    track=format("%02x",track)
    tbase=format("%04x",tbase)
    "
4D 54 68 64 # ヘッダ
00 00 00 06 # データ長:6[byte]
00 #{format} # フォーマット
00 #{track} # トラック数
#{tbase} # 1 拍の分解能
"
  end
  # One note: note-on at delta 0, note-off +len+ ticks later on the same
  # channel.
  def self.oneNote len=@tbase,key=@basekey,velocity=0x40,ch=0
    ch=[ch,0x0f].min
    velocity=[velocity,0x7f].min
    key=[key,0x7f].min
    key=format("%02x",key)
    ch=format("%01x",ch)
    velocity=format("%02x",velocity)
    delta=varlenHex(len)
    str="
00 9#{ch} #{key} #{velocity} # 0拍後, soundオン...
#{delta} 8#{ch} #{key} 00 # delta後, soundオフ
"
  end
  # Map a note spec (Integer semitone offset, or [offset, channel] pair)
  # to one note event relative to the octave base @basekey.
  def self.notekey key
    @set||=[@tbase,40,0]
    len,velocity,ch=@set
    # Fixnum was removed in Ruby 3.2 (NameError here); test for Integer
    # instead — identical behaviour on older rubies.
    if key.kind_of?(Integer)
      self.oneNote(len,@basekey+key,velocity,ch)
    else
      key,ch=key
      self.oneNote(len,@basekey+key,velocity,ch)
    end
  end
  # Note-name lookup: lower case = natural, upper case = sharp;
  # t/s/u are percussion notes routed to the rhythm channel (9).
  def self.notes c
    @rythmtrack||=9
    @notes||={
      "c"=>0,
      "C"=>1,
      "d"=>2,
      "D"=>3,
      "e"=>4,
      "f"=>5,
      "F"=>6,
      "g"=>7,
      "G"=>8,
      "a"=>9,
      "A"=>10,
      "b"=>11,
      "t"=>[0,@rythmtrack],
      "s"=>[3,@rythmtrack],
      "u"=>[6,@rythmtrack]
    }
    notekey(@notes[c])
  end
  # One beat (default) of silence, emitted as a dummy note-off on ch 10.
  def self.rest len=@tbase
    delta=varlenHex(len)
    "
#{delta} 89 3C 00 # 1拍後, オフ:ch10, key:3C
"
  end
  # Tempo meta event (FF 51): quarter-note length in microseconds;
  # remembers +bpm+ in @bpm so makefraze can scale it with < and >.
  def self.tempo bpm
    @bpm=bpm
    d_bpm=self.makebpm(@bpm)
    "
00 FF 51 03 #{d_bpm} # 四分音符の長さをマイクロ秒で3byte
"
  end
  # Quarter-note duration for +bpm+ as exactly 6 hex digits (3 bytes).
  def self.makebpm bpm
    d="000000"+(60_000_000/bpm.to_f).to_i.to_s(16)
    d[-6..-1]
  end
  # Expand a phrase string into joined events:
  # a..g/A..G = notes, +/- = octave up/down, r = rest,
  # digits repeat the previous event, < slows / > speeds the tempo 25%.
  def self.makefraze rundata
    @h=[]
    @basekey||=0x3C
    rundata.scan(/[0-9]+|[-+a-zA-Z><]/).each{|i|
      case i
      when /</
        @bpm=@bpm/1.25
        @h<<self.tempo(@bpm)
      when />/
        @bpm=@bpm*1.25
        @h<<self.tempo(@bpm)
      when /-/
        @basekey-=12
      when /\+/
        @basekey+=12
      when /[0-9]+/
        (i.to_i-1).times{@h<<@h[-1]}
      when "r"
        @h<<self.rest
      when " " # dead branch: the scan pattern never yields a space
      else
        @h<<self.notes(i)
      end
    }
    @h*"\n# onoff ==== \n"
  end
  # Placeholder, never implemented.
  def self.dumpHex
  end
end
array = []
# 480 ticks per beat; Mid.header also memoizes this inside Mid as @tbase.
tbase=480
d_head=Mid.header(1,1,tbase)
delta=varlenHex(tbase)
#p "deltaTime: 0x#{delta}"
# MTrk chunk marker for track 1.
d_start="
4D 54 72 6B # トラック 1 開始
"
d_dsize=""
comment="by midi-simple-make.rb"
commenthex,len=txt2hex(comment)
# Meta event FF 01: embed the comment text.
d_comment="
00 FF 01 #{len} #{commenthex}
"
# Final event: dummy note-off one beat after the last note.
d_last=
"
#{delta} 89 3C 00 # 1拍後, オフ:ch10, key:3C
"
# Meta event FF 2F: end of track.
d_trackend="
00 FF 2F 00 # トラック 1 終了
"
# Print command-line usage for this script to stdout.
def hint
  [
    "usage: #{$0} 'dddd dr3 dddd r4 drdrdrdr dddd dr3' outfile.mid bpm",
    " abcdefg=sound, +-=octave change, r=rest, num=length, blank ignored",
  ].each { |line| puts line }
end
rundata,ofile,bpm = ARGV
# No phrase given: print usage and quit.
(hint;exit) if ! rundata
bpm=120 if ! bpm
# Tempo meta event; also seeds Mid's @bpm for </> changes in the phrase.
d_tempo=Mid.tempo(bpm)
d_data = d_comment + d_tempo + Mid.makefraze(rundata) + d_last
d_dsize=sizemake(d_data)
#p d_dsize
# Assemble all chunks, strip the comments, and pack the hex into bytes.
alla=[d_head,d_start,d_dsize,d_data,d_trackend]
all=alla.map(&:trim)*""
array=[all.split.join]
#puts alla,all,array
binary = array.pack( "H*" )
#p binary.unpack("H*")
# Only write the .mid file when an output path was supplied.
exit if ! ofile
open(ofile,"wb"){|f|f.write binary}
|
require "set"
require "kafka/broker_pool"
module Kafka
  # A cluster represents the state of a Kafka cluster. It needs to be initialized
  # with a non-empty list of seed brokers. The first seed broker that the cluster can connect
  # to will be asked for the cluster metadata, which allows the cluster to map topic
  # partitions to the current leader for those partitions.
  class Cluster
    # Initializes a Cluster with a set of seed brokers.
    #
    # The cluster will try to fetch cluster metadata from one of the brokers.
    #
    # @param seed_brokers [Array<URI>]
    # @param broker_pool [Kafka::BrokerPool]
    # @param logger [Logger]
    def initialize(seed_brokers:, broker_pool:, logger:)
      if seed_brokers.empty?
        raise ArgumentError, "At least one seed broker must be configured"
      end
      @logger = logger
      @seed_brokers = seed_brokers
      @broker_pool = broker_pool
      # Cached metadata response; populated lazily by #cluster_info.
      @cluster_info = nil
      # When true, #refresh_metadata_if_necessary! re-fetches metadata.
      @stale = true
      # This is the set of topics we need metadata for. If empty, metadata for
      # all topics will be fetched.
      @target_topics = Set.new
    end
    # Adds a list of topics to the target list. Only the topics on this list will
    # be queried for metadata.
    #
    # @param topics [Array<String>]
    # @return [nil]
    def add_target_topics(topics)
      new_topics = Set.new(topics) - @target_topics
      unless new_topics.empty?
        @logger.info "New topics added to target list: #{new_topics.to_a.join(', ')}"
        @target_topics.merge(new_topics)
        refresh_metadata!
      end
    end
    # Clears the list of target topics.
    #
    # @see #add_target_topics
    # @return [nil]
    def clear_target_topics
      @target_topics.clear
      refresh_metadata!
    end
    # Marks the cached metadata as stale so the next call to
    # #refresh_metadata_if_necessary! fetches it again.
    def mark_as_stale!
      @stale = true
    end
    # Drops the cached metadata and fetches fresh cluster info immediately.
    #
    # @raise [ConnectionError] if none of the seed brokers are reachable.
    def refresh_metadata!
      @cluster_info = nil
      cluster_info
    end
    # Re-fetches metadata only when it has been marked stale.
    def refresh_metadata_if_necessary!
      refresh_metadata! if @stale
    end
    # Finds the broker acting as the leader of the given topic and partition.
    #
    # @param topic [String]
    # @param partition [Integer]
    # @return [Broker] the broker that's currently leader.
    def get_leader(topic, partition)
      connect_to_broker(get_leader_id(topic, partition))
    end
    # Finds the broker coordinating the given consumer group by asking
    # each known broker in turn until one answers.
    #
    # NOTE(review): retries indefinitely (with a 1s sleep) while the
    # coordinator is reported as not yet available.
    #
    # @param group_id [String]
    # @return [Broker] the coordinator broker.
    # @raise [Kafka::Error] if no broker could name a coordinator.
    def get_group_coordinator(group_id:)
      @logger.debug "Getting group coordinator for `#{group_id}`"
      refresh_metadata_if_necessary!
      cluster_info.brokers.each do |broker_info|
        begin
          broker = connect_to_broker(broker_info.node_id)
          response = broker.find_group_coordinator(group_id: group_id)
          Protocol.handle_error(response.error_code)
          coordinator_id = response.coordinator_id
          coordinator = connect_to_broker(coordinator_id)
          @logger.debug "Coordinator for group `#{group_id}` is #{coordinator}"
          return coordinator
        rescue GroupCoordinatorNotAvailable
          @logger.debug "Coordinator not available; retrying in 1s"
          sleep 1
          retry
        rescue ConnectionError => e
          @logger.error "Failed to get group coordinator info from #{broker}: #{e}"
        end
      end
      raise Kafka::Error, "Failed to find group coordinator"
    end
    # Lists the partitions of +topic+, marking metadata stale on protocol
    # errors so it is re-fetched on the next call.
    def partitions_for(topic)
      add_target_topics([topic])
      refresh_metadata_if_necessary!
      cluster_info.partitions_for(topic)
    rescue Kafka::ProtocolError
      mark_as_stale!
      raise
    end
    # Resolves +offset+ for each of the given partitions by asking that
    # partition's leader. +offset+ may be :earliest (-2), :latest (-1),
    # or an Integer.
    #
    # @return [Hash] partition => resolved offset.
    def resolve_offsets(topic, partitions, offset)
      add_target_topics([topic])
      refresh_metadata_if_necessary!
      # Group partitions by their current leader so each broker gets one
      # list_offsets request.
      partitions_by_broker = partitions.each_with_object({}) {|partition, hsh|
        broker = get_leader(topic, partition)
        hsh[broker] ||= []
        hsh[broker] << partition
      }
      if offset == :earliest
        offset = -2
      elsif offset == :latest
        offset = -1
      end
      offsets = {}
      partitions_by_broker.each do |broker, broker_partitions|
        response = broker.list_offsets(
          topics: {
            topic => broker_partitions.map {|partition|
              {
                partition: partition,
                time: offset,
                max_offsets: 1,
              }
            }
          }
        )
        broker_partitions.each do |partition|
          offsets[partition] = response.offset_for(topic, partition)
        end
      end
      offsets
    rescue Kafka::ProtocolError
      mark_as_stale!
      raise
    end
    # Resolves the offset for a single partition.
    #
    # @see #resolve_offsets
    def resolve_offset(topic, partition, offset)
      resolve_offsets(topic, [partition], offset).fetch(partition)
    end
    # @return [Array<String>] names of all topics in the cached metadata.
    def topics
      cluster_info.topics.map(&:topic_name)
    end
    # Closes every broker connection held by the pool.
    def disconnect
      @broker_pool.close
    end
    private
    # Looks up the leader's node id in the cached metadata.
    def get_leader_id(topic, partition)
      cluster_info.find_leader_id(topic, partition)
    end
    # Memoized metadata; fetched from a seed broker on first use.
    def cluster_info
      @cluster_info ||= fetch_cluster_info
    end
    # Fetches the cluster metadata.
    #
    # This is used to update the partition leadership information, among other things.
    # The methods will go through each node listed in `seed_brokers`, connecting to the
    # first one that is available. This node will be queried for the cluster metadata.
    #
    # @raise [ConnectionError] if none of the nodes in `seed_brokers` are available.
    # @return [Protocol::MetadataResponse] the cluster metadata.
    def fetch_cluster_info
      errors = []
      @seed_brokers.shuffle.each do |node|
        @logger.info "Fetching cluster metadata from #{node}"
        # NOTE(review): broker_error only mirrors the logged string; the
        # `errors` list below is what drives the final exception.
        broker_error = nil
        begin
          broker = @broker_pool.connect(node.hostname, node.port)
          cluster_info = broker.fetch_metadata(topics: @target_topics)
          @stale = false
          @logger.info "Discovered cluster metadata; nodes: #{cluster_info.brokers.join(', ')}"
          return cluster_info
        rescue Error => e
          broker_error = "Failed to fetch metadata from #{node}: #{e}"
          @logger.error broker_error
          errors << [node, e]
        ensure
          # Always release the seed connection, even after a successful fetch.
          broker.disconnect unless broker.nil?
        end
      end
      error_description = errors.map {|node, exception| "- #{node}: #{exception}" }.join("\n")
      raise ConnectionError, "Could not connect to any of the seed brokers: #{@seed_brokers.join(', ')}: #{error_description}"
    end
    # Opens (or reuses) a pooled connection to the broker with this id.
    def connect_to_broker(broker_id)
      info = cluster_info.find_broker(broker_id)
      @broker_pool.connect(info.host, info.port, node_id: info.node_id)
    end
  end
end
Clean up the error reporting code a bit
require "set"
require "kafka/broker_pool"
module Kafka
  # A cluster represents the state of a Kafka cluster. It needs to be initialized
  # with a non-empty list of seed brokers. The first seed broker that the cluster can connect
  # to will be asked for the cluster metadata, which allows the cluster to map topic
  # partitions to the current leader for those partitions.
  class Cluster
    # Initializes a Cluster with a set of seed brokers.
    #
    # The cluster will try to fetch cluster metadata from one of the brokers.
    #
    # @param seed_brokers [Array<URI>]
    # @param broker_pool [Kafka::BrokerPool]
    # @param logger [Logger]
    def initialize(seed_brokers:, broker_pool:, logger:)
      if seed_brokers.empty?
        raise ArgumentError, "At least one seed broker must be configured"
      end
      @logger = logger
      @seed_brokers = seed_brokers
      @broker_pool = broker_pool
      # Cached metadata response; populated lazily by #cluster_info.
      @cluster_info = nil
      # When true, #refresh_metadata_if_necessary! re-fetches metadata.
      @stale = true
      # This is the set of topics we need metadata for. If empty, metadata for
      # all topics will be fetched.
      @target_topics = Set.new
    end
    # Adds a list of topics to the target list. Only the topics on this list will
    # be queried for metadata.
    #
    # @param topics [Array<String>]
    # @return [nil]
    def add_target_topics(topics)
      new_topics = Set.new(topics) - @target_topics
      unless new_topics.empty?
        @logger.info "New topics added to target list: #{new_topics.to_a.join(', ')}"
        @target_topics.merge(new_topics)
        refresh_metadata!
      end
    end
    # Clears the list of target topics.
    #
    # @see #add_target_topics
    # @return [nil]
    def clear_target_topics
      @target_topics.clear
      refresh_metadata!
    end
    # Marks the cached metadata as stale so the next call to
    # #refresh_metadata_if_necessary! fetches it again.
    def mark_as_stale!
      @stale = true
    end
    # Drops the cached metadata and fetches fresh cluster info immediately.
    #
    # @raise [ConnectionError] if none of the seed brokers are reachable.
    def refresh_metadata!
      @cluster_info = nil
      cluster_info
    end
    # Re-fetches metadata only when it has been marked stale.
    def refresh_metadata_if_necessary!
      refresh_metadata! if @stale
    end
    # Finds the broker acting as the leader of the given topic and partition.
    #
    # @param topic [String]
    # @param partition [Integer]
    # @return [Broker] the broker that's currently leader.
    def get_leader(topic, partition)
      connect_to_broker(get_leader_id(topic, partition))
    end
    # Finds the broker coordinating the given consumer group by asking
    # each known broker in turn until one answers.
    #
    # NOTE(review): retries indefinitely (with a 1s sleep) while the
    # coordinator is reported as not yet available.
    #
    # @param group_id [String]
    # @return [Broker] the coordinator broker.
    # @raise [Kafka::Error] if no broker could name a coordinator.
    def get_group_coordinator(group_id:)
      @logger.debug "Getting group coordinator for `#{group_id}`"
      refresh_metadata_if_necessary!
      cluster_info.brokers.each do |broker_info|
        begin
          broker = connect_to_broker(broker_info.node_id)
          response = broker.find_group_coordinator(group_id: group_id)
          Protocol.handle_error(response.error_code)
          coordinator_id = response.coordinator_id
          coordinator = connect_to_broker(coordinator_id)
          @logger.debug "Coordinator for group `#{group_id}` is #{coordinator}"
          return coordinator
        rescue GroupCoordinatorNotAvailable
          @logger.debug "Coordinator not available; retrying in 1s"
          sleep 1
          retry
        rescue ConnectionError => e
          @logger.error "Failed to get group coordinator info from #{broker}: #{e}"
        end
      end
      raise Kafka::Error, "Failed to find group coordinator"
    end
    # Lists the partitions of +topic+, marking metadata stale on protocol
    # errors so it is re-fetched on the next call.
    def partitions_for(topic)
      add_target_topics([topic])
      refresh_metadata_if_necessary!
      cluster_info.partitions_for(topic)
    rescue Kafka::ProtocolError
      mark_as_stale!
      raise
    end
    # Resolves +offset+ for each of the given partitions by asking that
    # partition's leader. +offset+ may be :earliest (-2), :latest (-1),
    # or an Integer.
    #
    # @return [Hash] partition => resolved offset.
    def resolve_offsets(topic, partitions, offset)
      add_target_topics([topic])
      refresh_metadata_if_necessary!
      # Group partitions by their current leader so each broker gets one
      # list_offsets request.
      partitions_by_broker = partitions.each_with_object({}) {|partition, hsh|
        broker = get_leader(topic, partition)
        hsh[broker] ||= []
        hsh[broker] << partition
      }
      if offset == :earliest
        offset = -2
      elsif offset == :latest
        offset = -1
      end
      offsets = {}
      partitions_by_broker.each do |broker, broker_partitions|
        response = broker.list_offsets(
          topics: {
            topic => broker_partitions.map {|partition|
              {
                partition: partition,
                time: offset,
                max_offsets: 1,
              }
            }
          }
        )
        broker_partitions.each do |partition|
          offsets[partition] = response.offset_for(topic, partition)
        end
      end
      offsets
    rescue Kafka::ProtocolError
      mark_as_stale!
      raise
    end
    # Resolves the offset for a single partition.
    #
    # @see #resolve_offsets
    def resolve_offset(topic, partition, offset)
      resolve_offsets(topic, [partition], offset).fetch(partition)
    end
    # @return [Array<String>] names of all topics in the cached metadata.
    def topics
      cluster_info.topics.map(&:topic_name)
    end
    # Closes every broker connection held by the pool.
    def disconnect
      @broker_pool.close
    end
    private
    # Looks up the leader's node id in the cached metadata.
    def get_leader_id(topic, partition)
      cluster_info.find_leader_id(topic, partition)
    end
    # Memoized metadata; fetched from a seed broker on first use.
    def cluster_info
      @cluster_info ||= fetch_cluster_info
    end
    # Fetches the cluster metadata.
    #
    # This is used to update the partition leadership information, among other things.
    # The methods will go through each node listed in `seed_brokers`, connecting to the
    # first one that is available. This node will be queried for the cluster metadata.
    #
    # @raise [ConnectionError] if none of the nodes in `seed_brokers` are available.
    # @return [Protocol::MetadataResponse] the cluster metadata.
    def fetch_cluster_info
      errors = []
      @seed_brokers.shuffle.each do |node|
        @logger.info "Fetching cluster metadata from #{node}"
        begin
          broker = @broker_pool.connect(node.hostname, node.port)
          cluster_info = broker.fetch_metadata(topics: @target_topics)
          @stale = false
          @logger.info "Discovered cluster metadata; nodes: #{cluster_info.brokers.join(', ')}"
          return cluster_info
        rescue Error => e
          @logger.error "Failed to fetch metadata from #{node}: #{e}"
          errors << [node, e]
        ensure
          # Always release the seed connection, even after a successful fetch.
          broker.disconnect unless broker.nil?
        end
      end
      error_description = errors.map {|node, exception| "- #{node}: #{exception}" }.join("\n")
      raise ConnectionError, "Could not connect to any of the seed brokers:\n#{error_description}"
    end
    # Opens (or reuses) a pooled connection to the broker with this id.
    def connect_to_broker(broker_id)
      info = cluster_info.find_broker(broker_id)
      @broker_pool.connect(info.host, info.port, node_id: info.node_id)
    end
  end
end
|
require 'cgi'
module Kappa
  # @private
  # Shared base for API-backed channel objects: fetches a channel's JSON
  # by name and instantiates the concrete subclass from it.
  class ChannelBase
    include IdEquality
    # Returns a new channel object for +channel_name+, or nil when the
    # API returns nothing or a 404 status for that name.
    def self.get(channel_name)
      encoded_name = CGI.escape(channel_name)
      json = connection.get("channels/#{encoded_name}")
      return nil if !json || json['status'] == 404
      new(json)
    end
  end
end
module Kappa::V2
  # Channels serve as the home location for a user's content. Channels have a
  # stream, can run commercials, store videos, display information and status,
  # and have a customized page including banners and backgrounds.
  class Channel < Kappa::ChannelBase
    # TODO:
    # c.subscriptions
    # c.start_commercial
    # c.reset_stream_key
    # c.foo = 'bar' ; c.save!
    # Current user's channel
    include Connection

    # Create a new `Channel` from a hash containing the channel's properties.
    # @param hash [Hash] Hash containing the channel's properties.
    def initialize(hash)
      @id = hash['_id']
      @background_url = hash['background']
      @banner_url = hash['banner']
      @created_at = DateTime.parse(hash['created_at'])
      @display_name = hash['display_name']
      @game_name = hash['game']
      @logo_url = hash['logo']
      @mature = hash['mature'] || false
      @name = hash['name']
      @status = hash['status']
      @updated_at = DateTime.parse(hash['updated_at'])
      @url = hash['url']
      @video_banner_url = hash['video_banner']
      # Tolerate a response without a 'teams' entry instead of raising
      # NoMethodError on nil.
      @teams = (hash['teams'] || []).map { |team_json| Team.new(team_json) }
    end

    # This flag is specified by the owner of the channel.
    # @return [Boolean] `true` if the channel has mature content, `false` otherwise.
    def mature?
      @mature
    end

    # Get the live stream associated with this channel.
    # @return [Stream] Live stream object for this channel, or `nil` if the channel is not currently streaming.
    # @see #streaming?
    def stream
      Stream.get(@name)
    end

    # This makes a separate request to get the channel's stream. If you want to actually use
    # the stream object, you should call `#stream` instead.
    # @return [Boolean] `true` if the channel currently has a live stream, `false` otherwise.
    # @see #stream
    def streaming?
      !stream.nil?
    end

    #
    # GET /channels/:channel/editors
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/channels.md#get-channelschanneleditors
    #
    def editors
      # TODO
    end

    #
    # GET /channels/:channels/videos
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/videos.md#get-channelschannelvideos
    #
    def videos(params = {})
      # TODO
    end

    #
    # GET /channels/:channel/follows
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/channels.md#get-channelschannelfollows
    # TODO: Warning: this set can be very large, this can run for very long time, recommend using :limit/:offset.
    #
    # Get the users following this channel.
    # @param args [Hash] `:limit` (optional) caps the number of results; all results if omitted.
    # @return [Array<User>] List of users following this channel.
    def followers(args = {})
      params = {}
      limit = args[:limit]
      if limit && (limit < 100)
        params[:limit] = limit
      else
        # Page through 100 at a time; limit 0 tells #accumulate "no overall cap".
        params[:limit] = 100
        limit = 0
      end
      return connection.accumulate(
        :path => "channels/#{@name}/follows",
        :params => params,
        :json => 'follows',
        :sub_json => 'user',
        :class => User,
        :limit => limit
      )
    end

    # TODO: Requires authentication.
    def subscribers
    end

    #
    # GET /channels/:channel/subscriptions/:user
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/subscriptions.md#get-channelschannelsubscriptionsuser
    #
    # TODO: Requires authentication.
    def has_subscriber?(user)
      # Support User object or username (string)
    end

    # @return [Fixnum] Unique Twitch ID.
    attr_reader :id
    # @return [String] URL for background image.
    attr_reader :background_url
    # @return [String] URL for banner image.
    attr_reader :banner_url
    # @return [DateTime] When the channel was created.
    attr_reader :created_at
    # @return [String] User-friendly display name, e.g. name used for page title.
    attr_reader :display_name
    # @return [String] Name of the primary game for this channel.
    attr_reader :game_name
    # @return [String] URL for the logo image.
    attr_reader :logo_url
    # @return [String] Unique Twitch name.
    attr_reader :name
    # @return [String] Current status.
    attr_reader :status
    # @return [DateTime] When the channel was last updated, e.g. last stream time.
    attr_reader :updated_at
    # @return [String] The URL for the channel's main page.
    attr_reader :url
    # @return [String] URL for the image shown when the stream is offline.
    attr_reader :video_banner_url
    # @return [Array<Team>] Teams this channel is associated with.
    attr_reader :teams
  end
end
Rounding out documentation for Channel.
require 'cgi'

module Kappa
  # @private
  class ChannelBase
    include IdEquality

    # Look up a channel by name. Returns `nil` when the API reports the
    # channel as missing (no response body, or a 404 status in the JSON),
    # otherwise a new channel instance built from the response.
    def self.get(channel_name)
      response = connection.get("channels/#{CGI.escape(channel_name)}")
      return nil if !response || (response['status'] == 404)
      new(response)
    end
  end
end
module Kappa::V2
  # Channels serve as the home location for a user's content. Channels have a stream, can run
  # commercials, store videos, display information and status, and have a customized page including
  # banners and backgrounds.
  class Channel < Kappa::ChannelBase
    # TODO:
    # c.subscriptions
    # c.start_commercial
    # c.reset_stream_key
    # c.foo = 'bar' ; c.save!
    # Current user's channel
    include Connection

    # Create a new `Channel` from a hash containing the channel's properties.
    # @param hash [Hash] Hash containing the channel's properties.
    def initialize(hash)
      @id = hash['_id']
      @background_url = hash['background']
      @banner_url = hash['banner']
      @created_at = DateTime.parse(hash['created_at'])
      @display_name = hash['display_name']
      @game_name = hash['game']
      @logo_url = hash['logo']
      @mature = hash['mature'] || false
      @name = hash['name']
      @status = hash['status']
      @updated_at = DateTime.parse(hash['updated_at'])
      @url = hash['url']
      @video_banner_url = hash['video_banner']
      # Tolerate a response without a 'teams' entry instead of raising
      # NoMethodError on nil.
      @teams = (hash['teams'] || []).map { |team_json| Team.new(team_json) }
    end

    # Does this channel have mature content? This flag is specified by the owner of the channel.
    # @return [Boolean] `true` if the channel has mature content, `false` otherwise.
    def mature?
      @mature
    end

    # Get the live stream associated with this channel.
    # @return [Stream] Live stream object for this channel, or `nil` if the channel is not currently streaming.
    # @see #streaming?
    def stream
      Stream.get(@name)
    end

    # Does this channel currently have a live stream?
    # @note This makes a separate request to get the channel's stream. If you want to actually use the stream object, you should call `#stream` instead.
    # @return [Boolean] `true` if the channel currently has a live stream, `false` otherwise.
    # @see #stream
    def streaming?
      !stream.nil?
    end

    #
    # GET /channels/:channel/editors
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/channels.md#get-channelschanneleditors
    #
    # @private
    # Private until implemented.
    def editors
      # TODO
    end

    #
    # GET /channels/:channels/videos
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/videos.md#get-channelschannelvideos
    #
    # @private
    # Private until implemented.
    def videos(params = {})
      # TODO
    end

    # TODO: Requires authentication.
    # @private
    # Private until implemented.
    def subscribers
    end

    #
    # GET /channels/:channel/subscriptions/:user
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/subscriptions.md#get-channelschannelsubscriptionsuser
    #
    # TODO: Requires authentication.
    # @private
    # Private until implemented.
    def has_subscriber?(user)
      # Support User object or username (string)
    end

    #
    # GET /channels/:channel/follows
    # https://github.com/justintv/Twitch-API/blob/master/v2_resources/channels.md#get-channelschannelfollows
    #
    # Get the users following this channel.
    # @note The number of followers is potentially very large, so it's recommended that you specify a `:limit`.
    # @param :limit [Fixnum] (optional) Limit on the number of results returned. If omitted, all results are returned.
    # @param :offset [Fixnum] (optional) Offset into the result set to begin enumeration.
    # @return [[User]] List of users following this channel.
    def followers(args = {})
      params = {}
      limit = args[:limit]
      if limit && (limit < 100)
        params[:limit] = limit
      else
        # Page through 100 at a time; limit 0 tells #accumulate "no overall cap".
        params[:limit] = 100
        limit = 0
      end
      return connection.accumulate(
        :path => "channels/#{@name}/follows",
        :params => params,
        :json => 'follows',
        :sub_json => 'user',
        :class => User,
        :limit => limit
      )
    end

    # @return [Fixnum] Unique Twitch ID.
    attr_reader :id
    # @return [String] URL for background image.
    attr_reader :background_url
    # @return [String] URL for banner image.
    attr_reader :banner_url
    # @return [DateTime] When the channel was created.
    attr_reader :created_at
    # @return [String] User-friendly display name. This name is used for the channel's page title.
    attr_reader :display_name
    # @return [String] Name of the primary game for this channel.
    attr_reader :game_name
    # @return [String] URL for the logo image.
    attr_reader :logo_url
    # @return [String] Unique Twitch name.
    attr_reader :name
    # @return [String] Current status set by the channel's owner.
    attr_reader :status
    # @return [DateTime] When the channel was last updated. When a stream is started, its channel is updated.
    attr_reader :updated_at
    # @return [String] The URL for the channel's main page.
    attr_reader :url
    # @return [String] URL for the image shown when the stream is offline.
    attr_reader :video_banner_url
    # @return [[Team]] The list of teams that this channel is associated with. Not all channels have associated teams.
    attr_reader :teams
  end
end
|
module Dnsimple
class Client
module Registrar
# Registers a domain.
#
# @see https://developer.dnsimple.com/v2/registrar/#register
#
# @param [Fixnum] account_id the account ID
# @param [#to_s] domain_name The domain name to register.
# @param [Hash] attributes
# @param [Hash] options
# @return [Struct::Domain]
#
# @raise [RequestError] When the request fails.
def register(account_id, domain_name, attributes = {}, options = {})
Extra.validate_mandatory_attributes(attributes, [:registrant_id])
options = options.merge(attributes)
response = client.post(Client.versioned("/%s/registrar/domains/%s/registration" % [account_id, domain_name]), options)
Dnsimple::Response.new(response, Struct::Domain.new(response["data"]))
end
# Checks whether a domain is available to be registered.
#
# @see https://developer.dnsimple.com/v2/registrar/#check
#
# @param [Fixnum] account_id the account ID
# @param [#to_s] domain_name The domain name to check.
# @param [Hash] options
# @return [Struct::DomainCheck]
#
# @raise [RequestError] When the request fails.
def check(account_id, domain_name, options = {})
response = client.get(Client.versioned("/%s/registrar/domains/%s/check" % [account_id, domain_name]), options)
Dnsimple::Response.new(response, Struct::DomainCheck.new(response["data"]))
end
end
end
end
Add documentation for Registrar#check.
module Dnsimple
  class Client
    module Registrar
      # Registers a domain.
      #
      # @see https://developer.dnsimple.com/v2/registrar/#register
      #
      # @param [Fixnum] account_id the account ID
      # @param [#to_s] domain_name The domain name to register.
      # @param [Hash] attributes registration attributes; must include :registrant_id
      # @param [Hash] options
      # @return [Struct::Domain]
      #
      # @raise [RequestError] When the request fails.
      def register(account_id, domain_name, attributes = {}, options = {})
        Extra.validate_mandatory_attributes(attributes, [:registrant_id])
        endpoint = Client.versioned("/%s/registrar/domains/%s/registration" % [account_id, domain_name])
        response = client.post(endpoint, options.merge(attributes))
        Dnsimple::Response.new(response, Struct::Domain.new(response["data"]))
      end

      # Checks whether a domain is available to be registered.
      #
      # @see https://developer.dnsimple.com/v2/registrar/#check
      #
      # @example Check whether example.com is available.
      #   client.registrar.check(1010, "example.com")
      #
      # @param [Fixnum] account_id the account ID
      # @param [#to_s] domain_name The domain name to check.
      # @param [Hash] options
      # @return [Struct::DomainCheck]
      #
      # @raise [RequestError] When the request fails.
      def check(account_id, domain_name, options = {})
        endpoint = Client.versioned("/%s/registrar/domains/%s/check" % [account_id, domain_name])
        response = client.get(endpoint, options)
        Dnsimple::Response.new(response, Struct::DomainCheck.new(response["data"]))
      end
    end
  end
end
|
module Katip
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = '0.1.0'.freeze
end
Version 0.2.0
module Katip
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = '0.2.0'.freeze
end
|
module Kazan
# Gem version.
VERSION = '0.3.2'.freeze
# Rails version constraint this gem targets.
RAILS_VERSION = '~> 5.0.0'.freeze
# Project Ruby version, read from the repository's .ruby-version file at
# load time. NOTE(review): raises Errno::ENOENT if that file is missing —
# confirm the gem always ships alongside a .ruby-version.
RUBY_PROJECT_VERSION = IO.read("#{File.dirname(__FILE__)}/../../.ruby-version").strip.freeze
end
Release v.0.3.3
module Kazan
# Gem version.
VERSION = '0.3.3'.freeze
# Rails version constraint this gem targets.
RAILS_VERSION = '~> 5.0.0'.freeze
# Project Ruby version, read from the repository's .ruby-version file at
# load time. NOTE(review): raises Errno::ENOENT if that file is missing —
# confirm the gem always ships alongside a .ruby-version.
RUBY_PROJECT_VERSION = IO.read("#{File.dirname(__FILE__)}/../../.ruby-version").strip.freeze
end
|
# frozen_string_literal: true

module Dphil
  module Helpers
    # Scoped core-object extensions, activated with
    # `using Dphil::Helpers::Refinements`.
    module Refinements
      refine Object do
        # Return the receiver's first element when it responds to #first,
        # otherwise return the receiver itself.
        def try_first
          return first if respond_to?(:first)
          self
        end
      end
    end
  end
end
Refinements for try_dup and deep_dup (inspired by ActiveSupport).
# frozen_string_literal: true
module Dphil
module Helpers
# Duplication helpers modeled on ActiveSupport's try_dup/deep_dup family.
# Activate the Object refinement with `using Dphil::Helpers::Refinements`.
module Refinements
refine Object do
# Return the receiver's first element when it responds to #first,
# otherwise the receiver itself.
def try_first
respond_to?(:first) ? first : self
end
# Duplicate the receiver; objects that cannot be duped (raise TypeError)
# are returned as-is.
def try_dup
dup
rescue TypeError
self
end
# Default deep_dup is a shallow dup; container types below provide
# recursive versions.
def deep_dup
dup
rescue TypeError
self
end
end
# NOTE(review): the class definitions below are nested inside this module,
# so they define e.g. Dphil::Helpers::Refinements::NilClass — new
# namespaced classes — rather than refining or reopening the core
# ::NilClass etc. If the intent (per the ActiveSupport model) was to give
# the core classes try_dup/deep_dup, these should presumably be
# `refine NilClass do ... end` blocks — confirm intended behavior.
class NilClass
# nil is a singleton; nothing to duplicate.
def try_dup
self
end
def deep_dup
self
end
end
class FalseClass
# false is a singleton; nothing to duplicate.
def try_dup
self
end
def deep_dup
self
end
end
class TrueClass
# true is a singleton; nothing to duplicate.
def try_dup
self
end
def deep_dup
self
end
end
class Symbol
# Symbols are interned; return self.
def try_dup
self
end
def deep_dup
self
end
end
class Numeric
# Numbers are immediate values; return self.
def try_dup
self
end
def deep_dup
self
end
end
# Necessary to re-override Numeric
class BigDecimal
def try_dup
dup
end
def deep_dup
dup
end
end
class Array
# Recursively duplicate every element.
def deep_dup
map(&:deep_dup)
end
end
class Hash
# Recursively duplicate keys and values. Frozen String keys are kept
# as-is (safe to share); any other key is deep-duplicated, which
# requires re-inserting under the new key object.
def deep_dup
hash = dup
each_pair do |key, value|
if key.frozen? && ::String === key # rubocop:disable Style/CaseEquality
hash[key] = value.deep_dup
else
hash.delete(key)
hash[key.deep_dup] = value.deep_dup
end
end
hash
end
end
class Set
# Duplicate via the member array, deep-duplicating each element except
# frozen Strings (safe to share), then rebuild a set of the same class.
def deep_dup
set_a = to_a
set_a.map! do |val|
next val if val.frozen? && ::String === val # rubocop:disable Style/CaseEquality
val.deep_dup
end
self.class[set_a]
end
end
end
end
end
|
require 'net/http'
require 'active_support/core_ext/string'
require 'active_support/core_ext/hash'
require 'json'
require 'rest_client'
module Kickit
# Configuration for kickapps API. Perform configuration in some kind of
# initializer.
#
# KickIt::Config.new do |config|
# config.rest_base_uri: 'http://api.kickapps.com/rest'
# config.as: '822134'
# config.developerKey: '2i4js7fx'
# end
#
# The api token may be set here, but typically you will start a session
# which will take care of this for you.
#
class Config
  # API-wide settings, stored as class-level accessors.
  class << self
    attr_accessor :rest_base_uri, :as, :developerKey, :token,
                  :admin_username, :feed_url
  end

  # Yields the Config class itself so settings can be assigned in a block.
  def initialize
    yield Config if block_given?
  end
end
# This helps to manage a specific user's session against the API in
# which the user must first obtain and utilize a session token for
# making any subsequent requests to the API.
#
# Example:
#
# Kickit::RestSession.new(username) do |session|
# resp = session.api(:user_profile).execute(:userId => userId)
# resp['UserProfile']
# end
#
class RestSession
  # the established kickapps rest api session token
  attr_accessor :token
  # username of the kickapps user the session is being created for
  attr_reader :username

  # Optionally establishes a token right away, then yields the session
  # when a block is given.
  def initialize(username, set_token = true)
    @username = username
    refresh_token if set_token
    yield self if block_given?
  end

  # Instantiate the registered ApiMethod implementation for `method_name`,
  # attaching this session to REST methods so they can send the token.
  def api(method_name)
    method = ApiMethod.find(method_name.to_sym).new
    method.session = self if method.kind_of?(RestMethod)
    method
  end

  private

  # attempts to establish a token only if one does not already exist
  def refresh_token
    refresh_token! unless token
  end

  # grab a new token no matter what
  def refresh_token!
    @token = Kickit::API::CreateToken.new.execute(
      :username => username,
      :developerKey => Kickit::Config.developerKey
    )
  end
end
class ApiMethod
  # all registered api method implementations, keyed by the underscored,
  # demodulized class name as a Symbol
  # (fix: this was previously initialized twice)
  @@register = {}

  # Base no-op; subclasses implement the actual API call.
  def execute(parameters = {})
  end

  # @return [Hash] all registered api method implementations
  def self.all()
    @@register
  end

  # With an argument, sets the human-readable description for this method;
  # without one, returns the previously set description.
  def self.desc(value = nil)
    return @description unless value
    @description = value
  end

  # Look up a registered implementation by its parameterized name.
  def self.find(method_name)
    @@register[method_name]
  end

  # detect when subclasses are created and register them by a
  # parameterized name
  def self.inherited(subclass)
    super
    # the abstract intermediate classes are not registered themselves
    return if subclass == RssMethod || subclass == RestMethod
    name = subclass.name.demodulize.underscore.to_sym
    if @@register[name]
      # TODO: do smart integration with logging
      puts "warning: api method already registered for #{@@register[name]}. This has been overridden by #{subclass.name}"
    end
    # reuse the computed name instead of recomputing it
    @@register[name] = subclass
  end
end
class RssMethod < ApiMethod
# Declare a fixed feed parameter (name => value) for this method class.
def self.param(name, value)
self.params[name] = value
end
# Hash of the feed parameters declared via `param`.
def self.params
@params ||= {}
end
# All registered RssMethod subclasses.
def self.all
ApiMethod.all.select do |name, clazz|
clazz < RssMethod
end
end
# Fetch the configured feed (Kickit::Config.feed_url) with the declared
# parameters merged over any parsed from `queryString`, and return the XML
# response converted to a Hash (ActiveSupport's Hash.from_xml).
# NOTE(review): `puts path` looks like leftover debug output.
def execute(queryString="")
parameters = prepare(queryString)
uri = URI.parse(Kickit::Config.feed_url)
path = "#{uri.path}?".concat(parameters.collect { |k,v| "#{k}=#{CGI::escape(v.to_s)}" }.join('&'))
puts path
# Plain HTTP GET against the feed host (default port).
response = Net::HTTP.get(uri.host, path)
Hash.from_xml(response)
end
private
# Build the parameter hash: :as from Config, plus key=value pairs parsed
# (and CGI-escaped) from the query string, all merged over the class-level
# defaults declared via `param`.
def prepare(queryString)
parameters = {}
parameters[:as] = Kickit::Config.as
if (queryString and !queryString.empty?)
params = queryString.split('&')
params.each do |param|
name, value = param.split("=")
parameters[CGI.escape(name)] = CGI.escape(value)
end
end
parameters = self.class.params.merge(parameters)
parameters
end
end
class RestMethod < ApiMethod
# the session supplying the auth token, set by RestSession#api
attr_accessor :session
# With an argument, sets the URI template (segments starting with ':' are
# substituted from the parameters at request time); without one, returns it.
def self.uri_path(path=nil)
return @uri_path unless path
@uri_path = path
end
# Declare a request parameter and its config (e.g. :required => true).
def self.param(name, config={})
self.params[name] = config
end
# Hash of the parameters declared via `param`.
def self.params
@params ||= {}
end
# With arguments, declares which parameters are file uploads; without
# arguments, returns the declared list (or nil).
def self.multipart(*file_params)
return @multipart unless !file_params.empty?
@multipart = file_params
end
# All registered RestMethod subclasses.
def self.all
ApiMethod.all.select do |name, clazz|
clazz < RestMethod
end
end
# submits the request and returns a Hash of the response from the API.
# If the response is not valid, nil is returned.
# Callers will need to interogate the response Hash to see if the
# request encountered some kind of error
#
# TODO: instead of returning a hash, provide some way to map the
# response to some kind of appropriate object.
def execute(parameters={})
# shallow copy so prepare/multipart handling don't mutate the caller's hash
parameters = parameters.clone
prepare(parameters)
# TODO: abstract the kind of http client we're using
# TODO: ensure we have some kind of timeout handling
url = URI.parse(create_url(parameters))
# NOTE(review): `puts` calls look like leftover debug output.
puts "Calling: #{url.to_s}"
puts " parameters: #{parameters.inspect}"
response = nil
# add multipart parameters
if self.class.multipart
post_data = parameters
self.class.multipart.each do |param|
if parameters[param]
file_path = parameters.delete(param)
# NOTE(review): this File handle is never closed explicitly.
post_data[param] = File.new(file_path)
end
end
response = RestClient.post(url.to_s, post_data).to_str
else
response = Net::HTTP.post_form(url, parameters).body
end
begin
return JSON.parse(response)
rescue JSON::ParserError => e
# it's most likely that the request was invalid and the server has
# sent back an error page full of html
# just return nil
end
end
private
# Adds the required :as and (when a session exists) :t token parameters.
def prepare(parameters)
parameters[:as] = Kickit::Config.as.to_s
parameters[:t] = session.token['TOKEN'] if session
end
# constructs the url based on configuration while taking care to
# perform any kind of string interopolation.
# This assumes that the list parameters has been made complete for the
# call (i.e. any t and as parameters have been added)
def create_url(parameters)
# "/badges/add/:as
path = uri_path
if path =~ /:/
# ["", "badges", "add", ":as"]
parts = path.split('/')
path = ""
parts.each do |part|
next if part == "" or part.nil?
path << '/'
if part =~ /:/
part_name = /:(.*)/.match(part)[1]
# parameters[:as]
part = parameters[part_name.to_sym]
end
path << part
end
end
url = "#{Kickit::Config.rest_base_uri}#{path}"
end
# Reads the class-level uri_path template from the instance side.
def uri_path
self.class.instance_eval {self.uri_path}
end
end
module API
  # Concrete API method declarations. Each class registers itself via
  # ApiMethod.inherited; `desc`, `uri_path`, `param`, and `multipart` are
  # declarative class-level metadata consumed by RestMethod#execute /
  # RssMethod#execute.
  #
  # Fix: RetrieveExternalMedia and AddPoints were each defined twice (the
  # later reopenings only overwrote the description); they are consolidated
  # here into single definitions preserving the final effective state.
  # A few description typos are also corrected.
  class CreateToken < RestMethod
    desc 'Obtains a token used when calling any subsequent requests to the API.'
    uri_path '/token/create/:username/:as'
    param :developerKey, :required => true
    param :username, :required => true
    param :idType # username or email
  end

  class EditBadge < RestMethod
    desc 'Edit a badge from the available set of badges in the BET community'
    uri_path '/badge/edit/:as'
    param :badgeId, :required => true
    param :published
  end

  class UserBadgeStatus < RestMethod
    desc 'Returns progress information on badges for a given user and optionally a location'
    uri_path 'user/badges/getstatus/:as'
    param :user_id, :required => true
    param :location
  end

  class ListActions < RestMethod
    desc 'Lists all badge actions in the system'
    uri_path '/badgeactions/list/:as'
  end

  class AddAction < RestMethod
    desc 'Adds a new badge action to the system'
    uri_path '/badgeaction/add/:as'
    param :name, :required => true
  end

  class AddBadgeRequirement < RestMethod
    desc 'Adds badge requirements into the system'
    uri_path '/badgerequirement/add/:as'
    param :badgeId, :required => true
    param :actionId, :required => true
    param :location, :required => true
    param :quantity
    param :published, :default => false
  end

  class ListBadgeRequirements < RestMethod
    desc 'Returns a list of all Badge Requirements for the provided badgeId'
    uri_path '/badgerequirement/list/:as'
    param :badgeId
  end

  class EditBadgeRequirement < RestMethod
    desc 'Updates badge requirement'
    uri_path '/badgerequirement/edit/:as'
    param :requirementId, :required => true
    param :badgeId, :required => true
    param :location, :required => true
    param :quantity
    param :published, :default => false
    param :actionId, :default => false
  end

  class ListBadges < RestMethod
    desc 'This is an admin method that will list all badges for an AS either site-wide or for a specific location.'
    uri_path '/badges/list/:as'
    param :location
    param :published
  end

  class UserProfile < RestMethod
    desc 'Retrieves a specific user profile.'
    uri_path '/user/profile/:userid/:as'
    param :userid, :required => true
    param :include, :required => false
  end

  class UserBadges < RestMethod
    desc 'Retrieves badges belonging to a specified user'
    uri_path '/user/badges/get/:as'
    param :user_id, :required => true
    param :pgNum
    param :pageSize
  end

  class UserAssociations < RestMethod
    desc 'Get a list of a member’s friends, fans, and other members they are a fan of'
    uri_path '/member/:operation/get/:userId/:as'
    param :operation, :required => true
    param :userId, :required => true
  end

  class AddUserAction < RestMethod
    desc 'Add user actions to aid in the awarding users achievement based badges'
    uri_path '/user/action/add/:as'
    param :user_id, :required => true
    param :actionId, :required => true
    param :location, :required => true
    param :quantity
  end

  class SetProfilePhoto < RestMethod
    desc 'Set user profile'
    uri_path '/user/profile/photo/add/:as'
    param :photoId, :required => true
  end

  # Consolidation of two previous definitions; the second reopening had set
  # the description to 'Add to memeber offset' (typo fixed here) and both
  # param declarations are preserved.
  class AddPoints < RestMethod
    desc 'Add to member offset'
    uri_path '/points/add/:userId/:as'
    param :userId, :required => true
    param :addToOffset, :required => true
  end

  class AddOrRemoveFriend < RestMethod
    desc 'Add or remove friend. operation is one of \'add\' or \'remove\''
    uri_path '/friend/:operation/:friendId/:as'
    param :operation, :required => true
    param :friendId, :required => true
  end

  class AddOrRemoveFavorite < RestMethod
    desc 'adds or removes a media as a favorite'
    uri_path '/favorite/:operation/:mediaType/:mediaId/:as'
    param :operation, :required => true
    param :mediaType, :required => true
    param :mediaId, :required => true
    param :url
  end

  class FavoriteMediaCheck < RestMethod
    desc 'check if a member has favorited a media'
    uri_path '/check/favorite/:mediaType/:mediaId/:as'
    param :mediaType, :required => true
    param :mediaId, :required => true
    param :url
  end

  class FavoriteMedia < RestMethod
    desc "retrieves user's favorite media"
    uri_path 'user/media/:userid/:as'
    param :userid, :required => true
    param :mediaType
  end

  class ListMemberMedia < RestMethod
    desc 'retrieve a users media'
    uri_path '/user/media/:userid/:as'
    param :mediaType, :required => true
    param :userid, :required => true
  end

  # Consolidation of two previous definitions; the second reopening had
  # overwritten the description with "Retrieve media metadata".
  class RetrieveExternalMedia < RestMethod
    desc "Retrieve media metadata"
    uri_path '/externalmedia/:as'
    param :url, :required => true
  end

  class UploadMedia < RestMethod
    desc "upload member media"
    uri_path '/upload/:mediaType/:as'
    multipart :media
    param :mediaType, :required => true
    param :name, :required => true
    param :isProfileImage
  end

  class DeleteMedia < RestMethod
    desc "delete a media"
    uri_path '/deletemedia/:as'
    param :mediaType, :required => true
    param :mediaId, :required => false
    param :url, :required => false
  end

  class RetrieveMediaMeta < RestMethod
    desc "Retrieve media metadata"
    uri_path '/mediainfo/:mediaType/:mediaId/:as'
    param :mediaType, :required => true
    param :mediaId, :required => true
  end

  class UserFeed < RssMethod
    desc "list users from feed"
    param :mediaType, 'user'
  end

  class FlagMedia < RestMethod
    desc "add or remove flag from media"
    uri_path '/flag/:operation/:mediaType/:mediaId/:as'
    param :mediaType, :required => true
    param :operation, :required => true
    param :mediaId, :required => true
  end

  class ApproveMedia < RestMethod
    desc "approve member media"
    uri_path '/media/approve/:mediaType/:mediaId/:as'
    param :mediaType, :required => true
    param :mediaId, :required => true
  end

  class PhotosFeed < RssMethod
    desc "list photos for a specific user"
    param :mediaType, 'photo'
  end
end
end
Add AddBadge API method for creating badges.
require 'net/http'
require 'active_support/core_ext/string'
require 'active_support/core_ext/hash'
require 'json'
require 'rest_client'
module Kickit
# Configuration for kickapps API. Perform configuration in some kind of
# initializer.
#
# KickIt::Config.new do |config|
# config.rest_base_uri: 'http://api.kickapps.com/rest'
# config.as: '822134'
# config.developerKey: '2i4js7fx'
# end
#
# The api token may be set here, but typically you will start a session
# which will take care of this for you.
#
class Config
class << self; attr_accessor :rest_base_uri, :as, :developerKey, :token, :admin_username, :feed_url; end
def initialize(&block)
yield Config if block_given?
end
end
# This helps to manage a specific user's session against the API in
# which the user must first obtain and utilize a session token for
# making any subsequent requests to the API.
#
# Example:
#
# Kickit::RestSession.new(username) do |session|
# resp = session.api(:user_profile).execute(:userId => userId)
# resp['UserProfile']
# end
#
class RestSession
# the established kickapps rest api session token
attr_accessor :token
# username of the kickapps user the session is being created for
attr_reader :username
def initialize(username, set_token = true)
@username = username
refresh_token() if set_token
yield self if block_given?
end
def api(method_name)
clazz = ApiMethod.find(method_name.to_sym)
api_method = clazz.new
api_method.session = self if api_method.kind_of? RestMethod
api_method
end
private
# attempts to establish a token only if one does not already exist
def refresh_token
return if token
refresh_token!
end
# grab a new token no matter what
def refresh_token!
create_token = Kickit::API::CreateToken.new
resp = create_token.execute(:username => username,
:developerKey => Kickit::Config.developerKey)
@token = resp
end
end
class ApiMethod
@@register = {}
# all registered api method implementations
@@register = {}
def execute(parameters={})
end
def self.all()
@@register
end
def self.desc(value=nil)
return @description unless value
@description = value
end
def self.find(method_name)
@@register[method_name]
end
# detect when subclasses are created and register them by a
# parameterized name
def self.inherited(subclass)
return if subclass == RssMethod or subclass == RestMethod
# register subclasses
name = subclass.name.demodulize.underscore.to_sym
if @@register[name]
# TODO: do smart integration with logging
puts "warning: api method already registered for #{@@register[name]}. This has been overridden by #{subclass.name}"
end
@@register[subclass.name.demodulize.underscore.to_sym] = subclass
end
end
class RssMethod < ApiMethod
def self.param(name, value)
self.params[name] = value
end
def self.params
@params ||= {}
end
def self.all
ApiMethod.all.select do |name, clazz|
clazz < RssMethod
end
end
def execute(queryString="")
parameters = prepare(queryString)
uri = URI.parse(Kickit::Config.feed_url)
path = "#{uri.path}?".concat(parameters.collect { |k,v| "#{k}=#{CGI::escape(v.to_s)}" }.join('&'))
puts path
response = Net::HTTP.get(uri.host, path)
Hash.from_xml(response)
end
private
def prepare(queryString)
parameters = {}
parameters[:as] = Kickit::Config.as
if (queryString and !queryString.empty?)
params = queryString.split('&')
params.each do |param|
name, value = param.split("=")
parameters[CGI.escape(name)] = CGI.escape(value)
end
end
parameters = self.class.params.merge(parameters)
parameters
end
end
class RestMethod < ApiMethod
attr_accessor :session
def self.uri_path(path=nil)
return @uri_path unless path
@uri_path = path
end
def self.param(name, config={})
self.params[name] = config
end
def self.params
@params ||= {}
end
def self.multipart(*file_params)
return @multipart unless !file_params.empty?
@multipart = file_params
end
def self.all
ApiMethod.all.select do |name, clazz|
clazz < RestMethod
end
end
# submits the request and returns a Hash of the response from the API.
# If the response is not valid, nil is returned.
# Callers will need to interogate the response Hash to see if the
# request encountered some kind of error
#
# TODO: instead of returning a hash, provide some way to map the
# response to some kind of appropriate object.
def execute(parameters={})
parameters = parameters.clone
prepare(parameters)
# TODO: abstract the kind of http client we're using
# TODO: ensure we have some kind of timeout handling
url = URI.parse(create_url(parameters))
puts "Calling: #{url.to_s}"
puts " parameters: #{parameters.inspect}"
response = nil
# add multipart parameters
if self.class.multipart
post_data = parameters
self.class.multipart.each do |param|
if parameters[param]
file_path = parameters.delete(param)
post_data[param] = File.new(file_path)
end
end
response = RestClient.post(url.to_s, post_data).to_str
else
response = Net::HTTP.post_form(url, parameters).body
end
begin
return JSON.parse(response)
rescue JSON::ParserError => e
# it's most likely that the request was invalid and the server has
# sent back an error page full of html
# just return nil
end
end
private
def prepare(parameters)
parameters[:as] = Kickit::Config.as.to_s
parameters[:t] = session.token['TOKEN'] if session
end
# constructs the url based on configuration while taking care to
# perform any kind of string interopolation.
# This assumes that the list parameters has been made complete for the
# call (i.e. any t and as parameters have been added)
def create_url(parameters)
# "/badges/add/:as
path = uri_path
if path =~ /:/
# ["", "badges", "add", ":as"]
parts = path.split('/')
path = ""
parts.each do |part|
next if part == "" or part.nil?
path << '/'
if part =~ /:/
part_name = /:(.*)/.match(part)[1]
# parameters[:as]
part = parameters[part_name.to_sym]
end
path << part
end
end
url = "#{Kickit::Config.rest_base_uri}#{path}"
end
def uri_path
self.class.instance_eval {self.uri_path}
end
end
module API
#
class CreateToken < RestMethod
desc 'Obtains a token used when calling any subsequent requests to the API.'
uri_path '/token/create/:username/:as'
param :developerKey, :required => true
param :username, :required => true
param :idType #username or email
end
class AddBadge < RestMethod
desc 'Add a badge'
uri_path '/badge/add/:as'
param :location
param :name
param :verbosename
param :url
end
class EditBadge < RestMethod
desc 'Edit a badge from the available set of badges in the BET community'
uri_path '/badge/edit/:as'
param :badgeId, :required => true
param :published
param :name
end
class UserBadgeStatus < RestMethod
desc 'Returns progress information on badges for a given user and optionally a location'
uri_path 'user/badges/getstatus/:as'
param :user_id, :required => true
param :location
end
class ListActions < RestMethod
desc 'Lists all badge actions in the system'
uri_path '/badgeactions/list/:as'
end
class AddAction < RestMethod
desc 'Adds a new badge action to the system'
uri_path '/badgeaction/add/:as'
param :name, :required => true
end
class AddBadgeRequirement < RestMethod
desc 'Adds badge requirements into the system'
uri_path '/badgerequirement/add/:as'
param :badgeId, :required => true
param :actionId, :required => true
param :location, :required => true
param :quantity
param :published, :default => false
end
class ListBadgeRequirements < RestMethod
desc 'Returns a list of all Badge Requirements for the provided badgeId'
uri_path '/badgerequirement/list/:as'
param :badgeId
end
class EditBadgeRequirement < RestMethod
desc 'Updates badge requirement'
uri_path '/badgerequirement/edit/:as'
param :requirementId, :required => true
param :badgeId, :required => true
param :location, :required => true
param :quantity
param :published, :default => false
param :actionId, :default => false
end
class ListBadges < RestMethod
desc 'This is an admin method that will list all badges for an AS either site-wide or for a specific location.'
uri_path '/badges/list/:as'
param :location
param :published
end
class UserProfile < RestMethod
desc 'Retrievs a specific user profile.'
uri_path '/user/profile/:userid/:as'
param :userid, :required => true
param :include, :required => false
end
class UserBadges < RestMethod
desc 'Retrieves badges belonging to a specified user'
uri_path '/user/badges/get/:as'
param :user_id, :required => true
param :pgNum
param :pageSize
end
class UserAssociations < RestMethod
desc 'Get a list of a member’s friends, fans, and other members they are a fan of'
uri_path '/member/:operation/get/:userId/:as'
param :operation, :required => true
param :userId, :required => true
end
class AddUserAction < RestMethod
desc 'Add user actions to aid in the awarding users achievement based badges'
uri_path '/user/action/add/:as'
param :user_id, :required => true
param :actionId, :required => true
param :location, :required => true
param :quantity
end
class SetProfilePhoto < RestMethod
desc 'Set user profile'
uri_path '/user/profile/photo/add/:as'
param :photoId, :required => true
end
class AddPoints < RestMethod
desc 'Add user points offset'
uri_path '/points/add/:userId/:as'
param :userId, :required => true
param :addToOffset, :required => true
end
# Friend management; the :operation path segment is 'add' or 'remove'.
class AddOrRemoveFriend < RestMethod
  desc "Add or remove friend. operation is one of 'add' or 'remove'"
  uri_path '/friend/:operation/:friendId/:as'
  param :operation, required: true
  param :friendId, required: true
end
# Favorites a media item (or un-favorites it) for the current member.
class AddOrRemoveFavorite < RestMethod
  desc "adds or removes a media as a favorite"
  uri_path '/favorite/:operation/:mediaType/:mediaId/:as'
  param :operation, required: true
  param :mediaType, required: true
  param :mediaId, required: true
  # Optional; presumably used for external media — confirm against handler.
  param :url
end
# Boolean-style check: has the member already favorited this media?
class FavoriteMediaCheck < RestMethod
  desc "check if a member has favorited a media"
  uri_path '/check/favorite/:mediaType/:mediaId/:as'
  param :mediaType, required: true
  param :mediaId, required: true
  # Optional; presumably used for external media — confirm against handler.
  param :url
end
# Retrieves a user's favorite media, optionally filtered by media type.
class FavoriteMedia < RestMethod
  desc "retrieves user's favorite media"
  # Fixed: path was 'user/media/:userid/:as' — the only uri_path in this file
  # without a leading slash.
  # NOTE(review): with the slash restored this path is identical to
  # ListMemberMedia's. Confirm whether this endpoint should instead live
  # under a distinct route (e.g. a /favorite/... path).
  uri_path '/user/media/:userid/:as'
  param :userid, :required => true
  param :mediaType
end
# Lists the media items owned by a user, restricted to one media type.
class ListMemberMedia < RestMethod
  desc "retrieve a users media"
  uri_path '/user/media/:userid/:as'
  param :mediaType, required: true
  param :userid, required: true
end
# Fetches metadata for media hosted outside the platform, keyed by URL.
# NOTE(review): this class is declared again (identically) further down in
# this file; Ruby reopens the class there. One copy should likely go.
class RetrieveExternalMedia < RestMethod
  desc "retrieve external media data"
  uri_path '/externalmedia/:as'
  param :url, required: true
end
# Multipart upload endpoint for member media (photo/video/etc. per :mediaType).
class UploadMedia < RestMethod
  # Fixed typo in the generated docs: "memeber" -> "member".
  desc "upload member media"
  uri_path '/upload/:mediaType/:as'
  # The file itself travels as the multipart part named 'media'.
  multipart :media
  param :mediaType, :required => true
  param :name, :required => true
  param :isProfileImage
end
# Deletes a media item.
# NOTE(review): both :mediaId and :url are optional — presumably exactly one
# must be supplied (internal vs external media); confirm against the handler.
class DeleteMedia < RestMethod
  desc "delete a media"
  uri_path '/deletemedia/:as'
  param :mediaType, required: true
  param :mediaId, required: false
  param :url, required: false
end
# NOTE(review): duplicate definition — RetrieveExternalMedia was already
# declared earlier in this file with the same uri_path and :url param (only
# the desc text differs). In Ruby this reopens that class and re-runs the
# DSL calls, which may re-register the param depending on how RestMethod's
# DSL accumulates declarations. One of the two definitions should likely be
# removed — confirm which desc is the intended one.
class RetrieveExternalMedia < RestMethod
desc "Retrieve media metadata"
uri_path '/externalmedia/:as'
param :url, :required => true
end
# Returns metadata for a platform-hosted media item.
class RetrieveMediaMeta < RestMethod
  desc "Retrieve media metadata"
  uri_path '/mediainfo/:mediaType/:mediaId/:as'
  param :mediaType, required: true
  param :mediaId, required: true
end
# RSS feed variant: lists users. Unlike the RestMethod classes, RssMethod's
# `param` here takes a fixed positional value rather than an options hash.
class UserFeed < RssMethod
  desc "list users from feed"
  param :mediaType, 'user'
end
# Moderation: flags (or unflags) a media item; :operation selects which.
class FlagMedia < RestMethod
  desc "add or remove flag from media"
  uri_path '/flag/:operation/:mediaType/:mediaId/:as'
  param :mediaType, required: true
  param :operation, required: true
  param :mediaId, required: true
end
# Moderation: marks a member's media item as approved.
class ApproveMedia < RestMethod
  desc "approve member media"
  uri_path '/media/approve/:mediaType/:mediaId/:as'
  param :mediaType, required: true
  param :mediaId, required: true
end
# NOTE(review): AddPoints is already defined earlier in this file with an
# additional required :addToOffset param; this second declaration reopens
# that class and re-runs the DSL. Confirm which definition is intended and
# remove the other.
class AddPoints < RestMethod
  # Fixed typo in the generated docs: "memeber" -> "member".
  desc "Add to member offset"
  uri_path '/points/add/:userId/:as'
  # Fixed stray space before the comma (was `:userId ,`).
  param :userId, :required => true
end
# RSS feed variant: photos belonging to a specific user; mediaType is fixed
# to 'photo' (positional value form of RssMethod's `param`).
class PhotosFeed < RssMethod
  desc "list photos for a specific user"
  param :mediaType, 'photo'
end
end
end
# NOTE(review): the four lines below were dataset-viewer boilerplate
# ("Subsets and Splits / No community queries yet / The top public SQL
# queries from the community will appear here once available.") accidentally
# captured into this source file. They are not Ruby and broke the file's
# syntax; they have been commented out pending removal.