CombinedText stringlengths 4 3.42M |
|---|
require 'forwardable'
module Directlog
  # Wraps a Savon SOAP response for one service method and exposes the
  # parsed "<method>_result" payload.
  class Response
    include Enumerable
    extend Forwardable

    # String conversion is delegated to the underlying Savon response.
    def_delegators :@savon, :to_s

    # @param method [String, Symbol] the SOAP operation name (snake_case)
    # @param savon [Object] the Savon response (must respond to #hash)
    def initialize(method, savon)
      @method = method
      @savon = savon
    end

    # Parses and returns the result node of the SOAP envelope.
    def result
      @result = doc
    end

    private

    # Digs the "<method>_result" node out of the envelope body and parses it.
    def doc
      body = @savon.hash[:envelope][:body]
      response_node = body[:"#{@method}_response"]
      xml_parser(response_node[:"#{@method}_result"])
    end

    # Parses an XML fragment into a Hash with snake_cased symbol keys.
    def xml_parser(xml)
      parser = Nori.new(convert_tags_to: ->(tag) { tag.snakecase.to_sym })
      parser.parse(xml)
    end
  end
end
(libResponse): separate response and result handling to cope with a null result
require 'forwardable'
module Directlog
  # Wraps a Savon SOAP response and exposes the parsed result payload,
  # falling back to the raw XML attached to the response node when the
  # "<method>_result" node is absent (null result).
  class Response
    include Enumerable
    extend Forwardable

    def_delegators :@savon, :to_s

    # @param method [String, Symbol] the SOAP operation name (snake_case)
    # @param savon [Object] the Savon response (must respond to #hash)
    def initialize(method, savon)
      @method = method
      @savon = savon
    end

    # Parses and returns the result payload of the SOAP call.
    def result
      @result = doc
    end

    private

    # Prefers the result node; otherwise parses the response node's :xml.
    def doc
      source = node_result || node_response[:xml]
      xml_parser(source)
    end

    # Parses an XML fragment into a Hash with snake_cased symbol keys.
    def xml_parser(xml)
      Nori.new(convert_tags_to: ->(tag) { tag.snakecase.to_sym }).parse(xml)
    end

    # The "<method>_response" node of the SOAP envelope body.
    def node_response
      @savon.hash[:envelope][:body][:"#{@method}_response"]
    end

    # The "<method>_result" node, or nil when the service returned no result.
    def node_result
      node_response[:"#{@method}_result"]
    end
  end
end
|
# Worker that checks in with a central server, pulls test items, runs them
# through a runner on a background thread, and publishes results and logs.
class DockerTest::Worker
  def initialize(runner_class, server)
    # Unique worker name: host name plus a random hex suffix.
    @name = `hostname`.strip + '-' + SecureRandom.hex
    @runner_class = runner_class
    @server = server
    # Items flow to the runner through @input_queue; results come back
    # through @output_queue.
    @input_queue = Queue.new
    @output_queue = Queue.new
  end

  # Lazily-built logger that prefixes messages with this worker's name and
  # forwards them to the remote server.
  def logger
    @logger ||= RemoteServerLogger.new("WORKER #{@name}", @server)
  end

  # Starts the runner loop on a background thread.
  def start_runner
    @runner_thread = Thread.new { @runner.run_each(@input_queue, @output_queue) }
  end

  # Dispatches a server message to the matching handler; unknown message
  # types are only logged.
  def handle_message(message)
    case message
    when DockerTest::Message::Item
      handle_example(message)
    when DockerTest::Message::ZeroItems
      handle_zero_items
    when DockerTest::Message::RunnerArgs
      handle_runner_args(message.payload)
    else
      logger.warn("invalid received message: #{message}")
    end
  end

  # Feeds one item to the runner and publishes every result until the
  # runner signals it has finished with this item.
  def handle_example(message)
    @input_queue << message.payload
    loop do
      result = @output_queue.pop
      break if result.instance_of?(DockerTest::Message::Finished)
      publish_result(result)
    end
  end

  # Builds the runner from the server-supplied arguments and starts it.
  def handle_runner_args(payload)
    logger.debug("runner_args arguments: #{payload}")
    @runner = @runner_class.new(payload, logger)
    start_runner
  end

  # No items left: flag the main loop to quit, ask the runner to stop, and
  # wait for its thread to exit.
  def handle_zero_items
    @quit = true
    @input_queue << DockerTest::Message::Stop.new
    @runner_thread.join
  end

  def publish_result(result)
    @server.return_result(result)
  end

  # Main entry point: check in, fetch runner arguments, then process items
  # until the server reports there are none left.
  def start
    logger.info("starting worker")
    @server.check_in(@name)
    runner_args = @server.get_runner_args
    handle_message(runner_args)
    loop do
      message = @server.get_item
      handle_message(message)
      break if @quit
    end
    @server.check_out(@name)
  rescue StandardError => e
    logger.warn("exception raised: #{e}")
    # Log the backtrace line by line so failures can be diagnosed from the
    # server side; previously only the message was logged.
    e.backtrace.each do |line|
      logger.warn(" #{line}")
    end
  end

  # Logger adapter that prefixes messages and ships them to the server.
  class RemoteServerLogger
    def initialize(prefix, server)
      @prefix = prefix
      @server = server
    end

    def wrapped_message(msg)
      "#{@prefix} - #{msg}"
    end

    # Define #debug, #info and #warn, all forwarding to the server's log.
    [:debug, :info, :warn].each do |name|
      define_method(name) do |msg|
        @server.log(name, wrapped_message(msg))
      end
    end
  end
end
Show backtrace
# Worker that checks in with a central server, pulls test items, runs them
# through a runner on a background thread, and publishes results and logs.
class DockerTest::Worker
  def initialize(runner_class, server)
    # Unique worker name: host name plus a random hex suffix.
    @name = `hostname`.strip + '-' + SecureRandom.hex
    @runner_class = runner_class
    @server = server
    # Items flow to the runner through @input_queue; results come back
    # through @output_queue.
    @input_queue = Queue.new
    @output_queue = Queue.new
  end

  # Lazily-built logger that prefixes messages with this worker's name and
  # forwards them to the remote server.
  def logger
    @logger ||= RemoteServerLogger.new("WORKER #{@name}", @server)
  end

  # Starts the runner loop on a background thread.
  def start_runner
    @runner_thread = Thread.new { @runner.run_each(@input_queue, @output_queue) }
  end

  # Dispatches a server message to the matching handler; unknown message
  # types are only logged.
  def handle_message(message)
    case message
    when DockerTest::Message::Item
      handle_example(message)
    when DockerTest::Message::ZeroItems
      handle_zero_items
    when DockerTest::Message::RunnerArgs
      handle_runner_args(message.payload)
    else
      logger.warn("invalid received message: #{message}")
    end
  end

  # Feeds one item to the runner and publishes every result until the
  # runner signals Finished for this item.
  def handle_example(message)
    @input_queue << message.payload
    loop do
      result = @output_queue.pop
      break if result.instance_of?(DockerTest::Message::Finished)
      publish_result(result)
    end
  end

  # Builds the runner from the server-supplied arguments and starts it.
  def handle_runner_args(payload)
    logger.debug("runner_args arguments: #{payload}")
    @runner = @runner_class.new(payload, logger)
    start_runner
  end

  # No items left: flag the main loop to quit, ask the runner to stop, and
  # wait for its thread to exit.
  def handle_zero_items
    @quit = true
    @input_queue << DockerTest::Message::Stop.new
    @runner_thread.join
  end

  def publish_result(result)
    @server.return_result(result)
  end

  # Main entry point: check in, fetch runner arguments, then process items
  # until the server reports there are none left. Exceptions are logged
  # (message plus backtrace) rather than propagated.
  def start
    logger.info("starting worker")
    @server.check_in(@name)
    runner_args = @server.get_runner_args
    handle_message(runner_args)
    loop do
      message = @server.get_item
      handle_message(message)
      break if @quit
    end
    @server.check_out(@name)
  rescue StandardError => e
    logger.warn("exception raised: #{e}")
    e.backtrace.each do |line|
      logger.warn(" #{line}")
    end
  end

  # Logger adapter that prefixes messages and ships them to the server.
  class RemoteServerLogger
    def initialize(prefix, server)
      @prefix = prefix
      @server = server
    end

    def wrapped_message(msg)
      "#{@prefix} - #{msg}"
    end

    # Define #debug, #info and #warn, all forwarding to the server's log.
    [:debug, :info, :warn].each do |name|
      define_method(name) do |msg|
        @server.log(name, wrapped_message(msg))
      end
    end
  end
end
|
module Docs
  # Scraper definition for the Node.js API documentation.
  class Node < UrlScraper
    self.name = 'Node.js'
    self.slug = 'node'
    self.type = 'node'
    self.links = {
      home: 'https://nodejs.org/',
      code: 'https://github.com/nodejs/node'
    }

    # HTML filter pipeline applied to each scraped page.
    html_filters.push 'node/clean_html', 'node/entries', 'title'

    options[:title] = false
    options[:root_title] = 'Node.js'
    # Only the API content column of each page is captured.
    options[:container] = '#apicontent'
    # Aggregate/index pages contain no documentation entries.
    options[:skip] = %w(index.html all.html documentation.html synopsis.html)
    options[:attribution] = <<-HTML
© Joyent, Inc. and other Node contributors<br>
Licensed under the MIT License.<br>
Node.js is a trademark of Joyent, Inc. and is used with its permission.<br>
We are not endorsed by or affiliated with Joyent.
    HTML

    # Default (current) version, scraped from the live API docs.
    version do
      self.release = '6.2.2'
      self.base_url = 'https://nodejs.org/api/'
    end

    # LTS line, scraped from the version-pinned dist docs.
    version '4 LTS' do
      self.release = '4.4.6'
      self.base_url = "https://nodejs.org/dist/v#{release}/docs/api/"
    end
  end
end
Update Node.js documentation (6.3.1, 4.4.7)
module Docs
  # Scraper definition for the Node.js API documentation.
  class Node < UrlScraper
    self.name = 'Node.js'
    self.slug = 'node'
    self.type = 'node'
    self.links = {
      home: 'https://nodejs.org/',
      code: 'https://github.com/nodejs/node'
    }

    # HTML filter pipeline applied to each scraped page.
    html_filters.push 'node/clean_html', 'node/entries', 'title'

    options[:title] = false
    options[:root_title] = 'Node.js'
    # Only the API content column of each page is captured.
    options[:container] = '#apicontent'
    # Aggregate/index pages contain no documentation entries.
    options[:skip] = %w(index.html all.html documentation.html synopsis.html)
    options[:attribution] = <<-HTML
© Joyent, Inc. and other Node contributors<br>
Licensed under the MIT License.<br>
Node.js is a trademark of Joyent, Inc. and is used with its permission.<br>
We are not endorsed by or affiliated with Joyent.
    HTML

    # Default (current) version, scraped from the live API docs.
    version do
      self.release = '6.3.1'
      self.base_url = 'https://nodejs.org/api/'
    end

    # LTS line, scraped from the version-pinned dist docs.
    version '4 LTS' do
      self.release = '4.4.7'
      self.base_url = "https://nodejs.org/dist/v#{release}/docs/api/"
    end
  end
end
|
require 'doorkeeper/request/authorization_code'
require 'doorkeeper/request/client_credentials'
require 'doorkeeper/request/code'
require 'doorkeeper/request/password'
require 'doorkeeper/request/refresh_token'
require 'doorkeeper/request/token'
module Doorkeeper
  # Maps OAuth strategy names onto their request-handler classes.
  module Request
    extend self

    # Resolves an authorization-endpoint strategy ("code" or "token").
    #
    # @raise [Errors::InvalidAuthorizationStrategy] for any other name
    def authorization_strategy(strategy)
      get_strategy(strategy, %w(code token))
    rescue NameError
      raise Errors::InvalidAuthorizationStrategy
    end

    # Resolves a token-endpoint grant strategy.
    #
    # @raise [Errors::InvalidTokenStrategy] for any other name
    def token_strategy(strategy)
      get_strategy(strategy, %w(password client_credentials authorization_code refresh_token))
    rescue NameError
      raise Errors::InvalidTokenStrategy
    end

    # Looks up the request class for +strategy+, restricted to +available+.
    #
    # @raise [Errors::MissingRequestStrategy] when the strategy is blank
    # @raise [NameError] when the name is not in the allowed list
    def get_strategy(strategy, available)
      raise Errors::MissingRequestStrategy unless strategy.present?
      raise NameError unless available.include?(strategy.to_s)
      "Doorkeeper::Request::#{strategy.to_s.camelize}".constantize
    end
  end
end
Use module_function instead of extending self
require 'doorkeeper/request/authorization_code'
require 'doorkeeper/request/client_credentials'
require 'doorkeeper/request/code'
require 'doorkeeper/request/password'
require 'doorkeeper/request/refresh_token'
require 'doorkeeper/request/token'
module Doorkeeper
  # Maps OAuth strategy names onto their request-handler classes.
  module Request
    module_function

    # Resolves an authorization-endpoint strategy ("code" or "token").
    #
    # @raise [Errors::InvalidAuthorizationStrategy] for any other name
    def authorization_strategy(strategy)
      get_strategy strategy, %w(code token)
    rescue NameError
      raise Errors::InvalidAuthorizationStrategy
    end

    # Resolves a token-endpoint grant strategy.
    #
    # @raise [Errors::InvalidTokenStrategy] for any other name
    def token_strategy(strategy)
      get_strategy strategy, %w(password client_credentials authorization_code refresh_token)
    rescue NameError
      raise Errors::InvalidTokenStrategy
    end

    # Looks up the request class for +strategy+, restricted to +available+.
    #
    # @raise [Errors::MissingRequestStrategy] when the strategy is blank
    # @raise [NameError] when the name is not in the allowed list
    def get_strategy(strategy, available)
      fail Errors::MissingRequestStrategy unless strategy.present?
      fail NameError unless available.include?(strategy.to_s)
      "Doorkeeper::Request::#{strategy.to_s.camelize}".constantize
    end
  end
end
|
require 'socket'
require 'json'
require 'dredd_hooks/runner'
module DreddHooks
  # The hooks worker TCP server.
  #
  # Listens on a local port for newline-delimited JSON messages from Dredd,
  # runs the registered hooks for each event, and writes the (possibly
  # modified) transaction back to the client.
  class Server
    HOST = '127.0.0.1'
    PORT = 61321
    # Messages on the wire are separated by newlines.
    MESSAGE_DELIMITER = "\n"

    # Runs the hooks registered for the message's event against its
    # transaction data and replies to +client+ with the result.
    # Unknown events echo the transaction back unchanged.
    def process_message(message, client)
      event = message['event']
      transaction = message['data']
      # Dispatch on the event name; previously a chain of independent ifs.
      case event
      when "beforeEach"
        transaction = DreddHooks::Runner.run_before_each_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_before_hooks_for_transaction(transaction)
      when "beforeEachValidation"
        transaction = DreddHooks::Runner.run_before_each_validation_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_before_validation_hooks_for_transaction(transaction)
      when "afterEach"
        transaction = DreddHooks::Runner.run_after_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_after_each_hooks_for_transaction(transaction)
      when "beforeAll"
        transaction = DreddHooks::Runner.run_before_all_hooks_for_transaction(transaction)
      when "afterAll"
        transaction = DreddHooks::Runner.run_after_all_hooks_for_transaction(transaction)
      end
      to_send = {
        "uuid" => message['uuid'],
        "event" => event,
        "data" => transaction
      }.to_json
      client.puts to_send + "\n"
    end

    # Accept loop: reads from each client in small chunks, splits complete
    # messages on the delimiter, and processes them in order. A partial
    # trailing message is kept in the buffer until the rest arrives.
    def run
      # Note: the unused class-level `@server = nil` was removed; @server is
      # an instance variable assigned here.
      @server = TCPServer.new HOST, PORT
      loop do
        client = @server.accept
        STDERR.puts 'Dredd connected to Ruby Dredd hooks worker'
        buffer = ""
        while (data = client.recv(10))
          buffer += data
          if buffer.include? MESSAGE_DELIMITER
            splitted_buffer = buffer.split(MESSAGE_DELIMITER)
            buffer = ""
            messages = []
            splitted_buffer.each do |message|
              begin
                messages.push JSON.parse(message)
              rescue JSON::ParserError
                # If the text after the delimiter is not parseable JSON, it's
                # a chunk of the next message; put it back into the buffer.
                buffer += message
              end
            end
            messages.each do |message|
              process_message message, client
            end
          end
        end
        client.close
      end
    end
  end
end
Minor refactor: remove unused variable
require 'socket'
require 'json'
require 'dredd_hooks/runner'
module DreddHooks
  # The hooks worker TCP server.
  #
  # Listens on a local port for newline-delimited JSON messages from Dredd,
  # runs the registered hooks for each event, and writes the (possibly
  # modified) transaction back to the client.
  class Server
    HOST = '127.0.0.1'
    PORT = 61321
    # Messages on the wire are separated by newlines.
    MESSAGE_DELIMITER = "\n"

    # Runs the hooks registered for the message's event against its
    # transaction data and replies to +client+ with the result.
    # Unknown events echo the transaction back unchanged.
    def process_message message, client
      event = message['event']
      transaction = message['data']
      if event == "beforeEach"
        transaction = DreddHooks::Runner.run_before_each_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_before_hooks_for_transaction(transaction)
      end
      if event == "beforeEachValidation"
        transaction = DreddHooks::Runner.run_before_each_validation_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_before_validation_hooks_for_transaction(transaction)
      end
      if event == "afterEach"
        transaction = DreddHooks::Runner.run_after_hooks_for_transaction(transaction)
        transaction = DreddHooks::Runner.run_after_each_hooks_for_transaction(transaction)
      end
      if event == "beforeAll"
        transaction = DreddHooks::Runner.run_before_all_hooks_for_transaction(transaction)
      end
      if event == "afterAll"
        transaction = DreddHooks::Runner.run_after_all_hooks_for_transaction(transaction)
      end
      to_send = {
        "uuid" => message['uuid'],
        "event" => event,
        "data" => transaction
      }.to_json
      client.puts to_send + "\n"
    end

    # Accept loop: reads from each client in small chunks, splits complete
    # messages on the delimiter, and processes them in order. A partial
    # trailing message is kept in the buffer until the rest arrives.
    def run
      @server = TCPServer.new HOST, PORT
      loop do
        #Thread.abort_on_exception=true
        client = @server.accept
        STDERR.puts 'Dredd connected to Ruby Dredd hooks worker'
        buffer = ""
        while (data = client.recv(10))
          buffer += data
          if buffer.include? MESSAGE_DELIMITER
            splitted_buffer = buffer.split(MESSAGE_DELIMITER)
            buffer = ""
            messages = []
            splitted_buffer.each do |message|
              begin
                messages.push JSON.parse(message)
              rescue JSON::ParserError
                # If the text after the delimiter is not parseable JSON, it's
                # a chunk of the next message; put it back into the buffer.
                buffer += message
              end
            end
            messages.each do |message|
              process_message message, client
            end
          end
        end
        client.close
      end
    end
  end
end
|
module Eatl
  # Renders a Graphviz DOT diagram for a set of table schemas via an ERB
  # template.
  class DotGenerator
    attr_reader :tables

    # Resolve the default template relative to this source file rather than
    # the process working directory, so the generator works regardless of
    # where it is invoked from (the old 'lib/eatl/...' path only worked from
    # the project root).
    DEFAULT_TEMPLATE_PATH = File.expand_path('dot_template.dot', __dir__)

    # @param schema_paths [String, Array<String>] one or more YAML schema files
    # @param template_path [String] path to the ERB/DOT template
    def initialize(schema_paths, template_path: DEFAULT_TEMPLATE_PATH)
      @tables = Array[schema_paths].flatten.map { |s| Schema.new(YAML.load(File.read(s))) }
      @template_path = template_path
    end

    # Renders the template in the context of this generator, so the template
    # can call the private helpers below.
    def to_dot
      ERB.new(File.read(@template_path), nil, '-').result(binding)
    end

    private

    # True when the table referenced by "table.column" is among the loaded
    # schemas, i.e. an arrow to it can be drawn.
    def table_included?(belongs_to_str)
      table_name, _ = belongs_to_str.split('.')
      @tables.any? { |t| t.table_name == table_name }
    end

    # DOT node:port target string for a "table.column" reference.
    def arrow_target(belongs_to_str)
      table_name, column = belongs_to_str.split('.')
      "\"#{table_name}\":\"#{column}\""
    end
  end
end
Resolve the default template path relative to this source file
module Eatl
  # Renders a Graphviz DOT diagram for a set of table schemas via an ERB
  # template.
  class DotGenerator
    attr_reader :tables

    # Template shipped next to this source file.
    DEFAULT_TEMPLATE_PATH = File.join(File.dirname(__FILE__), 'dot_template.dot')

    # @param schema_paths [String, Array<String>] one or more YAML schema files
    # @param template_path [String] path to the ERB/DOT template
    def initialize(schema_paths, template_path: DEFAULT_TEMPLATE_PATH)
      @tables = [schema_paths].flatten.map { |path| Schema.new(YAML.load(File.read(path))) }
      @template_path = template_path
    end

    # Renders the template in the context of this generator, so the template
    # can call the private helpers below.
    def to_dot
      template = File.read(@template_path)
      ERB.new(template, nil, '-').result(binding)
    end

    private

    # True when the table referenced by "table.column" is among the loaded
    # schemas.
    def table_included?(belongs_to_str)
      table_name = belongs_to_str.split('.').first
      @tables.any? { |t| t.table_name == table_name }
    end

    # DOT node:port target string for a "table.column" reference.
    def arrow_target(belongs_to_str)
      table_name, column = belongs_to_str.split('.')
      %("#{table_name}":"#{column}")
    end
  end
end
|
require "ecommerce/exception"
module Ecommerce
  # NOTE(review): these inherit from Exception, so a bare `rescue` will NOT
  # catch them — confirm callers rescue these classes explicitly.
  RequestTimeout = Class.new(Exception)
  RequestError = Class.new(Exception)

  # Decorates an HTTP response object and resolves it into one of:
  # success/redirect (returned or yielded), timeout (RequestTimeout), or
  # anything else (RequestError).
  class Response < SimpleDelegator
    # Returns self on success or redirect, or yields self when a block is
    # given. Raises RequestTimeout on timeout, RequestError otherwise.
    def resolve!(&block)
      if success? || redirected?
        block ? block.call(self) : self
      elsif timed_out?
        timeout!
      else
        error!
      end
    end

    # A 3xx response code counts as redirected.
    def redirected?
      code = response_code
      code && code >= 300 && code < 400
    end

    private

    def timeout!
      raise RequestTimeout
    end

    # Raises RequestError carrying the response details as its message.
    def error!
      details = { code: code, message: status_message, body: body }
      raise RequestError.new(details)
    end
  end
end
Removes redirect cases
require "ecommerce/exception"
module Ecommerce
  # NOTE(review): these inherit from Exception, so a bare `rescue` will NOT
  # catch them — confirm callers rescue these classes explicitly.
  RequestTimeout = Class.new(Exception)
  RequestError = Class.new(Exception)

  # Decorates an HTTP response object and resolves it into one of:
  # success (returned or yielded), timeout (RequestTimeout), or anything
  # else — including redirects — (RequestError).
  class Response < SimpleDelegator
    # Returns self on success, or yields self when a block is given.
    # Raises RequestTimeout on timeout, RequestError otherwise.
    def resolve!(&block)
      if success?
        block ? block.call(self) : self
      elsif timed_out?
        timeout!
      else
        error!
      end
    end

    private

    def timeout!
      raise RequestTimeout
    end

    # Raises RequestError carrying the response details as its message.
    def error!
      details = { code: code, message: status_message, body: body }
      raise RequestError.new(details)
    end
  end
end
|
module EventMachine
  module Socksify
    # Base error for SOCKS failures; the concrete subclasses below carry a
    # canned human-readable message.
    class SOCKSError < Exception
      # Builds a subclass whose instances default their message to +message+.
      #
      # Bug fix: the previous `def initialize; super(message); end` inside
      # Class.new opened a fresh scope, so `message` was not visible there
      # and instantiating any subclass raised NameError. define_method keeps
      # the closure over +message+. (Also removes the space after `define`,
      # which triggered a "parentheses interpreted as argument list" warning.)
      def self.define(message)
        Class.new(self) do
          define_method(:initialize) do |msg = message|
            super(msg)
          end
        end
      end

      ServerFailure = define('general SOCKS server failure')
      NotAllowed = define('connection not allowed by ruleset')
      NetworkUnreachable = define('Network unreachable')
      HostUnreachable = define('Host unreachable')
      ConnectionRefused = define('Connection refused')
      TTLExpired = define('TTL expired')
      CommandNotSupported = define('Command not supported')
      AddressTypeNotSupported = define('Address type not supported')

      # Maps a SOCKS5 reply code (Integer, or single-character String whose
      # ordinal is used) to the matching error class; returns SOCKSError
      # itself for unknown codes.
      def self.for_response_code(code)
        case code.is_a?(String) ? code.ord : code
        when 1 then ServerFailure
        when 2 then NotAllowed
        when 3 then NetworkUnreachable
        when 4 then HostUnreachable
        when 5 then ConnectionRefused
        when 6 then TTLExpired
        when 7 then CommandNotSupported
        when 8 then AddressTypeNotSupported
        else self
        end
      end
    end
  end

  module Connectify
    # Raised when an HTTP CONNECT tunnel cannot be established.
    class CONNECTError < Exception
    end
  end
end
Remove space between method call and parentheses.
Avoid `warning: parentheses after method name is interpreted as an argument list, not a decomposed argument`
module EventMachine
  module Socksify
    # Base error for SOCKS failures; the concrete subclasses below carry a
    # canned human-readable message.
    class SOCKSError < Exception
      # Builds a subclass whose instances default their message to +message+.
      #
      # Bug fix: the previous `def initialize; super(message); end` inside
      # Class.new opened a fresh scope, so `message` was not visible there
      # and instantiating any subclass raised NameError. define_method keeps
      # the closure over +message+.
      def self.define(message)
        Class.new(self) do
          define_method(:initialize) do |msg = message|
            super(msg)
          end
        end
      end

      ServerFailure = define('general SOCKS server failure')
      NotAllowed = define('connection not allowed by ruleset')
      NetworkUnreachable = define('Network unreachable')
      HostUnreachable = define('Host unreachable')
      ConnectionRefused = define('Connection refused')
      TTLExpired = define('TTL expired')
      CommandNotSupported = define('Command not supported')
      AddressTypeNotSupported = define('Address type not supported')

      # Maps a SOCKS5 reply code (Integer, or single-character String whose
      # ordinal is used) to the matching error class; returns SOCKSError
      # itself for unknown codes.
      def self.for_response_code(code)
        case code.is_a?(String) ? code.ord : code
        when 1 then ServerFailure
        when 2 then NotAllowed
        when 3 then NetworkUnreachable
        when 4 then HostUnreachable
        when 5 then ConnectionRefused
        when 6 then TTLExpired
        when 7 then CommandNotSupported
        when 8 then AddressTypeNotSupported
        else self
        end
      end
    end
  end

  module Connectify
    # Raised when an HTTP CONNECT tunnel cannot be established.
    class CONNECTError < Exception
    end
  end
end
|
class Ettu
  # Configuration store for Ettu, backed by ActiveSupport::OrderedOptions
  # (options are readable/writable as methods).
  class Configuration < ActiveSupport::OrderedOptions
    def initialize
      super
      set_defaults
    end

    # Restores every option to its default value.
    def reset
      set_defaults
    end

    private

    # Seeds the default option values.
    def set_defaults
      self.js = 'application.js'
      self.css = 'application.css'
      self.assets = []
      # Don't actually set view by default.
      # This'll allow #fetch to return the real default
      # at runtime.
      # self.view = "#{controller_name}/#{action_name}"
      delete :view if key? :view
      if defined? ActionView::Digestor
        # Rails 4 ships template digesting in ActionView.
        self.template_digestor = ActionView::Digestor
      elsif defined? CacheDigests::TemplateDigestor
        # Fall back to the cache_digests gem (Rails 3) when ActionView's
        # digestor is not available.
        self.template_digestor = CacheDigests::TemplateDigestor
      end
    end
  end
end
Try to use CacheDigests if ActionView::Digestor isn't around.
class Ettu
  # Configuration store for Ettu, backed by ActiveSupport::OrderedOptions
  # (options are readable/writable as methods).
  class Configuration < ActiveSupport::OrderedOptions
    def initialize
      super
      set_defaults
    end

    # Restores every option to its default value.
    def reset
      set_defaults
    end

    private

    # Seeds the default option values.
    def set_defaults
      self.js = 'application.js'
      self.css = 'application.css'
      self.assets = []
      # Don't actually set view by default.
      # This'll allow #fetch to return the real default
      # at runtime.
      # self.view = "#{controller_name}/#{action_name}"
      delete :view if key? :view
      if defined? ActionView::Digestor
        # Attempt to use ActionView::Digestor on Rails 4
        self.template_digestor = ActionView::Digestor
      elsif defined? CacheDigests::TemplateDigestor
        # Attempt to use CacheDigests::TemplateDigestor on Rails 3
        self.template_digestor = CacheDigests::TemplateDigestor
      end
    end
  end
end
|
module Facebooker #:nodoc:
  # Gem version, exposed as numeric components plus the joined STRING form.
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 0
    TINY = 1
    STRING = "#{MAJOR}.#{MINOR}.#{TINY}"
  end
end
updating version
git-svn-id: abd3364c23bece18756deaf5558fcfa62d3b1b71@60 06148572-b36b-44fe-9aa8-f68b04d8b080
module Facebooker #:nodoc:
  # Gem version, exposed as numeric components plus the joined STRING form.
  module VERSION #:nodoc:
    MAJOR = 0
    MINOR = 5
    TINY = 0
    STRING = "#{MAJOR}.#{MINOR}.#{TINY}"
  end
end
|
# FeastFast gem namespace.
module FeastFast
  # Current gem version string.
  VERSION = '0.1.0'
end
version bump
# FeastFast gem namespace.
module FeastFast
  # Current gem version string.
  VERSION = '0.2.0'
end
|
module Filters
  # A named group of filters with single- or multi-select semantics.
  # Tracks which values are currently selected and computes the query-param
  # string that would result from toggling any filter.
  class FilterSet
    include Enumerable

    attr_reader :name

    # @param name [String] the filter-set (parameter) name
    # @param multi_select_allowed [Boolean] whether several values may be selected
    # @param selected_value [String] current selection; comma-separated when
    #   multi-select is allowed
    def initialize(name, multi_select_allowed, selected_value)
      @name = name
      @multi_select_allowed = multi_select_allowed
      # Flattened from a nested ternary for readability.
      @selected_values =
        if multi_select_allowed
          selected_value.split(',')
        elsif selected_value.empty?
          []
        else
          [selected_value]
        end
      @filters = []
    end

    # Appends a filter, marked selected when its value is currently chosen.
    def add_filter(name, value)
      @filters << Filter.new(self, name, value, @selected_values.include?(value))
    end

    def each(&block)
      @filters.each(&block)
    end

    # All filters whose value is currently selected.
    def selected_filters
      select(&:selected?)
    end

    def multi_select_allowed?
      @multi_select_allowed
    end

    # Returns the "name:v1,v2" parameter string that results from toggling
    # +filter+, or "" when toggling would leave nothing selected.
    def params_for_filter(filter)
      # Single-select starts from scratch; multi-select keeps other selections.
      starting_values = multi_select_allowed? ? @selected_values : []
      new_values = filter.selected? ? starting_values - [filter.value] : starting_values + [filter.value]
      # `.join` directly; the old `.map.join` built a useless enumerator.
      new_values.empty? ? "" : "#{name}:#{new_values.join(",")}"
    end

    # A single selectable value within a FilterSet.
    class Filter
      attr_reader :name, :value

      def initialize(filter_set, name, value, selected)
        @filter_set, @name, @value, @selected = filter_set, name, value, selected
      end

      # Parameter string produced by toggling this filter.
      def value_after_toggle
        @filter_set.params_for_filter(self)
      end

      def selected?
        @selected
      end
    end
  end
end
Refactor nested if
module Filters
  # A named group of filters with single- or multi-select semantics.
  # Tracks which values are currently selected and computes the query-param
  # string that would result from toggling any filter.
  class FilterSet
    include Enumerable

    attr_reader :name

    # @param name [String] the filter-set (parameter) name
    # @param multi_select_allowed [Boolean] whether several values may be selected
    # @param selected_value [String] current selection; comma-separated when
    #   multi-select is allowed
    def initialize(name, multi_select_allowed, selected_value)
      @name = name
      @multi_select_allowed = multi_select_allowed
      # Flattened from a nested ternary for readability.
      @selected_values =
        if multi_select_allowed
          selected_value.split(',')
        elsif selected_value.empty?
          []
        else
          [selected_value]
        end
      @filters = []
    end

    # Appends a filter, marked selected when its value is currently chosen.
    def add_filter(name, value)
      @filters << Filter.new(self, name, value, @selected_values.include?(value))
    end

    def each(&block)
      @filters.each(&block)
    end

    # All filters whose value is currently selected.
    def selected_filters
      select(&:selected?)
    end

    def multi_select_allowed?
      @multi_select_allowed
    end

    # Returns the "name:v1,v2" parameter string that results from toggling
    # +filter+, or "" when toggling would leave nothing selected.
    def params_for_filter(filter)
      # Single-select starts from scratch; multi-select keeps other selections.
      starting_values = multi_select_allowed? ? @selected_values : []
      new_values = filter.selected? ? starting_values - [filter.value] : starting_values + [filter.value]
      # `.join` directly; the old `.map.join` built a useless enumerator.
      new_values.empty? ? "" : "#{name}:#{new_values.join(",")}"
    end

    # A single selectable value within a FilterSet.
    class Filter
      attr_reader :name, :value

      def initialize(filter_set, name, value, selected)
        @filter_set, @name, @value, @selected = filter_set, name, value, selected
      end

      # Parameter string produced by toggling this filter.
      def value_after_toggle
        @filter_set.params_for_filter(self)
      end

      def selected?
        @selected
      end
    end
  end
end
|
module Fl::Framework
# Generic query support.
# This module defines a number of general support methods used by various query packages.
module Query
protected
# Converts a list of references to a list of object identifiers.
# This method takes an array containing references to objects of a single class, and returns
# an array of object identifiers for all the converted references.
# The elements of *rl* are one of the following.
#
# - An integer value is assumed to be an object identifier and is added to the return value as is.
# - If the value is an instance of *klass*, the return from the value's `id` method is added to
# the result.
# - If the value is a String, check if it is an integer representation (it contains just numeric
# characters); if so, convert it to an integer and add it to the result.
# Otherwise, treat it as a fingerprint: call {ActiveRecord::Base.split_fingerprint} and, if
# the fingerprint is a reference to an instance of *klass*, add the **id** component to the
# result value.
#
# Note that elements that do not match any of these conditions are dropped from the return value.
#
# @param rl [Array<Integer,String,ActiveRecord::Base>] The array of references to convert.
# @param klass [Class] The ActiveRecord::Base subclass for the references.
#
# @return [Array<Integer>] Returns an array of object identifiers.
def convert_list_of_references(rl, klass)
  rl.reduce([ ]) do |acc, r|
    if r.is_a?(Integer)
      acc << r
    elsif r.is_a?(klass)
      acc << r.id
    elsif r.is_a?(String)
      # \A/\z anchor the whole string; the previous ^/$ anchors matched
      # per-line, so e.g. "12\nabc" was accepted as numeric.
      if r =~ /\A[0-9]+\z/
        acc << r.to_i
      else
        # Only the id component matters; split_fingerprint returns a nil id
        # when the fingerprint does not reference +klass+. (Renamed the
        # previously unused `c` local to `_`.)
        _, id = ActiveRecord::Base.split_fingerprint(r, klass)
        acc << id.to_i unless id.nil?
      end
    end
    acc
  end
end
# Converts a list of polymorphic references to a list of object fingerprints.
# This method takes an array containing references to objects of potentially different classes, and
# returns an array of object fingerprints for all the converted references.
# The elements of *rl* are one of the following.
#
# - If the value is an instance of a subclass of `ActiveRecord::Base`, the return from the value's
# `fingerprint` method is added to the result.
# - If the value is a String, treat it as a fingerprint: call {ActiveRecord::Base.split_fingerprint}
# and, if the result indicates a valid fingerprint, add it to the return value.
#
# Note that elements that do not match any of these conditions are dropped from the return value.
#
# @param rl [Array<Integer,String,ActiveRecord::Base>] The array of references to convert.
#
# @return [Array<String>] Returns an array of object fingerprints.
def convert_list_of_polymorphic_references(rl)
  rl.each_with_object([ ]) do |r, acc|
    if r.is_a?(ActiveRecord::Base)
      # Only objects that expose a fingerprint are converted.
      acc << r.fingerprint if r.respond_to?(:fingerprint)
    elsif r.is_a?(String)
      # Technically, we could get the class from the name, check that it exists and that it is
      # a subclass of ActiveRecord::Base, but for the time being we don't
      c, id = ActiveRecord::Base.split_fingerprint(r)
      acc << r unless c.nil? || id.nil?
    end
  end
end
# Partition **only_** and **except_** lists in a set of query options.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using {#convert_list_of_references}. It then generates new values of **only_** and
# **except_** lists from the converted references as follows.
#
# 1. If the **only_** references is empty or not present, the return value contains the references
# as is.
# 2. If the **except_** references is empty or not present, the return value contains the references
# as is.
# 3. If both reference array are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# For example, if *opts* is `{ only_groups: [ 1, 2, 3, 4 ], except_groups: [ 2, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ only_groups: [ 1, 3 ], except_groups: nil }`.
# If *opts* is `{ only_groups: [ 1, 2, 3, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ only_groups: [ 1, 2, 3, 4 ] }`.
# If *opts* is `{ except_groups: [ 2, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ except_groups: [ 2, 4 ] }`.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
# @param klass [Class] The class to pass to {#convert_list_of_references}.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_lists_of_references(opts, suffix, klass)
  # Delegates to partition_filter_lists, which implements the identical
  # only_/except_ partitioning logic, with the conversion step fixed to
  # convert_list_of_references for +klass+. This removes the duplicated
  # partitioning code that previously lived here.
  partition_filter_lists(opts, suffix) { |list| convert_list_of_references(list, klass) }
end
# Partition **only_** and **except_** lists in a set of query options.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using the given block. It then generates new values of **only_** and
# **except_** lists from the converted items as follows.
#
# 1. If the **only_** array is empty or not present, the return value contains the array as is.
# 2. If the **except_** array is empty or not present, the return value contains the array as is.
# 3. If both arrays are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
# @yield [list] The array containing the list to convert.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_filter_lists(opts, suffix)
  only_name = :"only_#{suffix}"
  except_name = :"except_#{suffix}"
  rv = { }

  if opts.has_key?(only_name)
    only_value = opts[only_name]
    rv[only_name] =
      if only_value.nil?
        nil
      else
        # Non-array values are wrapped before conversion.
        yield(only_value.is_a?(Array) ? only_value : [ only_value ])
      end
  end

  if opts.has_key?(except_name)
    except_value = opts[except_name]
    if except_value.nil?
      rv[except_name] = nil
    else
      except_refs = yield(except_value.is_a?(Array) ? except_value : [ except_value ])
      # When an only_ list exists, subtract the except_ entries from it
      # instead of returning a separate except_ list.
      if rv[only_name].is_a?(Array)
        rv[only_name] -= except_refs
      else
        rv[except_name] = except_refs
      end
    end
  end

  rv
end
# Partition **only_** and **except_** lists in a set of query options, for polymorphic references.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using {#convert_list_of_polymorphic_references}. It then generates new values of
# **only_** and **except_** lists from the converted references as follows.
#
# 1. If the **only_** references is empty or not present, the return value contains the references
# as is.
# 2. If the **except_** references is empty or not present, the return value contains the references
# as is.
# 3. If both reference array are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_lists_of_polymorphic_references(opts, suffix)
  # Delegates to partition_filter_lists, which implements the identical
  # only_/except_ partitioning logic, with the conversion step fixed to
  # convert_list_of_polymorphic_references. This removes the duplicated
  # partitioning code that previously lived here.
  partition_filter_lists(opts, suffix) { |list| convert_list_of_polymorphic_references(list) }
end
# Generate the author lists from query options.
# This method builds two lists, one that contains the fingerprints of authors to return
# in the query, and one of authors to ignore in the query.
#
# The method expects the objects in the group lists to respond to the +members+ method, which returns
# the list of group members.
#
# @param opts [Hash] A Hash containing configuration options for the query.
# @option opts [Array<Object, String>, Object, String] :only_authors If given, return only comments
# generated by the given author or, if the value is an array, authors.
# The values are either objects, or strings containing the object's fingerprint
# (see {ActiveRecord::Base#fingerprint}).
# If an author is listed in both *:only_authors* and *:except_authors*, it is removed
# from *:only_authors* before the where clause component is generated; therefore, *:except_authors*
# has higher priority than *:only_authors*.
# @option opts [Array<Object, String>, Object, String] :except_authors If given, return only comments
# not generated by the given author or, if the value is an array, authors.
# See the documentation for *:only_authors*.
# @option opts [Array<Object, String>, Object, String] :only_groups If present, an array of group
# objects (or fingerprints) that contains the list used
# to limit the returned values to comments generated by authors in the groups. A single value
# is converted to an array. Note that the groups are converted to an array of author ids,
# for all the authors in the groups, and a where clause based on that list is added to the query.
# Therefore, this has a similar effect to the *:only_authors* option.
# If both expanded *:only_groups* and *:except_groups* values contain the same author id, that
# author is dropped from the expanded *:only_groups* list; therefore, *:except_groups* has higher
# precedence than *:only_groups*.
# @option opts [Array<Object, String>, Object, String] :except_groups If given, return only comments
# not generated by any members of the group or,
# if the value is an array, groups. See the documentation for *:only_groups*.
# The *:except_groups* option expands to a list of object identifiers for authors whose comments
# should be excluded from the return value; therefore, *:except_groups* acts
# like *:except_authors*.
#
# @return [Hash] Returns a hash with two entries:
# - *:only_ids* is +nil+, to indicate that no "must-have" author selection is requested; or it is
# an array whose elements are authors' fingerprints.
# - *:except_ids* is +nil+, to indicate that no "must-not-have" author selection is requested; or it is
# an array whose elements are authors' fingerprints.
def _expand_author_lists(opts)
only_authors = opts[:only_authors]
only_groups = opts[:only_groups]
except_authors = opts[:except_authors]
except_groups = opts[:except_groups]
return {
:only_ids => nil,
:except_ids => nil
} if only_authors.nil? && only_groups.nil? && except_authors.nil? && except_groups.nil?
# 1. Build the arrays of object identifiers
only_uids = if only_authors
t = (only_authors.is_a?(Array)) ? only_authors : [ only_authors ]
t.map { |u| (u.is_a?(String)) ? u : u.fingerprint }
else
nil
end
if only_groups
t = (only_groups.is_a?(Array)) ? only_groups : [ only_groups ]
glist = t.map { |g| (g.is_a?(String)) ? ActiveRecord::Base.find_by_fingerprint(g) : g }
only_gids = []
glist.each do |g|
if g
g.members.each do |u|
f = u.fingerprint
only_gids << f unless only_gids.include?(f)
end
end
end
else
only_gids = nil
end
except_uids = if except_authors
t = (except_authors.is_a?(Array)) ? except_authors : [ except_authors ]
t.map { |u| (u.is_a?(String)) ? u : u.fingerprint }
else
nil
end
if except_groups
t = (except_groups.is_a?(Array)) ? except_groups : [ except_groups ]
glist = t.map { |g| (g.is_a?(String)) ? ActiveRecord::Base.find_by_fingerprint(g) : g }
except_gids = []
glist.each do |g|
if g
g.members.each do |u|
f = u.fingerprint
except_gids << f unless except_gids.include?(f)
end
end
end
else
except_gids = nil
end
# 2. The list of author ids is the union of the groups/authors arrays
only_ids = (only_uids.nil?) ? nil : only_uids
unless only_gids.nil?
if only_ids.nil?
only_ids = only_gids
else
only_ids |= only_gids
end
end
except_ids = (except_uids.nil?) ? nil : except_uids
unless except_gids.nil?
if except_ids.nil?
except_ids = except_gids
else
except_ids |= except_gids
end
end
# 3. Remove any except ids from the only list
only_ids = only_ids - except_ids if only_ids.is_a?(Array) && except_ids.is_a?(Array)
{
:only_ids => only_ids,
:except_ids => except_ids
}
end
# Partition author lists.
# Calls {#_partition_one_author_list} for each entry in _hlist_, and returns their partitioned values.
#
# @param [Hash] hlist A hash containing author lists.
# @option hlist [Array<String>] :only_ids The fingerprints of the objects to place in the "must-have"
# clauses. Could be +nil+ if no "must-have" objects were requested.
# @option hlist [Array<String>] :except_ids The fingerprints of the objects to place in the "must-not-have"
# clauses. Could be +nil+ if no "must-have" objects were requested.
#
# @return [Hash] Returns a hash containing two entries, *:only_ids* and *:except_ids*, generated as
# described above.
def _partition_author_lists(hlist)
h = { }
if hlist.has_key?(:only_ids) && hlist[:only_ids]
h[:only_ids] = _partition_one_author_list(hlist[:only_ids])
else
h[:only_ids] = nil
end
if hlist.has_key?(:except_ids) && hlist[:except_ids]
h[:except_ids] = _partition_one_author_list(hlist[:except_ids])
else
h[:except_ids] = nil
end
h
end
# Partition a list of authors.
# This method groups all authors whose fingerprints use the same class name, and places in the
# return value an entry whose key is the class name, and whose value is an array of object identifiers
# as extracted from the fingerprints.
# This is how WHERE clauses will be set up.
#
# @param [Array<String>] clist An array of object fingerprints. A +nil+ value causes a +nil+ return value.
#
# @return [Hash] Returns a hash whose keys are the distinct class names from the fingerprints, and
# values the corresponding object identifiers. If _clist_ is +nil+, it returns +nil+.
# Note that the object identifiers are returned as strings, and for some ORMs (Active Record comes to
# mind...), they will likely have to be converted to integers in order to be used in WHERE clauses.
def _partition_one_author_list(clist)
return nil if clist.nil?
h = { }
clist.each do |f|
if f
cname, id = f.split('/')
if h.has_key?(cname)
h[cname] << id
else
h[cname] = [ id ]
end
end
end
h
end
# Parse a timestamp parameter's value.
# The value *value* is either an integer containing a UNIX timestamp, a Time object, or a string
# containing a string representation of the time; the value is converted to a
# {Fl::Framework::Core::Icalendar::Datetime} and returned in that format.
#
# @param value [Integer, Time, String] The timestamp to parse.
#
# @return [Fl::Framework::Core::Icalendar::Datetime, String] On success, returns the parsed timestamp.
# On failure, returns a string containing an error message from the parser.
def _parse_timestamp(value)
begin
return Fl::Framework::Core::Icalendar::Datetime.new(value)
rescue => exc
return exc.message
end
end
# Sets up the parameters for time-related filters.
# For each of the options listed below, the method places a corresponding entry in the return value
# containing the timestamp generated from the entry.
#
# All parameters are either an integer containing a UNIX timestamp, a Time object, or a string
# containing a string representation of the time; the value is converted to a
# {Fl::Framework::Core::Icalendar::Datetime} and stored in that format.
#
# @param opts [Hash] A Hash containing configuration options for the query.
# @option opts [Integer, Time, String] :updated_after to select comments updated after a given time.
# @option opts [Integer, Time, String] :created_after to select comments created after a given time.
# @option opts [Integer, Time, String] :updated_before to select comments updated before a given time.
# @option opts [Integer, Time, String] :created_before to select comments created before a given time.
#
# @return [Hash] Returns a hash containing any number of the following keys; all values are timestamps.
# - *:c_after_ts* from *:created_after*.
# - *:c_before_ts* from *:created_before*.
# - *:u_after_ts* from *:updated_after*.
# - *:u_before_ts* from *:updated_before*.
def _date_filter_timestamps(opts)
rv = {}
if opts.has_key?(:created_after)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:created_after])
rv[:c_after_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:updated_after)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:updated_after])
rv[:u_after_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:created_before)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:created_before])
rv[:c_before_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:updated_before)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:updated_before])
rv[:u_before_ts] = dt if dt.valid?
rescue => exc
end
end
rv
end
# Parse the *:order* option and generate an order clause.
# This method processes the *:order* key in _opts_ and generates an
# array of converted order clauses.
#
# @param opts [Hash] A hash of query options.
# @param df [String, Array] The default value for the order option if **:order** is not present
# in *opts*. A `nil` value maps to `updated_at DESC'.
#
# @option opts [String, Array] :order A string or array containing the <tt>ORDER BY</tt> clauses
# to process. The string value is converted to an array by splitting it at commas.
# A `false` value or an empty string or array causes the option to be ignored.
#
# @return [Array] Returns an array of converted order clauses.
def _parse_order_option(opts, df = nil)
ord = case opts[:order]
when String
opts[:order].split(/,\s*/)
when Array
opts[:order]
when FalseClass
nil
else
if df.is_a?(Array)
df
elsif df.is_a?(String)
df.split(/,\s*/)
else
[ 'updated_at DESC' ]
end
end
return nil if ord.nil? or (ord.count < 1)
ord.map { |e| e.strip }
end
end
end
Added the query support method boolean_query_flag.
module Fl::Framework
# Generic query support.
# This module defines a number of general support methods used by various query packages.
module Query
protected
# Normalize a boolean query flag.
# This method accepts a flag value in multiple formats, and converts it to `true` or `false`.
#
# @param f [Boolean,Numeric,String,nil] The flag value. If a boolean, it is returned as is.
# If a numeric value, it returns `true` if f != 0, and `false` otherwise; therefore, a numeric
# value has the same semantics as numeric to boolean conversion in C.
# If a string value, and the string is made up wholly of digits, the value is converted to an
# integer and processed as for numeric values.
# Otherwise, the strings `true`, `t`, `yes`, and `y` are converted to `true`, and `false`, `f`,
# `no`, and `n` are converted to `false`. A `nil` value is converted to `false`.
# Any other value is also converted to `false`.
#
# @return [Boolean] Returns a boolean value as outlined above.
def boolean_query_flag(f)
case f
when TrueClass, FalseClass
f
when Numeric
f != 0
when String
if f =~ /^[0-9]+$/
f.to_i != 0
elsif f =~ /^t(rue)?$/i
true
elsif f =~ /^f(alse)?$/i
false
elsif f =~ /^y(es)?$/i
true
elsif f =~ /^n(o)?$/i
false
end
when NilClass
false
else
false
end
end
# Converts a list of references to a list of object identifiers.
# This method takes an array containing references to objects of a single class, and returns
# an array of object identifiers for all the converted references.
# The elements of *rl* are one of the following.
#
# - An integer value is assumed to be an object identifier and is added to the return value as is.
# - If the value is an instance of *klass*, the return from the value's `id` method is added to
# the result.
# - If the value is a String, check if it is an integer representation (it contains just numeric
# characters); if so, convert it to an integer and add it to the result.
# Otherwise, treat it as a fingerprint: call {ActiveRecord::Base.split_fingerprint} and, if
# the fingerprint is a reference to an instance of *klass*, add the **id** component to the
# result value.
#
# Note that elements that do not match any of these conditions are dropped from the return value.
#
# @param rl [Array<Integer,String,ActiveRecord::Base>] The array of references to convert.
# @param klass [Class] The ActiveRecord::Base subclass for the references.
#
# @return [Array<Integer>] Returns an array of object identifiers.
def convert_list_of_references(rl, klass)
rl.reduce([ ]) do |acc, r|
if r.is_a?(Integer)
acc << r
elsif r.is_a?(klass)
acc << r.id
elsif r.is_a?(String)
if r =~ /^[0-9]+$/
acc << r.to_i
else
c, id = ActiveRecord::Base.split_fingerprint(r, klass)
acc << id.to_i unless id.nil?
end
end
acc
end
end
# Converts a list of polymorphic references to a list of object fingerprints.
# This method takes an array containing references to objects of potentially different classes, and
# returns an array of object fingerprints for all the converted references.
# The elements of *rl* are one of the following.
#
# - If the value is an instance of a subclass of `ActiveRecord::Base`, the return from the value's
# `fingerprint` method is added to the result.
# - If the value is a String, treat it as a fingerprint: call {ActiveRecord::Base.split_fingerprint}
# and, if the result indicates a valid fingerprint, add it to the return value.
#
# Note that elements that do not match any of these conditions are dropped from the return value.
#
# @param rl [Array<Integer,String,ActiveRecord::Base>] The array of references to convert.
#
# @return [Array<String>] Returns an array of object fingerprints.
def convert_list_of_polymorphic_references(rl)
rl.reduce([ ]) do |acc, r|
case r
when ActiveRecord::Base
acc << r.fingerprint if r.respond_to?(:fingerprint)
when String
# Technically, we could get the class from the name, check that it exists and that it is
# a subclass of ActiveRecord::Base, but for the time being we don't
c, id = ActiveRecord::Base.split_fingerprint(r)
acc << r unless c.nil? || id.nil?
end
acc
end
end
# Partition **only_** and **except_** lists in a set of query options.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using {#convert_list_of_references}. It then generates new values of **only_** and
# **except_** lists from the converted references as follows.
#
# 1. If the **only_** references is empty or not present, the return value contains the references
# as is.
# 2. If the **except_** references is empty or not present, the return value contains the references
# as is.
# 3. If both reference array are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# For example, if *opts* is `{ only_groups: [ 1, 2, 3, 4 ], except_groups: [ 2, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ only_groups: [ 1, 3 ], except_groups: nil }`.
# If *opts* is `{ only_groups: [ 1, 2, 3, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ only_groups: [ 1, 2, 3, 4 ] }`.
# If *opts* is `{ except_groups: [ 2, 4 ] }`, the return
# value from `partition_lists_of_references(opts, 'groups', MyGroup)` is
# `{ except_groups: [ 2, 4 ] }`.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
# @param klass [Class] The class to pass to {#convert_list_of_references}.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_lists_of_references(opts, suffix, klass)
rv = { }
only_name = "only_#{suffix}".to_sym
except_name = "except_#{suffix}".to_sym
if opts.has_key?(only_name)
if opts[only_name].nil?
rv[only_name] = nil
else
only_l = (opts[only_name].is_a?(Array)) ? opts[only_name] : [ opts[only_name] ]
rv[only_name] = convert_list_of_references(only_l, klass)
end
end
if opts.has_key?(except_name)
if opts[except_name].nil?
rv[except_name] = nil
else
x_l = (opts[except_name].is_a?(Array)) ? opts[except_name] : [ opts[except_name] ]
except_refs = convert_list_of_references(x_l, klass)
# if there is a `only_name`, then we need to remove the `except_name` members from it.
# otherwise, we return `except_name`
if rv[only_name].is_a?(Array)
rv[only_name] = rv[only_name] - except_refs
else
rv[except_name] = except_refs
end
end
end
rv
end
# Partition **only_** and **except_** lists in a set of query options.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using the given block. It then generates new values of **only_** and
# **except_** lists from the converted items as follows.
#
# 1. If the **only_** array is empty or not present, the return value contains the array as is.
# 2. If the **except_** array is empty or not present, the return value contains the array as is.
# 3. If both arrays are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
# @yield [list] The array containing the list to convert.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_filter_lists(opts, suffix)
rv = { }
only_name = "only_#{suffix}".to_sym
except_name = "except_#{suffix}".to_sym
if opts.has_key?(only_name)
if opts[only_name].nil?
rv[only_name] = nil
else
only_l = (opts[only_name].is_a?(Array)) ? opts[only_name] : [ opts[only_name] ]
rv[only_name] = yield only_l
end
end
if opts.has_key?(except_name)
if opts[except_name].nil?
rv[except_name] = nil
else
x_l = (opts[except_name].is_a?(Array)) ? opts[except_name] : [ opts[except_name] ]
except_refs = yield x_l
# if there is a `only_name`, then we need to remove the `except_name` members from it.
# otherwise, we return `except_name`
if rv[only_name].is_a?(Array)
rv[only_name] = rv[only_name] - except_refs
else
rv[except_name] = except_refs
end
end
end
rv
end
# Partition **only_** and **except_** lists in a set of query options, for polymorphic references.
# This method looks up the two options <b>only\_<i>suffix</i></b> and <b>except\_<i>suffix</i></b>
# in *opts* and
# converts them using {#convert_list_of_polymorphic_references}. It then generates new values of
# **only_** and **except_** lists from the converted references as follows.
#
# 1. If the **only_** references is empty or not present, the return value contains the references
# as is.
# 2. If the **except_** references is empty or not present, the return value contains the references
# as is.
# 3. If both reference array are present, remove the contents of the **except_** array from the
# **only_** array, and return the **only_** array and `nil` for the **except_** array.
#
# @param opts [Hash] The query options.
# @param suffix [String,Symbol] The suffix for the option names.
#
# @return [Hash] Returns a hash that contains up to two key/value pairs: the **only_** key is the
# list of object identifiers to accept, and **except_** the list to reject. If the value of the
# keys is `nil`, or if the key is missing, the value should be ignored.
def partition_lists_of_polymorphic_references(opts, suffix)
rv = { }
only_name = "only_#{suffix}".to_sym
except_name = "except_#{suffix}".to_sym
if opts.has_key?(only_name)
if opts[only_name].nil?
rv[only_name] = nil
else
only_l = (opts[only_name].is_a?(Array)) ? opts[only_name] : [ opts[only_name] ]
rv[only_name] = convert_list_of_polymorphic_references(only_l)
end
end
if opts.has_key?(except_name)
if opts[except_name].nil?
rv[except_name] = nil
else
x_l = (opts[except_name].is_a?(Array)) ? opts[except_name] : [ opts[except_name] ]
except_refs = convert_list_of_polymorphic_references(x_l)
# if there is a `only_name`, then we need to remove the `except_name` members from it.
# otherwise, we return `except_name`
if rv[only_name].is_a?(Array)
rv[only_name] = rv[only_name] - except_refs
else
rv[except_name] = except_refs
end
end
end
rv
end
# Generate the author lists from query options.
# This method builds two lists, one that contains the fingerprints of authors to return
# in the query, and one of authors to ignore in the query.
#
# The method expects the objects in the group lists to respond to the +members+ method, which returns
# the list of group members.
#
# @param opts [Hash] A Hash containing configuration options for the query.
# @option opts [Array<Object, String>, Object, String] :only_authors If given, return only comments
# generated by the given author or, if the value is an array, authors.
# The values are either objects, or strings containing the object's fingerprint
# (see {ActiveRecord::Base#fingerprint}).
# If an author is listed in both *:only_authors* and *:except_authors*, it is removed
# from *:only_authors* before the where clause component is generated; therefore, *:except_authors*
# has higher priority than *:only_authors*.
# @option opts [Array<Object, String>, Object, String] :except_authors If given, return only comments
# not generated by the given author or, if the value is an array, authors.
# See the documentation for *:only_authors*.
# @option opts [Array<Object, String>, Object, String] :only_groups If present, an array of group
# objects (or fingerprints) that contains the list used
# to limit the returned values to comments generated by authors in the groups. A single value
# is converted to an array. Note that the groups are converted to an array of author ids,
# for all the authors in the groups, and a where clause based on that list is added to the query.
# Therefore, this has a similar effect to the *:only_authors* option.
# If both expanded *:only_groups* and *:except_groups* values contain the same author id, that
# author is dropped from the expanded *:only_groups* list; therefore, *:except_groups* has higher
# precedence than *:only_groups*.
# @option opts [Array<Object, String>, Object, String] :except_groups If given, return only comments
# not generated by any members of the group or,
# if the value is an array, groups. See the documentation for *:only_groups*.
# The *:except_groups* option expands to a list of object identifiers for authors whose comments
# should be excluded from the return value; therefore, *:except_groups* acts
# like *:except_authors*.
#
# @return [Hash] Returns a hash with two entries:
# - *:only_ids* is +nil+, to indicate that no "must-have" author selection is requested; or it is
# an array whose elements are authors' fingerprints.
# - *:except_ids* is +nil+, to indicate that no "must-not-have" author selection is requested; or it is
# an array whose elements are authors' fingerprints.
def _expand_author_lists(opts)
only_authors = opts[:only_authors]
only_groups = opts[:only_groups]
except_authors = opts[:except_authors]
except_groups = opts[:except_groups]
return {
:only_ids => nil,
:except_ids => nil
} if only_authors.nil? && only_groups.nil? && except_authors.nil? && except_groups.nil?
# 1. Build the arrays of object identifiers
only_uids = if only_authors
t = (only_authors.is_a?(Array)) ? only_authors : [ only_authors ]
t.map { |u| (u.is_a?(String)) ? u : u.fingerprint }
else
nil
end
if only_groups
t = (only_groups.is_a?(Array)) ? only_groups : [ only_groups ]
glist = t.map { |g| (g.is_a?(String)) ? ActiveRecord::Base.find_by_fingerprint(g) : g }
only_gids = []
glist.each do |g|
if g
g.members.each do |u|
f = u.fingerprint
only_gids << f unless only_gids.include?(f)
end
end
end
else
only_gids = nil
end
except_uids = if except_authors
t = (except_authors.is_a?(Array)) ? except_authors : [ except_authors ]
t.map { |u| (u.is_a?(String)) ? u : u.fingerprint }
else
nil
end
if except_groups
t = (except_groups.is_a?(Array)) ? except_groups : [ except_groups ]
glist = t.map { |g| (g.is_a?(String)) ? ActiveRecord::Base.find_by_fingerprint(g) : g }
except_gids = []
glist.each do |g|
if g
g.members.each do |u|
f = u.fingerprint
except_gids << f unless except_gids.include?(f)
end
end
end
else
except_gids = nil
end
# 2. The list of author ids is the union of the groups/authors arrays
only_ids = (only_uids.nil?) ? nil : only_uids
unless only_gids.nil?
if only_ids.nil?
only_ids = only_gids
else
only_ids |= only_gids
end
end
except_ids = (except_uids.nil?) ? nil : except_uids
unless except_gids.nil?
if except_ids.nil?
except_ids = except_gids
else
except_ids |= except_gids
end
end
# 3. Remove any except ids from the only list
only_ids = only_ids - except_ids if only_ids.is_a?(Array) && except_ids.is_a?(Array)
{
:only_ids => only_ids,
:except_ids => except_ids
}
end
# Partition author lists.
# Calls {#_partition_one_author_list} for each entry in _hlist_, and returns their partitioned values.
#
# @param [Hash] hlist A hash containing author lists.
# @option hlist [Array<String>] :only_ids The fingerprints of the objects to place in the "must-have"
# clauses. Could be +nil+ if no "must-have" objects were requested.
# @option hlist [Array<String>] :except_ids The fingerprints of the objects to place in the "must-not-have"
# clauses. Could be +nil+ if no "must-have" objects were requested.
#
# @return [Hash] Returns a hash containing two entries, *:only_ids* and *:except_ids*, generated as
# described above.
def _partition_author_lists(hlist)
h = { }
if hlist.has_key?(:only_ids) && hlist[:only_ids]
h[:only_ids] = _partition_one_author_list(hlist[:only_ids])
else
h[:only_ids] = nil
end
if hlist.has_key?(:except_ids) && hlist[:except_ids]
h[:except_ids] = _partition_one_author_list(hlist[:except_ids])
else
h[:except_ids] = nil
end
h
end
# Partition a list of authors.
# This method groups all authors whose fingerprints use the same class name, and places in the
# return value an entry whose key is the class name, and whose value is an array of object identifiers
# as extracted from the fingerprints.
# This is how WHERE clauses will be set up.
#
# @param [Array<String>] clist An array of object fingerprints. A +nil+ value causes a +nil+ return value.
#
# @return [Hash] Returns a hash whose keys are the distinct class names from the fingerprints, and
# values the corresponding object identifiers. If _clist_ is +nil+, it returns +nil+.
# Note that the object identifiers are returned as strings, and for some ORMs (Active Record comes to
# mind...), they will likely have to be converted to integers in order to be used in WHERE clauses.
def _partition_one_author_list(clist)
return nil if clist.nil?
h = { }
clist.each do |f|
if f
cname, id = f.split('/')
if h.has_key?(cname)
h[cname] << id
else
h[cname] = [ id ]
end
end
end
h
end
# Parse a timestamp parameter's value.
# The value *value* is either an integer containing a UNIX timestamp, a Time object, or a string
# containing a string representation of the time; the value is converted to a
# {Fl::Framework::Core::Icalendar::Datetime} and returned in that format.
#
# @param value [Integer, Time, String] The timestamp to parse.
#
# @return [Fl::Framework::Core::Icalendar::Datetime, String] On success, returns the parsed timestamp.
# On failure, returns a string containing an error message from the parser.
def _parse_timestamp(value)
begin
return Fl::Framework::Core::Icalendar::Datetime.new(value)
rescue => exc
return exc.message
end
end
# Sets up the parameters for time-related filters.
# For each of the options listed below, the method places a corresponding entry in the return value
# containing the timestamp generated from the entry.
#
# All parameters are either an integer containing a UNIX timestamp, a Time object, or a string
# containing a string representation of the time; the value is converted to a
# {Fl::Framework::Core::Icalendar::Datetime} and stored in that format.
#
# @param opts [Hash] A Hash containing configuration options for the query.
# @option opts [Integer, Time, String] :updated_after to select comments updated after a given time.
# @option opts [Integer, Time, String] :created_after to select comments created after a given time.
# @option opts [Integer, Time, String] :updated_before to select comments updated before a given time.
# @option opts [Integer, Time, String] :created_before to select comments created before a given time.
#
# @return [Hash] Returns a hash containing any number of the following keys; all values are timestamps.
# - *:c_after_ts* from *:created_after*.
# - *:c_before_ts* from *:created_before*.
# - *:u_after_ts* from *:updated_after*.
# - *:u_before_ts* from *:updated_before*.
def _date_filter_timestamps(opts)
rv = {}
if opts.has_key?(:created_after)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:created_after])
rv[:c_after_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:updated_after)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:updated_after])
rv[:u_after_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:created_before)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:created_before])
rv[:c_before_ts] = dt if dt.valid?
rescue => exc
end
end
if opts.has_key?(:updated_before)
begin
dt = Fl::Framework::Core::Icalendar::Datetime.new(opts[:updated_before])
rv[:u_before_ts] = dt if dt.valid?
rescue => exc
end
end
rv
end
# Parse the *:order* option and generate an order clause.
# This method processes the *:order* key in _opts_ and generates an
# array of converted order clauses.
#
# @param opts [Hash] A hash of query options.
# @param df [String, Array] The default value for the order option if **:order** is not present
# in *opts*. A `nil` value maps to `updated_at DESC'.
#
# @option opts [String, Array] :order A string or array containing the <tt>ORDER BY</tt> clauses
# to process. The string value is converted to an array by splitting it at commas.
# A `false` value or an empty string or array causes the option to be ignored.
#
# @return [Array] Returns an array of converted order clauses.
def _parse_order_option(opts, df = nil)
ord = case opts[:order]
when String
opts[:order].split(/,\s*/)
when Array
opts[:order]
when FalseClass
nil
else
if df.is_a?(Array)
df
elsif df.is_a?(String)
df.split(/,\s*/)
else
[ 'updated_at DESC' ]
end
end
return nil if ord.nil? or (ord.count < 1)
ord.map { |e| e.strip }
end
end
end
|
# Namespace for the Flexslider gem.
module Flexslider
  # Gem version string.
  VERSION = "2.0.0"
end
Updated version
# Namespace for the Flexslider gem.
module Flexslider
  # Gem version string.
  VERSION = "2.0.1"
end
|
module Fog
  # Dynect provider namespace.
  module Dynect
    # Gem version string.
    VERSION = '0.0.2'
  end
end
v0.0.3
module Fog
  # Dynect provider namespace.
  module Dynect
    # Gem version string.
    VERSION = '0.0.3'
  end
end
|
require "foreman/engine"
class Foreman::Engine::CLI < Foreman::Engine
  # Terminal color support: Color.enable extends an IO (normally $stdout)
  # with the +color?+ / +color+ helpers used below.
  module Color
    # ANSI SGR codes by symbolic color name.
    # NOTE(review): the bright_* entries reuse the normal-intensity codes
    # (30-37) rather than the ANSI bright range (90-97) -- confirm whether
    # that is intentional before changing.
    ANSI = {
      :reset => 0,
      :black => 30,
      :red => 31,
      :green => 32,
      :yellow => 33,
      :blue => 34,
      :magenta => 35,
      :cyan => 36,
      :white => 37,
      :bright_black => 30,
      :bright_red => 31,
      :bright_green => 32,
      :bright_yellow => 33,
      :bright_blue => 34,
      :bright_magenta => 35,
      :bright_cyan => 36,
      :bright_white => 37,
    }
    # Extend +io+ with this module; +force+ makes color? always true (used
    # for the --color command-line option).
    def self.enable(io, force=false)
      io.extend(self)
      @@color_force = force
    end
    # True when output should be colorized: forced on, or a TTY with TERM
    # set; never on Windows.
    def color?
      return true if @@color_force
      return false if Foreman.windows?
      return false unless self.respond_to?(:isatty)
      self.isatty && ENV["TERM"]
    end
    # Escape sequence for +name+, or "" when color is disabled or the name
    # has no ANSI mapping.
    def color(name)
      return "" unless color?
      return "" unless ansi = ANSI[name.to_sym]
      "\e[#{ansi}m"
    end
  end
  # Colors assigned round-robin to processes. The stray comma that produced
  # the bogus entry "intense_red," has been removed; note the intense_* names
  # have no ANSI mapping above and therefore render uncolored either way.
  FOREMAN_COLORS = %w( cyan yellow green magenta red blue intense_cyan intense_yellow
    intense_green intense_magenta intense_red intense_blue )
  # Engine startup hook: assign per-process colors, set the master process
  # title, and enable color output on $stdout.
  def startup
    @colors = map_colors
    proctitle "foreman: master" unless Foreman.windows?
    Color.enable($stdout, options[:color])
  end
  # Emit one timestamped, name-prefixed line per line of +data+.
  # Uses String#lines + chomp (instead of chomp + split("\n")) so that blank
  # lines produced by a process are preserved rather than silently dropped.
  def output(name, data)
    data.to_s.lines.map(&:chomp).each do |message|
      output = ""
      output += $stdout.color(@colors[name.split(".").first].to_sym)
      output += "#{Time.now.strftime("%H:%M:%S")} #{pad_process_name(name)} | "
      output += $stdout.color(:reset)
      output += message
      $stdout.puts output
      $stdout.flush
    end
  rescue Errno::EPIPE
    # The reader went away (e.g. our output was piped into head); exit cleanly.
    terminate_gracefully
  end
  # Engine shutdown hook; nothing to clean up for the CLI front end.
  def shutdown
  end
  private
  # Column width used to align process names in the log prefix (minimum 6).
  def name_padding
    @name_padding ||= begin
      index_padding = @names.values.map { |n| formation[n] }.max.to_s.length + 1
      name_padding = @names.values.map { |n| n.length + index_padding }.sort.last
      [ 6, name_padding ].max
    end
  end
  # Left-justify +name+ to the common padding width.
  def pad_process_name(name)
    name.ljust(name_padding, " ")
  end
  # Build the process-name => color map, cycling through FOREMAN_COLORS;
  # "system" messages use "intense_white".
  def map_colors
    colors = Hash.new("white")
    @names.values.each_with_index do |name, index|
      colors[name] = FOREMAN_COLORS[index % FOREMAN_COLORS.length]
    end
    colors["system"] = "intense_white"
    colors
  end
  # Set the OS-visible process title.
  def proctitle(title)
    $0 = title
  end
  # Set the terminal window title (no-op on Windows).
  def termtitle(title)
    printf("\033]0;#{title}\007") unless Foreman.windows?
  end
end
Don't ignore blank lines in the output
This fixes the stdout code to ensure that empty lines are output.
Many times, these blank lines are intentional, so foreman should not
suppress them.
This fixes #286
require "foreman/engine"
class Foreman::Engine::CLI < Foreman::Engine
  # Terminal color support: Color.enable extends an IO (normally $stdout)
  # with the +color?+ / +color+ helpers used below.
  module Color
    # ANSI SGR codes by symbolic color name.
    # NOTE(review): the bright_* entries reuse the normal-intensity codes
    # (30-37) rather than the ANSI bright range (90-97) -- confirm whether
    # that is intentional.
    ANSI = {
      :reset => 0,
      :black => 30,
      :red => 31,
      :green => 32,
      :yellow => 33,
      :blue => 34,
      :magenta => 35,
      :cyan => 36,
      :white => 37,
      :bright_black => 30,
      :bright_red => 31,
      :bright_green => 32,
      :bright_yellow => 33,
      :bright_blue => 34,
      :bright_magenta => 35,
      :bright_cyan => 36,
      :bright_white => 37,
    }
    # Extend +io+ with this module; +force+ makes color? always true (used
    # for the --color command-line option).
    def self.enable(io, force=false)
      io.extend(self)
      @@color_force = force
    end
    # True when output should be colorized: forced on, or a TTY with TERM
    # set; never on Windows.
    # NOTE(review): raises NameError if called before Color.enable has
    # initialized @@color_force.
    def color?
      return true if @@color_force
      return false if Foreman.windows?
      return false unless self.respond_to?(:isatty)
      self.isatty && ENV["TERM"]
    end
    # Escape sequence for +name+, or "" when color is disabled or the name
    # has no ANSI mapping.
    def color(name)
      return "" unless color?
      return "" unless ansi = ANSI[name.to_sym]
      "\e[#{ansi}m"
    end
  end
  # Colors assigned round-robin to processes.
  # NOTE(review): the stray comma inside the %w list yields the bogus entry
  # "intense_red,"; the intense_* names also have no ANSI mapping above and
  # therefore render uncolored.
  FOREMAN_COLORS = %w( cyan yellow green magenta red blue intense_cyan intense_yellow
    intense_green intense_magenta intense_red, intense_blue )
  # Engine startup hook: assign per-process colors, set the master process
  # title, and enable color output on $stdout.
  def startup
    @colors = map_colors
    proctitle "foreman: master" unless Foreman.windows?
    Color.enable($stdout, options[:color])
  end
  # Emit one timestamped, name-prefixed line per line of +data+.
  # String#lines + chomp (rather than chomp + split) preserves blank lines
  # in the process output instead of dropping them.
  def output(name, data)
    data.to_s.lines.map(&:chomp).each do |message|
      output = ""
      output += $stdout.color(@colors[name.split(".").first].to_sym)
      output += "#{Time.now.strftime("%H:%M:%S")} #{pad_process_name(name)} | "
      output += $stdout.color(:reset)
      output += message
      $stdout.puts output
      $stdout.flush
    end
  rescue Errno::EPIPE
    # The reader went away (e.g. our output was piped into head); exit cleanly.
    terminate_gracefully
  end
  # Engine shutdown hook; nothing to clean up for the CLI front end.
  def shutdown
  end
  private
  # Column width used to align process names in the log prefix (minimum 6).
  def name_padding
    @name_padding ||= begin
      index_padding = @names.values.map { |n| formation[n] }.max.to_s.length + 1
      name_padding = @names.values.map { |n| n.length + index_padding }.sort.last
      [ 6, name_padding ].max
    end
  end
  # Left-justify +name+ to the common padding width.
  def pad_process_name(name)
    name.ljust(name_padding, " ")
  end
  # Build the process-name => color map, cycling through FOREMAN_COLORS;
  # "system" messages use "intense_white".
  def map_colors
    colors = Hash.new("white")
    @names.values.each_with_index do |name, index|
      colors[name] = FOREMAN_COLORS[index % FOREMAN_COLORS.length]
    end
    colors["system"] = "intense_white"
    colors
  end
  # Set the OS-visible process title.
  def proctitle(title)
    $0 = title
  end
  # Set the terminal window title (no-op on Windows).
  def termtitle(title)
    printf("\033]0;#{title}\007") unless Foreman.windows?
  end
end
|
require "fpm/namespace"
require "fpm/package"
require "fpm/util"
require "rubygems/package"
require "rubygems"
require "fileutils"
require "tmpdir"
require "json"
# Support for python packages.
#
# This supports input, but not output.
#
# Example:
#
# # Download the django python package:
# pkg = FPM::Package::Python.new
# pkg.input("Django")
#
class FPM::Package::Python < FPM::Package
  # Flags '--foo' will be accessible as attributes[:python_foo]
  option "--bin", "PYTHON_EXECUTABLE",
    "The path to the python executable you wish to run.", :default => "python"
  option "--easyinstall", "EASYINSTALL_EXECUTABLE",
    "The path to the easy_install executable tool", :default => "easy_install"
  option "--pip", "PIP_EXECUTABLE",
    "The path to the pip executable tool. If not specified, easy_install " \
    "is used instead", :default => nil
  option "--pypi", "PYPI_URL",
    "PyPi Server uri for retrieving packages.",
    :default => "http://pypi.python.org/simple"
  option "--package-prefix", "NAMEPREFIX",
    "(DEPRECATED, use --package-name-prefix) Name to prefix the package " \
    "name with." do |value|
    @logger.warn("Using deprecated flag: --package-prefix. Please use " \
                 "--package-name-prefix")
    value
  end
  option "--package-name-prefix", "PREFIX", "Name to prefix the package " \
    "name with.", :default => "python"
  option "--fix-name", :flag, "Should the target package name be prefixed?",
    :default => true
  option "--fix-dependencies", :flag, "Should the package dependencies be " \
    "prefixed?", :default => true
  option "--downcase-name", :flag, "Should the target package name be in " \
    "lowercase?", :default => true
  option "--downcase-dependencies", :flag, "Should the package dependencies " \
    "be in lowercase?", :default => true
  option "--install-bin", "BIN_PATH", "The path to where python scripts " \
    "should be installed to."
  # Help-text fix: "Want to what" was missing the word "know".
  option "--install-lib", "LIB_PATH", "The path to where python libs " \
    "should be installed to (default depends on your python installation). " \
    "Want to know what your target platform is using? Run this: " \
    "python -c 'from distutils.sysconfig import get_python_lib; " \
    "print get_python_lib()'"
  # Help-text fix: the original read "should be." + "installed to", which
  # rendered as "should be.installed to" in --help output.
  option "--install-data", "DATA_PATH", "The path to where data should be " \
    "installed to. This is equivalent to 'python setup.py --install-data " \
    "DATA_PATH"
  option "--dependencies", :flag, "Include requirements defined in setup.py" \
    " as dependencies.", :default => true
  # Help-text fix: missing space between "file" and "in".
  option "--obey-requirements-txt", :flag, "Use a requirements.txt file " \
    "in the top-level directory of the python package for dependency " \
    "detection.", :default => false

  private

  # Input a package.
  #
  # The 'package' can be any of:
  #
  # * A name of a package on pypi (ie; easy_install some-package)
  # * The path to a directory containing setup.py
  # * The path to a setup.py
  def input(package)
    path_to_package = download_if_necessary(package, version)

    if File.directory?(path_to_package)
      setup_py = File.join(path_to_package, "setup.py")
    else
      setup_py = path_to_package
    end

    # File.exist? — File.exists? is deprecated.
    if !File.exist?(setup_py)
      @logger.error("Could not find 'setup.py'", :path => setup_py)
      raise "Unable to find python package; tried #{setup_py}"
    end

    load_package_info(setup_py)
    install_to_staging(setup_py)
  end # def input

  # Download the given package if necessary. If version is given, that version
  # will be downloaded, otherwise the latest is fetched.
  def download_if_necessary(package, version=nil)
    # TODO(sissel): this should just be a 'download' method, the 'if_necessary'
    # part should go elsewhere.
    path = package
    # If it's a path, assume local build.
    if File.directory?(path) || (File.exist?(path) && File.basename(path) == "setup.py")
      return path
    end

    @logger.info("Trying to download", :package => package)

    if version.nil?
      want_pkg = "#{package}"
    else
      want_pkg = "#{package}==#{version}"
    end

    target = build_path(package)
    FileUtils.mkdir(target) unless File.directory?(target)

    if attributes[:python_pip].nil?
      # no pip, use easy_install
      @logger.debug("no pip, defaulting to easy_install", :easy_install => attributes[:python_easyinstall])
      safesystem(attributes[:python_easyinstall], "-i",
                 attributes[:python_pypi], "--editable", "-U",
                 "--build-directory", target, want_pkg)
    else
      @logger.debug("using pip", :pip => attributes[:python_pip])
      safesystem(attributes[:python_pip], "install", "--no-install",
                 "-U", "--build", target, want_pkg)
    end

    # easy_install will put stuff in @tmpdir/packagename/, so find that:
    #   @tmpdir/somepackage/setup.py
    dirs = ::Dir.glob(File.join(target, "*"))
    if dirs.length != 1
      raise "Unexpected directory layout after easy_install. Maybe file a bug? The directory is #{build_path}"
    end
    return dirs.first
  end # def download

  # Load the package information like name, version, dependencies.
  def load_package_info(setup_py)
    if !attributes[:python_package_prefix].nil?
      attributes[:python_package_name_prefix] = attributes[:python_package_prefix]
    end

    # Add ./pyfpm/ to the python library path
    pylib = File.expand_path(File.dirname(__FILE__))

    # chdir to the directory holding setup.py because some python setup.py's assume that you are
    # in the same directory.
    setup_dir = File.dirname(setup_py)

    output = ::Dir.chdir(setup_dir) do
      tmp = build_path("metadata.json")
      setup_cmd = "env PYTHONPATH=#{pylib} #{attributes[:python_bin]} " \
        "setup.py --command-packages=pyfpm get_metadata --output=#{tmp}"

      if attributes[:python_obey_requirements_txt?]
        setup_cmd += " --load-requirements-txt"
      end

      # Capture the output, which will be JSON metadata describing this python
      # package. See fpm/lib/fpm/package/pyfpm/get_metadata.py for more
      # details.
      @logger.info("fetching package metadata", :setup_cmd => setup_cmd)

      success = safesystem(setup_cmd)
      #%x{#{setup_cmd}}
      if !success
        @logger.error("setup.py get_metadata failed", :command => setup_cmd,
                      :exitcode => $?.exitstatus)
        raise "An unexpected error occurred while processing the setup.py file"
      end
      File.read(tmp)
    end
    @logger.debug("result from `setup.py get_metadata`", :data => output)
    metadata = JSON.parse(output)
    @logger.info("object output of get_metadata", :json => metadata)

    self.architecture = metadata["architecture"]
    self.description = metadata["description"]
    # Sometimes the license field is multiple lines; do best-effort and just
    # use the first line.
    self.license = metadata["license"].split(/[\r\n]+/).first
    self.version = metadata["version"]
    self.url = metadata["url"]

    # name prefixing is optional, if enabled, a name 'foo' will become
    # 'python-foo' (depending on what the python_package_name_prefix is)
    if attributes[:python_fix_name?]
      self.name = fix_name(metadata["name"])
    else
      self.name = metadata["name"]
    end

    # convert python-Foo to python-foo if flag is set
    self.name = self.name.downcase if attributes[:python_downcase_name?]

    if !attributes[:no_auto_depends?] and attributes[:python_dependencies?]
      self.dependencies = metadata["dependencies"].collect do |dep|
        dep_re = /^([^<>!= ]+)\s*(?:([<>!=]{1,2})\s*(.*))?$/
        match = dep_re.match(dep)
        if match.nil?
          @logger.error("Unable to parse dependency", :dependency => dep)
          raise FPM::InvalidPackageConfiguration, "Invalid dependency '#{dep}'"
        end
        name, cmp, version = match.captures

        # convert == to =
        if cmp == "=="
          @logger.info("Converting == dependency requirement to =", :dependency => dep )
          cmp = "="
        end

        # dependency name prefixing is optional, if enabled, a name 'foo' will
        # become 'python-foo' (depending on what the python_package_name_prefix
        # is)
        name = fix_name(name) if attributes[:python_fix_dependencies?]

        # convert dependencies from python-Foo to python-foo
        name = name.downcase if attributes[:python_downcase_dependencies?]

        "#{name} #{cmp} #{version}"
      end
    end # if attributes[:python_dependencies?]
  end # def load_package_info

  # Sanitize package name.
  # Some PyPI packages can be named 'python-foo', so we don't want to end up
  # with a package named 'python-python-foo'.
  # But we want packages named like 'pythonweb' to be suffixed
  # 'python-pythonweb'.
  def fix_name(name)
    if name.start_with?("python")
      # If the python package is called "python-foo" strip the "python-" part while
      # prepending the package name prefix.
      return [attributes[:python_package_name_prefix], name.gsub(/^python-/, "")].join("-")
    else
      return [attributes[:python_package_name_prefix], name].join("-")
    end
  end # def fix_name

  # Install this package to the staging directory
  def install_to_staging(setup_py)
    project_dir = File.dirname(setup_py)

    prefix = "/"
    prefix = attributes[:prefix] unless attributes[:prefix].nil?

    # Some setup.py's assume $PWD == current directory of setup.py, so let's
    # chdir first.
    ::Dir.chdir(project_dir) do
      flags = [ "--root", staging_path ]
      if !attributes[:python_install_lib].nil?
        flags += [ "--install-lib", File.join(prefix, attributes[:python_install_lib]) ]
      elsif !attributes[:prefix].nil?
        # setup.py install --prefix PREFIX still installs libs to
        # PREFIX/lib64/python2.7/site-packages/
        # but we really want something saner.
        #
        # since prefix is given, but not python_install_lib, assume PREFIX/lib
        flags += [ "--install-lib", File.join(prefix, "lib") ]
      end

      if !attributes[:python_install_data].nil?
        flags += [ "--install-data", File.join(prefix, attributes[:python_install_data]) ]
      elsif !attributes[:prefix].nil?
        # prefix given, but not python_install_data, assume PREFIX/data
        flags += [ "--install-data", File.join(prefix, "data") ]
      end

      if !attributes[:python_install_bin].nil?
        flags += [ "--install-scripts", File.join(prefix, attributes[:python_install_bin]) ]
      elsif !attributes[:prefix].nil?
        # prefix given, but not python_install_bin, assume PREFIX/bin
        flags += [ "--install-scripts", File.join(prefix, "bin") ]
      end

      safesystem(attributes[:python_bin], "setup.py", "install", *flags)
    end
  end # def install_to_staging

  public(:input)
end # class FPM::Package::Python
Update python.rb
Fix typos and missing spaces in the option help text.
require "fpm/namespace"
require "fpm/package"
require "fpm/util"
require "rubygems/package"
require "rubygems"
require "fileutils"
require "tmpdir"
require "json"
# Support for python packages.
#
# This supports input, but not output.
#
# Example:
#
# # Download the django python package:
# pkg = FPM::Package::Python.new
# pkg.input("Django")
#
class FPM::Package::Python < FPM::Package
  # Flags '--foo' will be accessible as attributes[:python_foo]
  option "--bin", "PYTHON_EXECUTABLE",
    "The path to the python executable you wish to run.", :default => "python"
  option "--easyinstall", "EASYINSTALL_EXECUTABLE",
    "The path to the easy_install executable tool", :default => "easy_install"
  option "--pip", "PIP_EXECUTABLE",
    "The path to the pip executable tool. If not specified, easy_install " \
    "is used instead", :default => nil
  option "--pypi", "PYPI_URL",
    "PyPi Server uri for retrieving packages.",
    :default => "http://pypi.python.org/simple"
  option "--package-prefix", "NAMEPREFIX",
    "(DEPRECATED, use --package-name-prefix) Name to prefix the package " \
    "name with." do |value|
    @logger.warn("Using deprecated flag: --package-prefix. Please use " \
                 "--package-name-prefix")
    value
  end
  option "--package-name-prefix", "PREFIX", "Name to prefix the package " \
    "name with.", :default => "python"
  option "--fix-name", :flag, "Should the target package name be prefixed?",
    :default => true
  option "--fix-dependencies", :flag, "Should the package dependencies be " \
    "prefixed?", :default => true
  option "--downcase-name", :flag, "Should the target package name be in " \
    "lowercase?", :default => true
  option "--downcase-dependencies", :flag, "Should the package dependencies " \
    "be in lowercase?", :default => true
  option "--install-bin", "BIN_PATH", "The path to where python scripts " \
    "should be installed to."
  # NOTE(review): "Want to what" in the help text below is missing the word
  # "know" — string left untouched here; fix in a help-text pass.
  option "--install-lib", "LIB_PATH", "The path to where python libs " \
    "should be installed to (default depends on your python installation). " \
    "Want to what your target platform is using? Run this: " \
    "python -c 'from distutils.sysconfig import get_python_lib; " \
    "print get_python_lib()'"
  option "--install-data", "DATA_PATH", "The path to where data should be " \
    "installed to. This is equivalent to 'python setup.py --install-data " \
    "DATA_PATH"
  option "--dependencies", :flag, "Include requirements defined in setup.py" \
    " as dependencies.", :default => true
  option "--obey-requirements-txt", :flag, "Use a requirements.txt file " \
    "in the top-level directory of the python package for dependency " \
    "detection.", :default => false

  private

  # Input a package.
  #
  # The 'package' can be any of:
  #
  # * A name of a package on pypi (ie; easy_install some-package)
  # * The path to a directory containing setup.py
  # * The path to a setup.py
  def input(package)
    path_to_package = download_if_necessary(package, version)

    if File.directory?(path_to_package)
      setup_py = File.join(path_to_package, "setup.py")
    else
      setup_py = path_to_package
    end

    # NOTE(review): File.exists? is deprecated in favor of File.exist?.
    if !File.exists?(setup_py)
      @logger.error("Could not find 'setup.py'", :path => setup_py)
      raise "Unable to find python package; tried #{setup_py}"
    end

    load_package_info(setup_py)
    install_to_staging(setup_py)
  end # def input

  # Download the given package if necessary. If version is given, that version
  # will be downloaded, otherwise the latest is fetched.
  def download_if_necessary(package, version=nil)
    # TODO(sissel): this should just be a 'download' method, the 'if_necessary'
    # part should go elsewhere.
    path = package
    # If it's a path, assume local build.
    if File.directory?(path) or (File.exists?(path) and File.basename(path) == "setup.py")
      return path
    end

    @logger.info("Trying to download", :package => package)

    if version.nil?
      want_pkg = "#{package}"
    else
      want_pkg = "#{package}==#{version}"
    end

    target = build_path(package)
    FileUtils.mkdir(target) unless File.directory?(target)

    if attributes[:python_pip].nil?
      # no pip, use easy_install
      @logger.debug("no pip, defaulting to easy_install", :easy_install => attributes[:python_easyinstall])
      safesystem(attributes[:python_easyinstall], "-i",
                 attributes[:python_pypi], "--editable", "-U",
                 "--build-directory", target, want_pkg)
    else
      @logger.debug("using pip", :pip => attributes[:python_pip])
      safesystem(attributes[:python_pip], "install", "--no-install",
                 "-U", "--build", target, want_pkg)
    end

    # easy_install will put stuff in @tmpdir/packagename/, so find that:
    #   @tmpdir/somepackage/setup.py
    dirs = ::Dir.glob(File.join(target, "*"))
    if dirs.length != 1
      raise "Unexpected directory layout after easy_install. Maybe file a bug? The directory is #{build_path}"
    end
    return dirs.first
  end # def download

  # Load the package information like name, version, dependencies.
  def load_package_info(setup_py)
    if !attributes[:python_package_prefix].nil?
      attributes[:python_package_name_prefix] = attributes[:python_package_prefix]
    end

    # Add ./pyfpm/ to the python library path
    pylib = File.expand_path(File.dirname(__FILE__))

    # chdir to the directory holding setup.py because some python setup.py's assume that you are
    # in the same directory.
    setup_dir = File.dirname(setup_py)

    output = ::Dir.chdir(setup_dir) do
      tmp = build_path("metadata.json")
      setup_cmd = "env PYTHONPATH=#{pylib} #{attributes[:python_bin]} " \
        "setup.py --command-packages=pyfpm get_metadata --output=#{tmp}"

      if attributes[:python_obey_requirements_txt?]
        setup_cmd += " --load-requirements-txt"
      end

      # Capture the output, which will be JSON metadata describing this python
      # package. See fpm/lib/fpm/package/pyfpm/get_metadata.py for more
      # details.
      @logger.info("fetching package metadata", :setup_cmd => setup_cmd)

      success = safesystem(setup_cmd)
      #%x{#{setup_cmd}}
      if !success
        @logger.error("setup.py get_metadata failed", :command => setup_cmd,
                      :exitcode => $?.exitstatus)
        raise "An unexpected error occurred while processing the setup.py file"
      end
      File.read(tmp)
    end
    @logger.debug("result from `setup.py get_metadata`", :data => output)
    metadata = JSON.parse(output)
    @logger.info("object output of get_metadata", :json => metadata)

    self.architecture = metadata["architecture"]
    self.description = metadata["description"]
    # Sometimes the license field is multiple lines; do best-effort and just
    # use the first line.
    self.license = metadata["license"].split(/[\r\n]+/).first
    self.version = metadata["version"]
    self.url = metadata["url"]

    # name prefixing is optional, if enabled, a name 'foo' will become
    # 'python-foo' (depending on what the python_package_name_prefix is)
    if attributes[:python_fix_name?]
      self.name = fix_name(metadata["name"])
    else
      self.name = metadata["name"]
    end

    # convert python-Foo to python-foo if flag is set
    self.name = self.name.downcase if attributes[:python_downcase_name?]

    if !attributes[:no_auto_depends?] and attributes[:python_dependencies?]
      self.dependencies = metadata["dependencies"].collect do |dep|
        # name, optional comparison operator, optional version
        dep_re = /^([^<>!= ]+)\s*(?:([<>!=]{1,2})\s*(.*))?$/
        match = dep_re.match(dep)
        if match.nil?
          @logger.error("Unable to parse dependency", :dependency => dep)
          raise FPM::InvalidPackageConfiguration, "Invalid dependency '#{dep}'"
        end
        name, cmp, version = match.captures

        # convert == to =
        if cmp == "=="
          @logger.info("Converting == dependency requirement to =", :dependency => dep )
          cmp = "="
        end

        # dependency name prefixing is optional, if enabled, a name 'foo' will
        # become 'python-foo' (depending on what the python_package_name_prefix
        # is)
        name = fix_name(name) if attributes[:python_fix_dependencies?]

        # convert dependencies from python-Foo to python-foo
        name = name.downcase if attributes[:python_downcase_dependencies?]

        "#{name} #{cmp} #{version}"
      end
    end # if attributes[:python_dependencies?]
  end # def load_package_info

  # Sanitize package name.
  # Some PyPI packages can be named 'python-foo', so we don't want to end up
  # with a package named 'python-python-foo'.
  # But we want packages named like 'pythonweb' to be suffixed
  # 'python-pythonweb'.
  def fix_name(name)
    if name.start_with?("python")
      # If the python package is called "python-foo" strip the "python-" part while
      # prepending the package name prefix.
      return [attributes[:python_package_name_prefix], name.gsub(/^python-/, "")].join("-")
    else
      return [attributes[:python_package_name_prefix], name].join("-")
    end
  end # def fix_name

  # Install this package to the staging directory
  def install_to_staging(setup_py)
    project_dir = File.dirname(setup_py)

    prefix = "/"
    prefix = attributes[:prefix] unless attributes[:prefix].nil?

    # Some setup.py's assume $PWD == current directory of setup.py, so let's
    # chdir first.
    ::Dir.chdir(project_dir) do
      flags = [ "--root", staging_path ]
      if !attributes[:python_install_lib].nil?
        flags += [ "--install-lib", File.join(prefix, attributes[:python_install_lib]) ]
      elsif !attributes[:prefix].nil?
        # setup.py install --prefix PREFIX still installs libs to
        # PREFIX/lib64/python2.7/site-packages/
        # but we really want something saner.
        #
        # since prefix is given, but not python_install_lib, assume PREFIX/lib
        flags += [ "--install-lib", File.join(prefix, "lib") ]
      end

      if !attributes[:python_install_data].nil?
        flags += [ "--install-data", File.join(prefix, attributes[:python_install_data]) ]
      elsif !attributes[:prefix].nil?
        # prefix given, but not python_install_data, assume PREFIX/data
        flags += [ "--install-data", File.join(prefix, "data") ]
      end

      if !attributes[:python_install_bin].nil?
        flags += [ "--install-scripts", File.join(prefix, attributes[:python_install_bin]) ]
      elsif !attributes[:prefix].nil?
        # prefix given, but not python_install_bin, assume PREFIX/bin
        flags += [ "--install-scripts", File.join(prefix, "bin") ]
      end

      safesystem(attributes[:python_bin], "setup.py", "install", *flags)
    end
  end # def install_to_staging

  public(:input)
end # class FPM::Package::Python
|
module Freighthop
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.2".freeze
end
Bump version to 0.0.3
module Freighthop
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.3".freeze
end
|
require 'thread'
module Futuroscope
# A Future is an object that gets initialized with a block and will behave
# exactly like the block's result, but being able to "borrow" its result from
# the future. That is, will block when the result is not ready until it is,
# and will return it instantly if the thread's execution already finished.
#
class Future < BasicObject
extend ::Forwardable
# Initializes a future with a block and starts its execution.
#
# Examples:
#
# future = Futuroscope::Future.new { sleep(1); :edballs }
# sleep(1)
# puts future
# => :edballs
# # This will return in 1 second and not 2 if the execution wasn't
# # deferred to a thread.
#
# pool - A pool where all the futures will be scheduled.
# block - A block that will be run in the background.
#
# Returns a Future
def initialize(pool = ::Futuroscope.default_pool, &block)
@queue = ::SizedQueue.new(1)
@pool = pool
@block = block
@pool.queue self
end
# Semipublic: Forces this future to be run.
def run_future
@queue.push(value: @block.call)
rescue ::Exception => e
@queue.push(exception: e)
end
# Semipublic: Returns the future's value. Will wait for the future to be
# completed or return its value otherwise. Can be called multiple times.
#
# Returns the Future's block execution result.
def future_value
resolved = resolved_future_value
raise resolved[:exception] if resolved[:exception]
resolved[:value]
end
def_delegators :future_value, *::BasicObject.instance_methods
private
def resolved_future_value
@resolved_future ||= @queue.pop
end
def method_missing(method, *args)
future_value.send(method, *args)
end
def respond_to_missing?(method, include_private = false)
future_value.respond_to?(method, include_private)
end
end
end
Make delegation more explicit
Delegating all BasicObject.instance_methods behaves weirdly on JRuby and it certainly looks like an ugly hack.
require 'thread'
module Futuroscope
  # A Future is an object that gets initialized with a block and will behave
  # exactly like the block's result, but being able to "borrow" its result from
  # the future. That is, will block when the result is not ready until it is,
  # and will return it instantly if the thread's execution already finished.
  #
  class Future < BasicObject
    extend ::Forwardable

    # Initializes a future with a block and starts its execution.
    #
    # Examples:
    #
    #   future = Futuroscope::Future.new { sleep(1); :edballs }
    #   sleep(1)
    #   puts future
    #   => :edballs
    #   # This will return in 1 second and not 2 if the execution wasn't
    #   # deferred to a thread.
    #
    # pool  - A pool where all the futures will be scheduled.
    # block - A block that will be run in the background.
    #
    # Returns a Future
    def initialize(pool = ::Futuroscope.default_pool, &block)
      # SizedQueue of 1: run_future pushes exactly once; readers block on pop.
      @queue = ::SizedQueue.new(1)
      @pool = pool
      @block = block
      @pool.queue self
    end

    # Semipublic: Forces this future to be run.
    def run_future
      @queue.push(value: @block.call)
    rescue ::Exception => e
      # Capture the exception so it can be re-raised in the caller's thread.
      @queue.push(exception: e)
    end

    # Semipublic: Returns the future's value. Will wait for the future to be
    # completed or return its value otherwise. Can be called multiple times.
    #
    # Returns the Future's block execution result.
    def future_value
      resolved = resolved_future_value
      # NOTE(review): BasicObject does not include Kernel, so this bare
      # `raise` appears to dispatch through method_missing rather than
      # raising directly — verify (::Kernel.raise would be explicit).
      raise resolved[:exception] if resolved[:exception]
      resolved[:value]
    end

    # Explicitly delegate only BasicObject's own operators; all other
    # messages are forwarded via method_missing below.
    def_delegators :future_value, :!, :!=, :==, :equal?

    private

    # Memoizes the single item pushed by run_future; blocks until available.
    def resolved_future_value
      @resolved_future ||= @queue.pop
    end

    def method_missing(method, *args)
      future_value.send(method, *args)
    end

    def respond_to_missing?(method, include_private = false)
      future_value.respond_to?(method, include_private)
    end
  end
end
|
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.0.33".freeze
end
Bump version
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.0.34".freeze
end
|
require 'json'
require 'net/http'
require 'ostruct'
require_relative 'core-ext/openstruct'
require_relative 'version'
require_relative 'exceptions'
module GdsApi::JsonUtils
  # Headers sent with every request.
  REQUEST_HEADERS = {
    'Accept' => 'application/json',
    'Content-Type' => 'application/json',
    'User-Agent' => "GDS Api Client v. #{GdsApi::VERSION}"
  }
  DEFAULT_TIMEOUT_IN_SECONDS = 0.5

  # Performs an HTTP request against url, yielding the Net::HTTP session and
  # the request path (with query string) to the block.
  #
  # Returns the parsed JSON body on success, nil on an HTTP error status or
  # on timeout/connection-reset. Raises GdsApi::EndpointNotFound when the
  # connection is refused.
  def do_request(url, &block)
    loggable = {request_uri: url, start_time: Time.now.to_f}

    url = URI.parse(url)
    request = url.path
    request = request + "?" + url.query if url.query

    logger.debug "I will request #{request}"

    # Fixed: initialise an SSL session when talking to an HTTPS endpoint;
    # without use_ssl the request would be sent in clear text on port 443.
    response = Net::HTTP.start(url.host, url.port, nil, nil, nil, nil, {use_ssl: url.port == 443}) do |http|
      http.read_timeout = options[:timeout] || DEFAULT_TIMEOUT_IN_SECONDS
      yield http, request
    end

    if response.is_a?(Net::HTTPSuccess)
      # Fixed: end_time was logged as a Time object here but as a float
      # everywhere else; use to_f consistently.
      logger.info loggable.merge(status: 'success', end_time: Time.now.to_f).to_json
      JSON.parse(response.body)
    else
      # Best-effort parse of the error body so the log entry is structured
      # where possible.
      body = begin
        JSON.parse(response.body)
      rescue
        response.body
      end
      loggable.merge!(status: response.code, end_time: Time.now.to_f, body: body)
      logger.warn loggable.to_json
      nil
    end
  rescue Errno::ECONNREFUSED
    logger.error loggable.merge(status: 'refused', end_time: Time.now.to_f).to_json
    raise GdsApi::EndpointNotFound.new("Could not connect to #{url}")
  rescue Timeout::Error, Errno::ECONNRESET
    # Deliberate best-effort: timeouts and resets are logged and reported
    # to the caller as nil rather than raised.
    logger.error loggable.merge(status: 'failed', end_time: Time.now.to_f).to_json
    nil
  end

  def logger
    GdsApi::Base.logger
  end
  private :logger

  # GET url, returning parsed JSON (or nil — see do_request).
  def get_json(url)
    do_request(url) do |http, path|
      http.get(path, REQUEST_HEADERS)
    end
  end

  # POST params as a JSON body to url.
  def post_json(url, params)
    do_request(url) do |http, path|
      http.post(path, params.to_json, REQUEST_HEADERS)
    end
  end

  # PUT params as a JSON body to url.
  def put_json(url, params)
    do_request(url) do |http, path|
      http.put(path, params.to_json, REQUEST_HEADERS)
    end
  end

  # Recursively converts nested Hashes/Arrays into OpenStructs so callers
  # can use dot access on parsed JSON.
  def to_ostruct(object)
    case object
    when Hash
      OpenStruct.new Hash[object.map { |key, value| [key, to_ostruct(value)] }]
    when Array
      object.map { |k| to_ostruct(k) }
    else
      object
    end
  end
end
Initialise an SSL session properly where required
require 'json'
require 'net/http'
require 'ostruct'
require_relative 'core-ext/openstruct'
require_relative 'version'
require_relative 'exceptions'
module GdsApi::JsonUtils
  # Headers sent with every request.
  REQUEST_HEADERS = {
    'Accept' => 'application/json',
    'Content-Type' => 'application/json',
    'User-Agent' => "GDS Api Client v. #{GdsApi::VERSION}"
  }
  DEFAULT_TIMEOUT_IN_SECONDS = 0.5

  # Performs an HTTP request against url, yielding the Net::HTTP session and
  # the request path (with query string) to the block. Returns the parsed
  # JSON body on success, nil on an HTTP error status or timeout/reset.
  # Raises GdsApi::EndpointNotFound when the connection is refused.
  def do_request(url, &block)
    loggable = {request_uri: url, start_time: Time.now.to_f}

    url = URI.parse(url)
    request = url.path
    request = request + "?" + url.query if url.query

    logger.debug "I will request #{request}"

    # Initialise an SSL session when talking to an HTTPS endpoint (keyed off
    # the standard HTTPS port).
    response = Net::HTTP.start(url.host, url.port, nil, nil, nil, nil, {use_ssl: url.port == 443}) do |http|
      http.read_timeout = options[:timeout] || DEFAULT_TIMEOUT_IN_SECONDS
      yield http, request
    end

    if response.is_a?(Net::HTTPSuccess)
      # NOTE(review): end_time here is a Time object while every other log
      # entry uses Time.now.to_f — confirm whether this is intentional.
      logger.info loggable.merge(status: 'success', end_time: Time.now).to_json
      JSON.parse(response.body)
    else
      # Best-effort parse of the error body so the log entry is structured
      # where possible.
      body = begin
        JSON.parse(response.body)
      rescue
        response.body
      end
      loggable.merge!(status: response.code, end_time: Time.now.to_f, body: body)
      logger.warn loggable.to_json
      nil
    end
  rescue Errno::ECONNREFUSED
    logger.error loggable.merge(status: 'refused', end_time: Time.now.to_f).to_json
    raise GdsApi::EndpointNotFound.new("Could not connect to #{url}")
  rescue Timeout::Error, Errno::ECONNRESET => e
    # Deliberate best-effort: timeouts and resets are logged and reported to
    # the caller as nil rather than raised.
    logger.error loggable.merge(status: 'failed', end_time: Time.now.to_f).to_json
    nil
  end

  def logger
    GdsApi::Base.logger
  end
  private :logger

  # GET url, returning parsed JSON (or nil — see do_request).
  def get_json(url)
    do_request(url) do |http, path|
      http.get(path, REQUEST_HEADERS)
    end
  end

  # POST params as a JSON body to url.
  def post_json(url, params)
    do_request(url) do |http, path|
      http.post(path, params.to_json, REQUEST_HEADERS)
    end
  end

  # PUT params as a JSON body to url.
  def put_json(url, params)
    do_request(url) do |http, path|
      http.put(path, params.to_json, REQUEST_HEADERS)
    end
  end

  # Recursively converts nested Hashes/Arrays into OpenStructs so callers
  # can use dot access on parsed JSON.
  def to_ostruct(object)
    case object
    when Hash
      OpenStruct.new Hash[object.map { |key, value| [key, to_ostruct(value)] }]
    when Array
      object.map { |k| to_ostruct(k) }
    else
      object
    end
  end
end
|
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.0.2".freeze
end
Bump version
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "1.0.3".freeze
end
|
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.9.25".freeze
end
Bump version
module TransamCore
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.9.26".freeze
end
|
module Geocluster
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.0.1".freeze
end
Bump up version
module Geocluster
  # Gem version. Frozen so the shared constant cannot be mutated in place.
  VERSION = "0.1.1".freeze
end
|
require 'countries'
require 'base64'
require 'socket'
require 'easypost'
require 'net/http'
require 'net/scp'
require 'uri'
class Admin::Store::ShipmentsController < Admin::BaseController
skip_before_filter :verify_authenticity_token, only: :label_print
# Lists shipments with optional filters; responds with a paginated HTML
# page or a CSV export.
def index
  authorize Shipment.new
  q = params[:q]
  # NOTE(review): sort_column/sort_direction are interpolated into ORDER BY;
  # assumed to be whitelisted upstream — verify.
  s = Shipment.includes(:order, :items, :inventory_transaction, [items: :order_item]).order(sort_column + " " + sort_direction)
  # Fixed: the search term was previously interpolated directly into the SQL
  # string, allowing SQL injection from the search box. Use a named bind.
  s = s.where("recipient_name LIKE :q OR recipient_company LIKE :q OR recipient_city LIKE :q", q: "%#{q}%") unless q.blank?
  s = s.where("store_orders.user_id = ?", params[:user_id]) unless params[:user_id].blank?
  s = s.where("store_orders.affiliate_id = ?", params[:affiliate_id]) unless params[:affiliate_id].blank?
  s = s.where(carrier: params[:carrier]) unless params[:carrier].blank?
  s = s.where(ship_date: params[:ship_date]) unless params[:ship_date].blank?
  s = s.where(status: params[:status]) unless params[:status].blank?
  s = s.where(manifest_id: params[:manifest_id]) unless params[:manifest_id].blank?
  respond_to do |format|
    format.html { @shipments = s.paginate(page: params[:page], per_page: @per_page) }
    format.csv { send_data Shipment.to_csv(s, skip_cols: ['label_data']) }
  end
end
# Builds a new shipment for an existing order; without an order id the
# user is sent to the order picker first.
def new
  order_id = params[:order_id]
  return redirect_to(action: 'choose_order') if order_id.nil?

  @order = Order.find(order_id)
  if @order.nil?
    flash[:notice] = "Order #{params[:order_id]} was not found."
    return redirect_to(action: 'choose_order')
  end

  begin
    @shipment = authorize @order.create_shipment(session[:user_id], false)
  rescue => error
    flash[:error] = error.message
    return redirect_to(:back)
  end

  # Only the first shipment of an order carries the full order total.
  @shipment.invoice_amount = @order.total if @shipment.sequence == 1
  render 'edit'
end
# Persists a new shipment and records the event in the order history.
def create
  @shipment = authorize Shipment.new(shipment_params)
  @shipment.fulfilled_by_id = current_user.id

  unless @shipment.save
    render 'edit'
    return
  end

  # create order history item
  OrderHistory.create order_id: @shipment.order_id, user_id: current_user.id, event_type: :shipment_created,
                      system_name: 'Rhombus', identifier: @shipment.id, comment: "shipment created: #{@shipment}"
  flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was successfully created."
  redirect_to action: 'show', id: @shipment.id
end
# Shows a single shipment with its items eager-loaded down to the brand.
def show
  scope = Shipment.includes(items: { order_item: { product: :brand } })
  @shipment = authorize scope.find(params[:id])
end
# Edits a shipment, offering a zero-quantity row for every order item that
# was added to the order after the shipment was created.
def edit
  @shipment = authorize Shipment.includes(:items, [items: :order_item]).find(params[:id])
  existing_order_item_ids = @shipment.items.map(&:order_item_id)
  @shipment.order.items.each do |order_item|
    next if existing_order_item_ids.include?(order_item.id)
    @shipment.items.build(order_item_id: order_item.id, quantity: 0)
  end
end
# Applies edits to a shipment, recording who fulfilled it.
def update
  @shipment = authorize Shipment.find(params[:id])
  @shipment.fulfilled_by_id = current_user.id

  unless @shipment.update(shipment_params)
    render 'edit'
    return
  end

  flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was updated."
  redirect_to action: 'show', id: @shipment.id
end
# Deletes a shipment and returns to the previous page.
def destroy
  @shipment = authorize(Shipment.find(params[:id]))
  @shipment.destroy
  redirect_to(:back)
end
# Renders a print-friendly packing slip without the admin layout.
def packing_slip
  @shipment = Shipment.includes(:items, [items: :order_item]).find(params[:id])
  render('packing_slip', layout: false)
end
# Renders a print-friendly invoice without the admin layout.
def invoice
  @shipment = Shipment.find(params[:id])
  render('invoice', layout: false)
end
# Queues a background job that emails the invoice to the order's contact.
def email_invoice
  @shipment = Shipment.find(params[:id])
  SendInvoiceJob.perform_later(@shipment.id, session[:user_id])
  # Optimistic: delivery happens asynchronously in the job.
  flash[:success] = "Invoice was emailed to #{@shipment.order.notify_email}"
  redirect_to(:back)
end
# Renders the order-picker form (view only; no data is loaded here).
def choose_order
end
# Streams the stored label image bytes with the shipment's label format
# as the content type.
def label_image
  shipment = Shipment.find(params[:id])
  send_data(shipment.label_data, type: shipment.label_format)
end
def label
return render text: :ok
# used background processing for printing to thermal printer as it can take a few seconds
if ['epl2','zpl'].include?(params[:format])
ShippingLabelJob.perform_later(session[:user_id], params[:id], params[:format])
flash[:info] = "Shipping label dispatched to printer"
return redirect_to :back
end
# requested a PNG probably
shipment = Shipment.find(params[:id])
courier_data = JSON.parse(shipment.courier_data)
begin
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.label({'file_format' => params[:format]})
# download label
label_url = response[:postage_label]["label_#{params[:format]}_url"]
label_data = Net::HTTP.get(URI.parse(label_url))
rescue => e
flash[:error] = "Error downloading shipping label: " + e.message
return redirect_to :back
end
send_data label_data, filename: shipment.to_s + "." + params[:format]
end
def void_label
shipment = Shipment.find(params[:id])
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
courier_data = JSON.parse(shipment.courier_data)
begin
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.refund
flash[:info] = "Refund status: #{response[:refund_status]} - / - Tracking: #{response[:tracking_code]} - / - Confirmation: #{response[:confirmation_number] || "n/a"}"
shipment.update_attribute(:status, 'void') if response[:refund_status] == 'submitted'
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
def product_labels_pending
@shipments = Shipment.where(status: :pending)
sql = <<-EOF
select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
from store_shipment_items si
join store_order_items oi on oi.id = si.order_item_id
join store_products p on p.id = oi.product_id
join store_label_sheets sheet on sheet.id = p.label_sheet_id
where shipment_id in (#{@shipments.map(&:id).join(",")})
and si.quantity > 0
group by oi.item_number, uploaded_file
order by sheet.name, oi.item_number;
EOF
@items = []
ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
render 'product_labels'
end
def product_labels
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
@shipments = Shipment.where(id: params[:shipment_id])
sql = <<-EOF
select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
from store_shipment_items si
join store_order_items oi on oi.id = si.order_item_id
join store_products p on p.id = oi.product_id
join store_label_sheets sheet on sheet.id = p.label_sheet_id
where shipment_id in (#{params[:shipment_id].join(",")})
and si.quantity > 0
group by oi.item_number, uploaded_file
order by sheet.name, oi.item_number;
EOF
@items = []
ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
end
# Print one size of labels
def label_print
# First find the printer
p = Printer.find_by(id: params[:printer_id])
if p.nil?
flash[:error] = "Printer not found"
return redirect_to :back
end
# See which label size this job is for (poor English here)
label_prefix = Setting.get(:kiaro, "Label Prefix")
label = params[:label].split(" ", 2)[1] + ".alf"
label_count = 0
str = ""
logs = []
# LOOP through the items selected
params[:print_items].each do |h|
# puts h.inspect
next if h['quantity'] == "0"
next if h['personalized'] == 'true' && h['rendered_file'].blank?
qty = h['quantity'].to_i
label_count += qty
### QUICKCOMMAND LABEL SPECS #########
str << "LABELNAME=#{label}\r\n"
if h['personalized'] == 'true'
img = h['rendered_file'].split('/').last
str << "FIELD 001=#{label_prefix}\\personalized_labels\\#{img}\r\n"
else
sku, breed, variant = h['item_number'].split("-")
str << "FIELD 001=#{label_prefix}\\hb_labels\\#{breed}\\#{h['item_number']}.pdf\r\n"
end
str << "LABELQUANTITY=#{qty}\r\n"
str << "PRINTER=#{p.url}\r\n\r\n"
######################################
logs << Log.new(timestamp: DateTime.now,
loggable_type: 'Printer',
loggable_id: p.id,
event: :label_printed,
data1: h['item_number'],
data2: qty,
data3: p.name,
ip_address: request.remote_ip,
user_id: session[:user_id])
end
# handle nothing to print
if label_count == 0
flash[:error] = "No labels specified for printing."
return redirect_to :back
end
# puts str
# flash[:error] = "Testing short circuit."
# return redirect_to :back
# SCP file over to server
tmp_file = "/tmp/" + Time.now.strftime("%Y-%m-%d-%H%M%S") + ".acf"
File.write(tmp_file, str)
# example scp://user:pass@server1.mydomain.com:/home/kiaro/monitor/
uri = URI(Setting.get(:kiaro, "Print Job URI"))
begin
Net::SCP.upload!(uri.host, uri.user, tmp_file, uri.path, :ssh => { :password => uri.password, :port => uri.port || 22 })
flash[:success] = "#{label_count} labels submitted for printing"
logs.each(&:save)
Log.create(timestamp: DateTime.now, loggable_type: 'Printer', loggable_id: p.id, event: :job_submitted,
data1: label, data2: label_count, ip_address: request.remote_ip, user_id: session[:user_id])
rescue => e
flash[:error] = e.message
end
File.delete(tmp_file)
redirect_to :back
end
def packing_slip_batch
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
urls = ''
token = Cache.setting(Rails.configuration.domain_id, :system, 'Security Token')
website_url = Cache.setting(Rails.configuration.domain_id, :system, 'Website URL')
Shipment.where(id: params[:shipment_id]).each do |s|
digest = Digest::MD5.hexdigest(s.id.to_s + token)
urls += " " + website_url + packing_slip_admin_store_shipment_path(s, digest: digest)
OrderHistory.create(order_id: s.order_id, user_id: session[:user_id], event_type: :packing_slip_print,system_name: 'Rhombus',comment: "Packing slip printed")
end
output_file = "/tmp/#{SecureRandom.hex(6)}.pdf"
ret = system("wkhtmltopdf -q -s Letter #{urls} #{output_file}")
unless File.exists?(output_file)
flash[:error] = "Unable to generate PDF [Debug: #{$?}]"
return redirect_to :back
end
if params[:printer_id].blank?
send_file output_file
else
printer = Printer.find(params[:printer_id])
job = printer.print_file(output_file)
flash[:info] = "Print job submitted to '#{printer.name} [#{printer.location}]'. CUPS JobID: #{job.id}"
redirect_to :back
end
end
def shipping_label_batch
EasyPost.api_key = Cache.setting(Rails.configuration.domain_id, 'Shipping', 'EasyPost API Key')
if params[:printer_id].blank?
file_format = 'pdf'
else
p = Printer.find(params[:printer_id])
file_format = p.preferred_format
mime_type = (file_format == 'pdf' ? 'application/pdf' : 'text/plain')
end
count = 0
begin
Shipment.where(id: params[:shipment_id]).each do |s|
courier_data = JSON.parse(s.courier_data)
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.label({'file_format' => file_format})
# download label
label_url = response[:postage_label]["label_#{file_format}_url"]
label_data = Net::HTTP.get(URI.parse(label_url))
if params[:printer_id].blank?
return send_data label_data, filename: s.to_s + "." + file_format
else
p.print_data(label_data, mime_type)
count += 1
end
end
rescue => e
flash[:error] = e.message
return redirect_to :back
end
flash[:info] = "#{count} label(s) sent to #{p.name} [#{p.location}]"
redirect_to :back
end
def update_status
authorize Shipment.new, :update?
shipments = Shipment.where(id: params[:shipment_id]).where.not(status: params[:status])
shipments.each do |s|
s.update_attribute(:status, params[:status])
if s.status == 'shipped' && s.ship_date.nil?
s.update_attribute(:ship_date, Date.today)
end
end
flash[:info] = "Status of #{shipments.length} shipment(s) updated to '#{params[:status]}'"
redirect_to :back
end
def email_confirmation
shipment = Shipment.find(params[:id])
begin
OrderMailer.order_shipped(shipment.id, session[:user_id]).deliver_now
flash[:info] = "Shipment email sent to '#{shipment.order.notify_email}'"
rescue => e
flash[:info] = e.message
end
redirect_to :back
end
def batch
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
@shipments = Shipment.where(id: params[:shipment_id])
@batch = Shipment.new(ship_date: Date.today)
# Check to make sure that the batch contains shipments with identical contents
if @shipments.collect(&:items_hash).uniq.length > 1
flash.now[:warning] = "Selected batch contains more that one configuration of shipment."
#return redirect_to :back
end
# try to autopopulate fields
# if identical shipment was recentely shipped with same contents, set box size and weight
s = @shipments[0].similar_shipment
unless s.nil?
@batch = s.dup
@batch.ship_date = Date.today
end
end
def auto_batch
@shipments = Shipment.where(status: :pending, items_hash: params[:items_hash])
@batch = Shipment.new(ship_date: Date.today, items_hash: params[:items_hash])
# try to autopopulate fields
# if identical shipment was recentely shipped with same contents, set box size and weight
s = @shipments[0].similar_shipment
unless s.nil?
@batch = s.dup
@batch.ship_date = Date.today
end
render 'batch'
end
def scan
@shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
end
def verify_scan
@shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
scan_list = params["upc_list"].split("\n").map { |x| x.chomp! }
@scans = {}
scan_list.each do |scan|
@scans[scan] = 0 if @scans[scan].nil?
@scans[scan] += 1
end
render 'scan'
end
def create_inventory_transaction
@shipment = Shipment.includes(:items, [items: :product]).find(params[:id])
begin
tran = @shipment.new_inventory_transaction
tran.shipment_id = @shipment.id
tran.save!
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
private
def shipment_params
params.require(:shipment).permit!
end
def sort_column
params[:sort] || "store_shipments.id"
end
def sort_direction
%w[asc desc].include?(params[:direction]) ? params[:direction] : "desc"
end
end
inventory updates
require 'countries'
require 'base64'
require 'socket'
require 'easypost'
require 'net/http'
require 'net/scp'
require 'uri'
class Admin::Store::ShipmentsController < Admin::BaseController
skip_before_filter :verify_authenticity_token, only: :label_print
def index
authorize Shipment.new
q = params[:q]
s = Shipment.includes(:order, :items, :inventory_transaction, [items: :order_item]).order(sort_column + " " + sort_direction)
s = s.where("recipient_name LIKE '%#{q}%' OR recipient_company LIKE '%#{q}%' OR recipient_city LIKE '%#{q}%'") unless q.blank?
s = s.where("store_orders.user_id = ?", params[:user_id]) unless params[:user_id].blank?
s = s.where("store_orders.affiliate_id = ?", params[:affiliate_id]) unless params[:affiliate_id].blank?
s = s.where(carrier: params[:carrier]) unless params[:carrier].blank?
s = s.where(ship_date: params[:ship_date]) unless params[:ship_date].blank?
s = s.where(status: params[:status]) unless params[:status].blank?
s = s.where(manifest_id: params[:manifest_id]) unless params[:manifest_id].blank?
respond_to do |format|
format.html { @shipments = s.paginate(page: params[:page], per_page: @per_page) }
format.csv { send_data Shipment.to_csv(s, skip_cols: ['label_data']) }
end
end
def new
# check if order id was passed in?
return redirect_to action: 'choose_order' if params[:order_id].nil?
@order = Order.find(params[:order_id])
if @order.nil?
flash[:notice] = "Order #{params[:order_id]} was not found."
return redirect_to action: 'choose_order'
end
begin
@shipment = authorize @order.create_shipment(session[:user_id], false)
rescue => e
flash[:error] = e.message
return redirect_to :back
end
@shipment.invoice_amount = @order.total if @shipment.sequence == 1
render 'edit'
end
def create
@shipment = authorize Shipment.new(shipment_params)
@shipment.fulfilled_by_id = current_user.id
if @shipment.save
# create order history item
OrderHistory.create order_id: @shipment.order_id, user_id: current_user.id, event_type: :shipment_created,
system_name: 'Rhombus', identifier: @shipment.id, comment: "shipment created: #{@shipment}"
flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was successfully created."
redirect_to action: 'show', id: @shipment.id
else
render 'edit'
end
end
def show
@shipment = authorize Shipment.includes(items: { order_item: {product: :brand} }).find(params[:id])
end
def edit
@shipment = authorize Shipment.includes(:items, [items: :order_item]).find(params[:id])
# add any items that were added to Order later and not currently present in Shipment
@shipment.order.items.each do |oi|
unless @shipment.items.any? { |x| x.order_item_id == oi.id }
@shipment.items.build(order_item_id: oi.id, quantity: 0)
end
end
end
def update
@shipment = authorize Shipment.find(params[:id])
@shipment.fulfilled_by_id = current_user.id
if @shipment.update(shipment_params)
flash[:notice] = "Shipment #{@shipment.order_id}-#{@shipment.sequence} was updated."
redirect_to action: 'show', id: @shipment.id
else
render 'edit'
end
end
def destroy
@shipment = authorize Shipment.find(params[:id])
@shipment.destroy
redirect_to :back
end
def packing_slip
@shipment = Shipment.includes(:items, [items: :order_item]).find(params[:id])
render 'packing_slip', layout: false
end
def invoice
@shipment = Shipment.find(params[:id])
render 'invoice', layout: false
end
def email_invoice
@shipment = Shipment.find(params[:id])
SendInvoiceJob.perform_later(@shipment.id, session[:user_id])
flash[:success] = "Invoice was emailed to #{@shipment.order.notify_email}"
redirect_to :back
end
def choose_order
end
def label_image
shipment = Shipment.find(params[:id])
send_data shipment.label_data, type: shipment.label_format
end
def label
return render text: :ok
# used background processing for printing to thermal printer as it can take a few seconds
if ['epl2','zpl'].include?(params[:format])
ShippingLabelJob.perform_later(session[:user_id], params[:id], params[:format])
flash[:info] = "Shipping label dispatched to printer"
return redirect_to :back
end
# requested a PNG probably
shipment = Shipment.find(params[:id])
courier_data = JSON.parse(shipment.courier_data)
begin
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.label({'file_format' => params[:format]})
# download label
label_url = response[:postage_label]["label_#{params[:format]}_url"]
label_data = Net::HTTP.get(URI.parse(label_url))
rescue => e
flash[:error] = "Error downloading shipping label: " + e.message
return redirect_to :back
end
send_data label_data, filename: shipment.to_s + "." + params[:format]
end
def void_label
shipment = Shipment.find(params[:id])
EasyPost.api_key = Cache.setting(shipment.order.domain_id, 'Shipping', 'EasyPost API Key')
courier_data = JSON.parse(shipment.courier_data)
begin
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.refund
flash[:info] = "Refund status: #{response[:refund_status]} - / - Tracking: #{response[:tracking_code]} - / - Confirmation: #{response[:confirmation_number] || "n/a"}"
shipment.update_attribute(:status, 'void') if response[:refund_status] == 'submitted'
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
def product_labels_pending
@shipments = Shipment.where(status: :pending)
sql = <<-EOF
select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
from store_shipment_items si
join store_order_items oi on oi.id = si.order_item_id
join store_products p on p.id = oi.product_id
join store_label_sheets sheet on sheet.id = p.label_sheet_id
where shipment_id in (#{@shipments.map(&:id).join(",")})
and si.quantity > 0
group by oi.item_number, uploaded_file
order by sheet.name, oi.item_number;
EOF
@items = []
ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
render 'product_labels'
end
def product_labels
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
@shipments = Shipment.where(id: params[:shipment_id])
sql = <<-EOF
select si.shipment_id, si.id as shipment_item_id, sheet.name as label, oi.item_number, p.name, p.option_title,
sum(si.quantity) as quantity, uploaded_file, upload_file_preview, rendered_file
from store_shipment_items si
join store_order_items oi on oi.id = si.order_item_id
join store_products p on p.id = oi.product_id
join store_label_sheets sheet on sheet.id = p.label_sheet_id
where shipment_id in (#{params[:shipment_id].join(",")})
and si.quantity > 0
group by oi.item_number, uploaded_file
order by sheet.name, oi.item_number;
EOF
@items = []
ActiveRecord::Base.connection.execute(sql).each(as: :hash) { |row| @items << row.with_indifferent_access }
end
# Print one size of labels
def label_print
# First find the printer
p = Printer.find_by(id: params[:printer_id])
if p.nil?
flash[:error] = "Printer not found"
return redirect_to :back
end
# See which label size this job is for (poor English here)
label_prefix = Setting.get(:kiaro, "Label Prefix")
label = params[:label].split(" ", 2)[1] + ".alf"
label_count = 0
str = ""
logs = []
# LOOP through the items selected
params[:print_items].each do |h|
# puts h.inspect
next if h['quantity'] == "0"
next if h['personalized'] == 'true' && h['rendered_file'].blank?
qty = h['quantity'].to_i
label_count += qty
### QUICKCOMMAND LABEL SPECS #########
str << "LABELNAME=#{label}\r\n"
if h['personalized'] == 'true'
img = h['rendered_file'].split('/').last
str << "FIELD 001=#{label_prefix}\\personalized_labels\\#{img}\r\n"
else
sku, breed, variant = h['item_number'].split("-")
str << "FIELD 001=#{label_prefix}\\hb_labels\\#{breed}\\#{h['item_number']}.pdf\r\n"
end
str << "LABELQUANTITY=#{qty}\r\n"
str << "PRINTER=#{p.url}\r\n\r\n"
######################################
logs << Log.new(timestamp: DateTime.now,
loggable_type: 'Printer',
loggable_id: p.id,
event: :label_printed,
data1: h['item_number'],
data2: qty,
data3: p.name,
ip_address: request.remote_ip,
user_id: session[:user_id])
end
# handle nothing to print
if label_count == 0
flash[:error] = "No labels specified for printing."
return redirect_to :back
end
# puts str
# flash[:error] = "Testing short circuit."
# return redirect_to :back
# SCP file over to server
tmp_file = "/tmp/" + Time.now.strftime("%Y-%m-%d-%H%M%S") + ".acf"
File.write(tmp_file, str)
# example scp://user:pass@server1.mydomain.com:/home/kiaro/monitor/
uri = URI(Setting.get(:kiaro, "Print Job URI"))
begin
Net::SCP.upload!(uri.host, uri.user, tmp_file, uri.path, :ssh => { :password => uri.password, :port => uri.port || 22 })
flash[:success] = "#{label_count} labels submitted for printing"
logs.each(&:save)
Log.create(timestamp: DateTime.now, loggable_type: 'Printer', loggable_id: p.id, event: :job_submitted,
data1: label, data2: label_count, ip_address: request.remote_ip, user_id: session[:user_id])
rescue => e
flash[:error] = e.message
end
File.delete(tmp_file)
redirect_to :back
end
def packing_slip_batch
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
urls = ''
token = Cache.setting(Rails.configuration.domain_id, :system, 'Security Token')
website_url = Cache.setting(Rails.configuration.domain_id, :system, 'Website URL')
Shipment.where(id: params[:shipment_id]).each do |s|
digest = Digest::MD5.hexdigest(s.id.to_s + token)
urls += " " + website_url + packing_slip_admin_store_shipment_path(s, digest: digest)
OrderHistory.create(order_id: s.order_id, user_id: session[:user_id], event_type: :packing_slip_print,system_name: 'Rhombus',comment: "Packing slip printed")
end
output_file = "/tmp/#{SecureRandom.hex(6)}.pdf"
ret = system("wkhtmltopdf -q -s Letter #{urls} #{output_file}")
unless File.exists?(output_file)
flash[:error] = "Unable to generate PDF [Debug: #{$?}]"
return redirect_to :back
end
if params[:printer_id].blank?
send_file output_file
else
printer = Printer.find(params[:printer_id])
job = printer.print_file(output_file)
flash[:info] = "Print job submitted to '#{printer.name} [#{printer.location}]'. CUPS JobID: #{job.id}"
redirect_to :back
end
end
def shipping_label_batch
EasyPost.api_key = Cache.setting(Rails.configuration.domain_id, 'Shipping', 'EasyPost API Key')
if params[:printer_id].blank?
file_format = 'pdf'
else
p = Printer.find(params[:printer_id])
file_format = p.preferred_format
mime_type = (file_format == 'pdf' ? 'application/pdf' : 'text/plain')
end
count = 0
begin
Shipment.where(id: params[:shipment_id]).each do |s|
courier_data = JSON.parse(s.courier_data)
ep_shipment = EasyPost::Shipment.retrieve(courier_data['id'])
response = ep_shipment.label({'file_format' => file_format})
# download label
label_url = response[:postage_label]["label_#{file_format}_url"]
label_data = Net::HTTP.get(URI.parse(label_url))
if params[:printer_id].blank?
return send_data label_data, filename: s.to_s + "." + file_format
else
p.print_data(label_data, mime_type)
count += 1
end
end
rescue => e
flash[:error] = e.message
return redirect_to :back
end
flash[:info] = "#{count} label(s) sent to #{p.name} [#{p.location}]"
redirect_to :back
end
def update_status
authorize Shipment.new, :update?
shipments = Shipment.where(id: params[:shipment_id]).where.not(status: params[:status])
shipments.each do |s|
s.update_attribute(:status, params[:status])
if s.status == 'shipped' && s.ship_date.nil?
s.update_attribute(:ship_date, Date.today)
end
end
flash[:info] = "Status of #{shipments.length} shipment(s) updated to '#{params[:status]}'"
redirect_to :back
end
def email_confirmation
shipment = Shipment.find(params[:id])
begin
OrderMailer.order_shipped(shipment.id, session[:user_id]).deliver_now
flash[:info] = "Shipment email sent to '#{shipment.order.notify_email}'"
rescue => e
flash[:info] = e.message
end
redirect_to :back
end
def batch
if params[:shipment_id].blank?
flash[:error] = "No shipments selected. Please check at least one."
return redirect_to :back
end
@shipments = Shipment.where(id: params[:shipment_id])
@batch = Shipment.new(ship_date: Date.today)
# Check to make sure that the batch contains shipments with identical contents
if @shipments.collect(&:items_hash).uniq.length > 1
flash.now[:warning] = "Selected batch contains more that one configuration of shipment."
#return redirect_to :back
end
# try to autopopulate fields
# if identical shipment was recentely shipped with same contents, set box size and weight
s = @shipments[0].similar_shipment
unless s.nil?
@batch = s.dup
@batch.ship_date = Date.today
end
end
def auto_batch
@shipments = Shipment.where(status: :pending, items_hash: params[:items_hash])
@batch = Shipment.new(ship_date: Date.today, items_hash: params[:items_hash])
# try to autopopulate fields
# if identical shipment was recentely shipped with same contents, set box size and weight
s = @shipments[0].similar_shipment
unless s.nil?
@batch = s.dup
@batch.ship_date = Date.today
end
render 'batch'
end
def scan
@shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
end
def verify_scan
@shipment = Shipment.includes(:items, [items: :product], [items: :affiliate]).find(params[:id])
scan_list = params["upc_list"].split("\n").map { |x| x.chomp! }
@scans = {}
scan_list.each do |scan|
@scans[scan] = 0 if @scans[scan].nil?
@scans[scan] += 1
end
render 'scan'
end
def create_inventory_transaction
@shipment = Shipment.includes(:items).find(params[:id])
begin
tran = @shipment.new_inventory_transaction
tran.external_id = @shipment.uuid
tran.responsible_party = current_user.name
tran.save!
rescue => e
flash[:error] = e.message
end
redirect_to :back
end
private
def shipment_params
params.require(:shipment).permit!
end
def sort_column
params[:sort] || "store_shipments.id"
end
def sort_direction
%w[asc desc].include?(params[:direction]) ? params[:direction] : "desc"
end
end
|
#
# Copyright 2015-2016, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'poise_boiler/version'
Gem::Specification.new do |spec|
  spec.name = 'poise-boiler'
  spec.version = PoiseBoiler::VERSION
  spec.authors = ['Noah Kantrowitz']
  spec.email = %w{noah@coderanger.net}
  # Fix: typo "gemss" -> "gems".
  spec.description = 'Boilerplate-reduction helpers for Poise/Halite-style gems.'
  spec.summary = spec.description
  spec.homepage = 'https://github.com/poise/poise-boiler'
  # Fix: use the SPDX identifier so `gem build` does not warn.
  spec.license = 'Apache-2.0'
  spec.metadata['halite_ignore'] = 'true'

  spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = %w{lib}

  # Development gems
  spec.add_dependency 'bundler' # Used for Bundler.load_gemspec
  spec.add_dependency 'rake', '>= 10.4', '< 12.0'
  spec.add_dependency 'travis', '~> 1.8', '>= 1.8.1'
  spec.add_dependency 'yard', '~> 0.8'
  spec.add_dependency 'yard-classmethods', '~> 1.0'
  spec.add_dependency 'halite', '~> 1.2' # This is a circular dependency
  spec.add_dependency 'mixlib-shellout', '>= 1.4', '< 3.0' # Chef 11 means shellout 1.4 :-(
  spec.add_dependency 'pry'
  spec.add_dependency 'pry-byebug'
  spec.add_dependency 'git', '~> 1.2'

  # Test gems
  spec.add_dependency 'rspec', '~> 3.2'
  spec.add_dependency 'rspec-its', '~> 1.2'
  spec.add_dependency 'chefspec', '~> 5.0'
  spec.add_dependency 'fuubar', '~> 2.0'
  spec.add_dependency 'simplecov', '~> 0.9'
  spec.add_dependency 'foodcritic', '~> 7.0'

  # Integration gems
  # https://github.com/test-kitchen/test-kitchen/issues/922
  spec.add_dependency 'test-kitchen', '~> 1.7', '>= 1.7.1'
  spec.add_dependency 'kitchen-vagrant'
  spec.add_dependency 'vagrant-wrapper'
  spec.add_dependency 'kitchen-docker', '>= 2.6.0.rc.0'
  spec.add_dependency 'kitchen-sync', '~> 2.1'
  spec.add_dependency 'poise-profiler', '~> 1.0'

  # Windows integration gems
  spec.add_dependency 'kitchen-ec2', '~> 1.0'
  spec.add_dependency 'winrm', '~> 2.0'
  spec.add_dependency 'winrm-fs', '~> 1.0'

  # Travis gems
  spec.add_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_dependency 'codecov', '~> 0.0', '>= 0.0.2'

  # Development dependencies (yo dawg)
  spec.add_development_dependency 'rspec-command', '~> 1.0'
  spec.add_development_dependency 'kitchen-rackspace', '~> 0.20'
end
bump foodcritic
#
# Copyright 2015-2016, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'poise_boiler/version'
Gem::Specification.new do |spec|
  spec.name = 'poise-boiler'
  spec.version = PoiseBoiler::VERSION
  spec.authors = ['Noah Kantrowitz']
  spec.email = %w{noah@coderanger.net}
  # Fix: typo "gemss" -> "gems".
  spec.description = 'Boilerplate-reduction helpers for Poise/Halite-style gems.'
  spec.summary = spec.description
  spec.homepage = 'https://github.com/poise/poise-boiler'
  # Fix: use the SPDX identifier so `gem build` does not warn.
  spec.license = 'Apache-2.0'
  spec.metadata['halite_ignore'] = 'true'

  spec.files = `git ls-files`.split($/)
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = %w{lib}

  # Development gems
  spec.add_dependency 'bundler' # Used for Bundler.load_gemspec
  spec.add_dependency 'rake', '>= 10.4', '< 12.0'
  spec.add_dependency 'travis', '~> 1.8', '>= 1.8.1'
  spec.add_dependency 'yard', '~> 0.8'
  spec.add_dependency 'yard-classmethods', '~> 1.0'
  spec.add_dependency 'halite', '~> 1.2' # This is a circular dependency
  spec.add_dependency 'mixlib-shellout', '>= 1.4', '< 3.0' # Chef 11 means shellout 1.4 :-(
  spec.add_dependency 'pry'
  spec.add_dependency 'pry-byebug'
  spec.add_dependency 'git', '~> 1.2'

  # Test gems
  spec.add_dependency 'rspec', '~> 3.2'
  spec.add_dependency 'rspec-its', '~> 1.2'
  spec.add_dependency 'chefspec', '~> 5.0'
  spec.add_dependency 'fuubar', '~> 2.0'
  spec.add_dependency 'simplecov', '~> 0.9'
  spec.add_dependency 'foodcritic', '~> 8.0'

  # Integration gems
  # https://github.com/test-kitchen/test-kitchen/issues/922
  spec.add_dependency 'test-kitchen', '~> 1.7', '>= 1.7.1'
  spec.add_dependency 'kitchen-vagrant'
  spec.add_dependency 'vagrant-wrapper'
  spec.add_dependency 'kitchen-docker', '>= 2.6.0.rc.0'
  spec.add_dependency 'kitchen-sync', '~> 2.1'
  spec.add_dependency 'poise-profiler', '~> 1.0'

  # Windows integration gems
  spec.add_dependency 'kitchen-ec2', '~> 1.0'
  spec.add_dependency 'winrm', '~> 2.0'
  spec.add_dependency 'winrm-fs', '~> 1.0'

  # Travis gems
  spec.add_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_dependency 'codecov', '~> 0.0', '>= 0.0.2'

  # Development dependencies (yo dawg)
  spec.add_development_dependency 'rspec-command', '~> 1.0'
  spec.add_development_dependency 'kitchen-rackspace', '~> 0.20'
end
|
require 'styles'
require 'line'
require 'effects'
require 'view'
require 'keys'
require 'clipboard'
class Notes
extend ElMixin
def self.menu
puts "- .help_wiki_format"
end
def self.block regex="^| "
left, after_header, right = View.block_positions regex
buffer_substring(after_header, right)
end
def self.expand_block up=false
# If nothing hidden, hide all but current
if point_min == 1 && (buffer_size + 1 == point_max)
left, after_header, right = View.block_positions "^| "
narrow_to_region left, right
return
end
# Otherwise, expand all, go to next heading, hide all but current
widen
Notes.to_block
left, after_header, right = View.block_positions "^| "
narrow_to_region left, right
end
def self.archive
block = get_block
block.archive
end
def self.show_text
block = get_block
block.show_text
end
def self.hide_text
block = get_block
block.hide_text
end
def self.to_block up=false
if up
(Keys.prefix || 1).times do
Line.to_left
re_search_backward "^| "
end
else
(Keys.prefix || 1).times do
Line.next if Line.matches(/^\| /)
re_search_forward "^| "
Line.to_left
end
end
end
def self.move_block up=false
block = get_block
block.blink
block.delete_content
if up
(Keys.prefix || 1).times do
re_search_backward "^| ", nil, 1
end
insert block.content
search_backward_regexp "^| "
else
re_search_forward "^| "
(Keys.prefix || 1).times do
re_search_forward "^| ", nil, 1
end
beginning_of_line
insert block.content
search_backward_regexp "^| "
end
moved_block = get_block
moved_block.blink
end
def self.insert_heading
Line.start
times = Keys.prefix_u? ? 1 : (Keys.prefix || 1)
times.times { insert "|" }
insert " "
open_line(4) if Keys.prefix_u? # If U create blank lines.
#PauseMeansSpace.go
# Exit control lock mode if on
#ControlLock.disable
end
def self.cut_block no_clipboard=false
block = get_block
block.blink
unless no_clipboard
Clipboard.set("0", block.content)
end
block.delete_content
end
def self.move_block_to_top no_clipboard=false
block = get_block
block.blink
block.delete_content
beginning_of_buffer
insert block.content
goto_line 2
moved_block = get_block
moved_block.blink
end
def self.keys
# Get reference to map if already there (don't mess with existing buffers)
elvar.notes_mode_map = make_sparse_keymap unless boundp :notes_mode_map
Keys.CC(:notes_mode_map) { Agenda.quick_add_line }
Keys.CA(:notes_mode_map) { Notes.archive }
Keys.CO(:notes_mode_map) { Notes.show_text }
Keys.CM(:notes_mode_map) { Notes.hide_text }
Keys.CS(:notes_mode_map) { $el.insert Time.now.strftime("- %Y-%m-%d %I:%M%p: ").downcase.sub(' 0', ' ') }
Keys.CT(:notes_mode_map) { Notes.move_block_to_top }
Keys.CD(:notes_mode_map) { Notes.cut_block true }
Keys.CK(:notes_mode_map) { Notes.cut_block }
Keys.CH(:notes_mode_map) { Notes.insert_heading } # Insert ||... etc. heading
Keys.CN(:notes_mode_map) { Notes.to_block } # Go to block after next block
Keys.CP(:notes_mode_map) { Notes.to_block true } # Go to block before next block
Keys.CE(:notes_mode_map) { Notes.expand_block } # Show just block
Keys.CF(:notes_mode_map) { Notes.move_block } # Move block down to after next block
Keys.CB(:notes_mode_map) { Notes.move_block true } # Move block up to before next block
define_key :notes_mode_map, kbd("C-\\") do
widen; Hide.show
Hide.hide_unless /^\| /
recenter -2
Hide.search
end
# Make right-click launch a line
define_key(:notes_mode_map, kbd("<mouse-3>"), :notes_mouse_launch)
define_key(:notes_mode_map, kbd("<double-mouse-1>"), :notes_mouse_double_click)
define_key(:notes_mode_map, kbd("<mouse-1>"), :notes_mouse_toggle)
end
def self.define_styles
# - foo (r): <here>
Styles.define :notes_light_gray,
:fg => "bbb"
# - foo (r): <here>
Styles.define :variable,
:face => 'verdana' #, :size => "+2"
# - foo (r): <here>
Styles.define :notes_label_parens,
:fg => "bbb",
:size => "-2",
:face => 'arial'
# |...
#h1_size = "+2"
Styles.define :notes_h1,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "666699",
:bold => true
Styles.define :notes_h1_pipe,
:face => 'arial', :size => "+2",
:fg => '9999cc', :bg => "666699",
:bold => true
Styles.define :notes_h1i,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "66aa66",
:bold => true
Styles.define :notes_h1i_pipe,
:face => 'arial', :size => "+2",
:fg => 'aad2aa', :bg => "66aa66",
:bold => true
Styles.define :notes_h1e,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "cccc66",
:bold => true
Styles.define :notes_h1e_pipe,
:face => 'arial', :size => "+2",
:fg => 'f3f399', :bg => "cccc66",
:bold => true
Styles.define :notes_h1c,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "996699",
:bold => true
Styles.define :notes_h1c_pipe,
:face => 'arial', :size => "+2",
:fg => 'bb99bb', :bg => "996699",
:bold => true
Styles.define :notes_h1s,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "449688",
:bold => true
Styles.define :notes_h1s_pipe,
:face => 'arial', :size => "+2",
:fg => '77c6aa', :bg => "449688",
:bold => true
Styles.define :notes_h1n,
:face => 'arial', :size => "+2",
:fg => 'ffffff', :bg => "eeaa33",
:bold => true
Styles.define :notes_h1n_pipe,
:face => 'arial', :size => "+2",
:fg => 'ffdd88', :bg => "eeaa33",
:bold => true
# ||...
Styles.define :notes_h2,
:face => 'arial', :size => "0",
:fg => '8888bb', :bg => "e0e0f2",
# :fg => 'ffffff', :bg => "ddddee",
# :fg => 'ffffff', :bg => "aaaacc",
:bold => true
Styles.define :notes_h2_pipe,
:face => 'arial', :size => "0",
:fg => 'bbbbdd', :bg => "e0e0f2",
# :fg => 'ddddf0', :bg => "ddddee",
# :fg => 'ddddf0', :bg => "aaaacc",
:bold => true
# |||...
Styles.define :notes_h3,
:face => 'arial', :size => "-1",
:fg => '9999ee',#, :bg => "9999cc",
:bold => true
Styles.define :notes_h3_pipe,
:face => 'arial', :size => "-1",
:fg => 'ddddf0'
# ||||...
Styles.define :notes_h4,
:face => 'arial', :size => "-2",
:fg => 'bbbbee',
:bold => true
Styles.define :notes_h4_pipe,
:face => 'arial', :size => "-2",
:fg => 'ddddf0'
# Labels, emphasis
Styles.define :notes_label,
:face => 'arial black', :size => "0", # Mac
#:face => 'courier', :size => "0", # Mac
:fg => "dd7700", :bold => true
Styles.define :notes_bullet_parens,
:face => 'arial', :size => "-2",
:fg => "ee7700", :bold => true
Styles.define :notes_exclamation, # Green bold text
:face => 'arial black', :size => "0",
:fg => "55aa22", :bold => true
# Strikethrough
Styles.define(:strike, :strike => true)
# - <here> (r): foo
Styles.define :notes_label_link,
:face => 'verdana', :size => "-1",
:fg => "66f",
:bold => true, :underline => true
Styles.define :notes_g, :fg => "00B"
Styles.define :notes_blue, :fg => "46f"
Styles.define :notes_red, :fg => "c00"
Styles.define :notes_yellow, :fg => "CC0"
Styles.define :notes_green, :fg => "0C0"
if Styles.inverse
Styles.define :notes_h1,
:fg => 'ffffff', :bg => "333366"
Styles.define :notes_h1_pipe,
:fg => '7777aa', :bg => "333366"
Styles.define :notes_h2,
:bg => "181833"
Styles.define :notes_h2_pipe,
:fg => '333366', :bg => "181833"
Styles.define :notes_h1e,
:bg => "666633"
Styles.define :notes_h1e_pipe,
:fg => 'aaaa77', :bg => "666633"
end
end
def self.apply_styles
# Don't format quotes (it overrides the following formatting)
Styles.clear
# |... lines
Styles.apply("^\\(| \\)\\(.*\n\\)", nil, :notes_h1_pipe, :notes_h1)
Styles.apply("^\\(| .+?: \\)\\(.+\n\\)", nil, :notes_h1_pipe, :notes_h1)
# | i ... lines
Styles.apply("^\\(| i \\)\\(.+\n\\)", nil, :notes_h1i_pipe, :notes_h1i)
Styles.apply("^\\(| i .+?: \\)\\(.+\n\\)", nil, :notes_h1i_pipe, :notes_h1i)
# | e ... lines
Styles.apply("^\\(| e \\)\\(.+\n\\)", nil, :notes_h1e_pipe, :notes_h1e)
Styles.apply("^\\(| e .+?: \\)\\(.+\n\\)", nil, :notes_h1e_pipe, :notes_h1e)
# | c ... lines
Styles.apply("^\\(| c \\)\\(.+\n\\)", nil, :notes_h1c_pipe, :notes_h1c)
Styles.apply("^\\(| c .+?: \\)\\(.+\n\\)", nil, :notes_h1c_pipe, :notes_h1c)
# | s ... lines
Styles.apply("^\\(| s \\)\\(.+\n\\)", nil, :notes_h1s_pipe, :notes_h1s)
Styles.apply("^\\(| s .+?: \\)\\(.+\n\\)", nil, :notes_h1s_pipe, :notes_h1s)
# | n ... lines
Styles.apply("^\\(| n \\)\\(.+\n\\)", nil, :notes_h1n_pipe, :notes_h1n)
Styles.apply("^\\(| n .+?: \\)\\(.+\n\\)", nil, :notes_h1n_pipe, :notes_h1n)
# ||... lines
Styles.apply("^\\(|| \\)\\(.*\n\\)", nil, :notes_h2_pipe, :notes_h2)
Styles.apply("^\\(|| .+?: \\)\\(.+\n\\)", nil, :notes_h2_pipe, :notes_h2)
# |||... lines
Styles.apply("^\\(||| \\)\\(.+\n\\)", nil, :notes_h3_pipe, :notes_h3)
# ||||... lines
Styles.apply("^\\(|||| ?\\)\\(.+\n\\)", nil, :notes_h4_pipe, :notes_h4)
# # ~emphasis~ strings
# Styles.apply("\\(~\\)\\(.+?\\)\\(~\\)", :notes_label)
# - bullets with labels
Styles.apply("^[ \t]*\\([+-]\\) \\([!#-~ ]+?:\\) ", nil, :ls_bullet, :notes_label)
Styles.apply("^[ \t]*\\([+-]\\) \\([!#-~ ]+?:\\)$", nil, :ls_bullet, :notes_label)
#Styles.apply("^[ \t]*\\(\\+\\)\\( \\)", nil, :ls_bullet, :variable)
Styles.apply("^[ \t]*\\(x\\)\\( \\)\\(.+\\)", nil, :ls_bullet, :variable, :strike)
Styles.apply("^\\([ \t]*\\)\\([+-]\\) \\(.+?:\\) +\\(|.*\n\\)", nil, :default, :ls_bullet, :notes_label, :ls_quote)
Styles.apply("^ +\\(!.*\n\\)", nil, :ls_quote) # ^!... for commands
# - item exclamation! / todo
Styles.apply("^[ \t]*\\(-\\) \\(.+!\\)$", nil, :notes_exclamation, :notes_exclamation)
# - google:
Styles.apply "^ *\\(-\\) \\(g\\)\\(o\\)\\(o\\)\\(g\\)\\(l\\)\\(e:\\) .*", nil, :ls_bullet,
:notes_blue,
:notes_red,
:notes_yellow,
:notes_blue,
:notes_green,
:notes_red
end
# Startup
def self.init
defun(:notes_mouse_launch, :interactive => "e") do |e|
mouse_set_point(e)
# If search in progress
if TreeLs.search_going_or_interrupted and ! Line.blank?
TreeLs.search_going_or_interrupted = false
TreeLs.kill_siblings
end
LineLauncher.launch# :no_search => true
end
defun(:notes_mouse_double_click, :interactive => "e") do |e|
if Line.matches(/\/$/) # If dir, kill siblings first
TreeLs.kill_siblings
end
LineLauncher.launch
end
defun(:notes_mouse_toggle, :interactive => "e") do |e|
mouse_set_point(e)
Notes.mouse_toggle
end
defun(:notes_mode, :interactive => "", :docstring => "Apply notes styles, etc") {# |point|
el4r_lisp_eval "(setq font-lock-defaults '(nil t))"
TreeLs.apply_styles
Notes.apply_styles
use_local_map elvar.notes_mode_map
}
el4r_lisp_eval %q<
(progn
(add-to-list 'auto-mode-alist '("\\\\.notes\\\\'" . notes-mode))
(add-to-list 'auto-mode-alist '("\\\\.xik\\\\'" . notes-mode)))
>
# el4r_lisp_eval %q[(add-to-list 'auto-mode-alist '("\\\\.notes\\\\'" . notes-mode))]
# el4r_lisp_eval %q[(add-to-list 'auto-mode-alist '("\\\\.\\\\'" . notes-mode))]
end
def self.mode
notes_mode
end
def self.enter_label_bullet
Line.to_left
View.insert "- : "
Move.backward 2
end
def self.bullet bullet_text="- "
prefix = Keys.prefix
# If non-blank line
if ! Line.blank? # Line
if Line.matches(/^ *[|+-]/) # If bullet already, just make new line after
# Continue on below
else # If not bullet, make it a bullet
# Get line minus indent, and indent one deeper than previous
line = Line.value(1, :delete=>true).sub(/^ +/, '')
if prefix.is_a? Fixnum # If numeric prefix, indent by n
View.insert((" " * prefix) + "- #{line}")
else
prev_indent = Line.value(0)[/^ */]
View.insert "#{prev_indent} - #{line}"
end
return
end
# Make extra line if none there yet
Line.to_right
View.insert "\n"
end
if prefix.is_a? Fixnum # If numeric prefix, indent by n
View.insert (" " * prefix) + bullet_text
else # Get bullet indent of previous line
prev = Line.value(0)[/^( *)[+-]/, 1]
prev = prev ? " #{prev}#{bullet_text}" : bullet_text
prev.sub!(/^ /, '') if Keys.prefix_u? # Don't indent if U
View.insert prev
end
#ControlLock.disable
end
def self.help_wiki_format
View.to_buffer("*help wiki format*")
View.clear; Notes.mode
View.unindent %q<
| Headings
- syntax: put "| " at beginning
| | Foo
| Bullets
- syntax: put "- " at beginning:
| - foo
- looks like!
- foo
| Bullets with labels
- syntax: put "- " at beginning and colon after label:
| - foo: bar
- looks like!
- foo: bar
| Todo item
- syntax: put "- " at beginning and "!" at end:
| - foo!
- looks like!
- foo!
>
View.to_top
end
def self.mouse_toggle
# If next line is indented more, kill children
# If starts with plus or minus, and on plus or minus, launch
if Line.matches(/^\s*[+-]/) and View.char =~ /[+-]/
plus_or_minus = TreeLs.toggle_plus_and_minus
if plus_or_minus == '+' # If +, expand (launch
if TreeLs.dir? or ! TreeLs.is_tree_ls_path # If on a dir or code_tree
LineLauncher.launch
else # If on a file in a FileTree
TreeLs.enter_lines
end
else # If -, kill under
TreeLs.kill_under
Move.to_line_text_beginning
end
end
end
# returns an instance of BlockNotes representing the block the point is currently in
def self.get_block
left, after_header, right = View.block_positions "^| "
NotesBlock.new(left, after_header, right)
end
private
class NotesBlock
include ElMixin
attr_accessor :left, :after_header, :right
attr_accessor :header, :text
def initialize(left, after_header, right)
@left, @after_header, @right = left, after_header, right
@header = buffer_substring left, after_header
@text = buffer_substring after_header, right
end
def positions
[left, after_header, right]
end
def content
header + text
end
def to_s
content
end
def blink
Effects.blink :left => after_header, :right => right
end
def delete_content
delete_region left, right
end
# initialize an overlay for this notes block
# it has a special hook that updates name to be header always
# this way we can always find the overlay corresponding to header
def show_text
@header_overlay ||= Overlay.find_or_make(left, after_header - 1)
@header_overlay.before_string = ''
@header_overlay.after_string = ''
@body_overlay ||= Overlay.find_or_make(after_header, right)
@body_overlay.invisible = false
end
def hide_text
@header_overlay ||= Overlay.find_or_make(left, after_header - 1)
@header_overlay.before_string = ''
@header_overlay.after_string = ' (more...)'
@body_overlay ||= Overlay.find_or_make(after_header, right)
@body_overlay.invisible = true
end
# cuts the block, and stores it in archive.file.notes
# example: ruby.notes -> archive.ruby.notes
def archive
delete_content
filename = 'archive.' + $el.file_name_nondirectory(buffer_file_name)
timestamp = "--- archived on #{Time.now.strftime('%Y-%m-%d at %H:%M')} --- \n"
append_to_file timestamp, nil, filename
append_to_file content, nil, filename
end
end
end
Notes.define_styles
#Notes.keys # Define local keys
Notes.init
Notes.keys # Define local keys
Notes.rb: refactor h1 styles
require 'styles'
require 'line'
require 'effects'
require 'view'
require 'keys'
require 'clipboard'
class Notes
extend ElMixin
def self.menu
puts "- .help_wiki_format"
end
def self.block regex="^| "
left, after_header, right = View.block_positions regex
buffer_substring(after_header, right)
end
def self.expand_block up=false
# If nothing hidden, hide all but current
if point_min == 1 && (buffer_size + 1 == point_max)
left, after_header, right = View.block_positions "^| "
narrow_to_region left, right
return
end
# Otherwise, expand all, go to next heading, hide all but current
widen
Notes.to_block
left, after_header, right = View.block_positions "^| "
narrow_to_region left, right
end
def self.archive
block = get_block
block.archive
end
def self.show_text
block = get_block
block.show_text
end
def self.hide_text
block = get_block
block.hide_text
end
def self.to_block up=false
if up
(Keys.prefix || 1).times do
Line.to_left
re_search_backward "^| "
end
else
(Keys.prefix || 1).times do
Line.next if Line.matches(/^\| /)
re_search_forward "^| "
Line.to_left
end
end
end
def self.move_block up=false
block = get_block
block.blink
block.delete_content
if up
(Keys.prefix || 1).times do
re_search_backward "^| ", nil, 1
end
insert block.content
search_backward_regexp "^| "
else
re_search_forward "^| "
(Keys.prefix || 1).times do
re_search_forward "^| ", nil, 1
end
beginning_of_line
insert block.content
search_backward_regexp "^| "
end
moved_block = get_block
moved_block.blink
end
def self.insert_heading
Line.start
times = Keys.prefix_u? ? 1 : (Keys.prefix || 1)
times.times { insert "|" }
insert " "
open_line(4) if Keys.prefix_u? # If U create blank lines.
#PauseMeansSpace.go
# Exit control lock mode if on
#ControlLock.disable
end
def self.cut_block no_clipboard=false
block = get_block
block.blink
unless no_clipboard
Clipboard.set("0", block.content)
end
block.delete_content
end
def self.move_block_to_top no_clipboard=false
block = get_block
block.blink
block.delete_content
beginning_of_buffer
insert block.content
goto_line 2
moved_block = get_block
moved_block.blink
end
def self.keys
# Get reference to map if already there (don't mess with existing buffers)
elvar.notes_mode_map = make_sparse_keymap unless boundp :notes_mode_map
Keys.CC(:notes_mode_map) { Agenda.quick_add_line }
Keys.CA(:notes_mode_map) { Notes.archive }
Keys.CO(:notes_mode_map) { Notes.show_text }
Keys.CM(:notes_mode_map) { Notes.hide_text }
Keys.CS(:notes_mode_map) { $el.insert Time.now.strftime("- %Y-%m-%d %I:%M%p: ").downcase.sub(' 0', ' ') }
Keys.CT(:notes_mode_map) { Notes.move_block_to_top }
Keys.CD(:notes_mode_map) { Notes.cut_block true }
Keys.CK(:notes_mode_map) { Notes.cut_block }
Keys.CH(:notes_mode_map) { Notes.insert_heading } # Insert ||... etc. heading
Keys.CN(:notes_mode_map) { Notes.to_block } # Go to block after next block
Keys.CP(:notes_mode_map) { Notes.to_block true } # Go to block before next block
Keys.CE(:notes_mode_map) { Notes.expand_block } # Show just block
Keys.CF(:notes_mode_map) { Notes.move_block } # Move block down to after next block
Keys.CB(:notes_mode_map) { Notes.move_block true } # Move block up to before next block
define_key :notes_mode_map, kbd("C-\\") do
widen; Hide.show
Hide.hide_unless /^\| /
recenter -2
Hide.search
end
# Make right-click launch a line
define_key(:notes_mode_map, kbd("<mouse-3>"), :notes_mouse_launch)
define_key(:notes_mode_map, kbd("<double-mouse-1>"), :notes_mouse_double_click)
define_key(:notes_mode_map, kbd("<mouse-1>"), :notes_mouse_toggle)
end
def self.define_styles
# - foo (r): <here>
Styles.define :notes_light_gray,
:fg => "bbb"
# - foo (r): <here>
Styles.define :variable,
:face => 'verdana' #, :size => "+2"
# - foo (r): <here>
Styles.define :notes_label_parens,
:fg => "bbb",
:size => "-2",
:face => 'arial'
# |...
h1_size = "+2"
styles = { :notes_h1 => "666699",
:notes_h1i => "66aa66",
:notes_h1e => "cccc66",
:notes_h1c => "996699",
:notes_h1s => "449688",
:notes_h1n => "eeaa33"}
styles.each do |k, v|
header = v.gsub(/../) {|c| (c.to_i(16) + "33".to_i(16)).to_s(16)}
Styles.define k, :face => 'arial', :size => h1_size, :fg => 'ffffff', :bg => v, :bold => true
Styles.define "#{k}_pipe".to_sym, :face => 'arial', :size => h1_size, :fg => header, :bg => v, :bold => true
end
# ||...
Styles.define :notes_h2,
:face => 'arial', :size => "0",
:fg => '8888bb', :bg => "e0e0f2",
# :fg => 'ffffff', :bg => "ddddee",
# :fg => 'ffffff', :bg => "aaaacc",
:bold => true
Styles.define :notes_h2_pipe,
:face => 'arial', :size => "0",
:fg => 'bbbbdd', :bg => "e0e0f2",
# :fg => 'ddddf0', :bg => "ddddee",
# :fg => 'ddddf0', :bg => "aaaacc",
:bold => true
# |||...
Styles.define :notes_h3,
:face => 'arial', :size => "-1",
:fg => '9999ee',#, :bg => "9999cc",
:bold => true
Styles.define :notes_h3_pipe,
:face => 'arial', :size => "-1",
:fg => 'ddddf0'
# ||||...
Styles.define :notes_h4,
:face => 'arial', :size => "-2",
:fg => 'bbbbee',
:bold => true
Styles.define :notes_h4_pipe,
:face => 'arial', :size => "-2",
:fg => 'ddddf0'
# Labels, emphasis
Styles.define :notes_label,
:face => 'arial black', :size => "0", # Mac
#:face => 'courier', :size => "0", # Mac
:fg => "dd7700", :bold => true
Styles.define :notes_bullet_parens,
:face => 'arial', :size => "-2",
:fg => "ee7700", :bold => true
Styles.define :notes_exclamation, # Green bold text
:face => 'arial black', :size => "0",
:fg => "55aa22", :bold => true
# Strikethrough
Styles.define(:strike, :strike => true)
# - <here> (r): foo
Styles.define :notes_label_link,
:face => 'verdana', :size => "-1",
:fg => "66f",
:bold => true, :underline => true
Styles.define :notes_g, :fg => "00B"
Styles.define :notes_blue, :fg => "46f"
Styles.define :notes_red, :fg => "c00"
Styles.define :notes_yellow, :fg => "CC0"
Styles.define :notes_green, :fg => "0C0"
if Styles.inverse
Styles.define :notes_h1,
:fg => 'ffffff', :bg => "333366"
Styles.define :notes_h1_pipe,
:fg => '7777aa', :bg => "333366"
Styles.define :notes_h2,
:bg => "181833"
Styles.define :notes_h2_pipe,
:fg => '333366', :bg => "181833"
Styles.define :notes_h1e,
:bg => "666633"
Styles.define :notes_h1e_pipe,
:fg => 'aaaa77', :bg => "666633"
end
end
def self.apply_styles
# Don't format quotes (it overrides the following formatting)
Styles.clear
# |... lines
Styles.apply("^\\(| \\)\\(.*\n\\)", nil, :notes_h1_pipe, :notes_h1)
Styles.apply("^\\(| .+?: \\)\\(.+\n\\)", nil, :notes_h1_pipe, :notes_h1)
# | i ... lines
Styles.apply("^\\(| i \\)\\(.+\n\\)", nil, :notes_h1i_pipe, :notes_h1i)
Styles.apply("^\\(| i .+?: \\)\\(.+\n\\)", nil, :notes_h1i_pipe, :notes_h1i)
# | e ... lines
Styles.apply("^\\(| e \\)\\(.+\n\\)", nil, :notes_h1e_pipe, :notes_h1e)
Styles.apply("^\\(| e .+?: \\)\\(.+\n\\)", nil, :notes_h1e_pipe, :notes_h1e)
# | c ... lines
Styles.apply("^\\(| c \\)\\(.+\n\\)", nil, :notes_h1c_pipe, :notes_h1c)
Styles.apply("^\\(| c .+?: \\)\\(.+\n\\)", nil, :notes_h1c_pipe, :notes_h1c)
# | s ... lines
Styles.apply("^\\(| s \\)\\(.+\n\\)", nil, :notes_h1s_pipe, :notes_h1s)
Styles.apply("^\\(| s .+?: \\)\\(.+\n\\)", nil, :notes_h1s_pipe, :notes_h1s)
# | n ... lines
Styles.apply("^\\(| n \\)\\(.+\n\\)", nil, :notes_h1n_pipe, :notes_h1n)
Styles.apply("^\\(| n .+?: \\)\\(.+\n\\)", nil, :notes_h1n_pipe, :notes_h1n)
# ||... lines
Styles.apply("^\\(|| \\)\\(.*\n\\)", nil, :notes_h2_pipe, :notes_h2)
Styles.apply("^\\(|| .+?: \\)\\(.+\n\\)", nil, :notes_h2_pipe, :notes_h2)
# |||... lines
Styles.apply("^\\(||| \\)\\(.+\n\\)", nil, :notes_h3_pipe, :notes_h3)
# ||||... lines
Styles.apply("^\\(|||| ?\\)\\(.+\n\\)", nil, :notes_h4_pipe, :notes_h4)
# # ~emphasis~ strings
# Styles.apply("\\(~\\)\\(.+?\\)\\(~\\)", :notes_label)
# - bullets with labels
Styles.apply("^[ \t]*\\([+-]\\) \\([!#-~ ]+?:\\) ", nil, :ls_bullet, :notes_label)
Styles.apply("^[ \t]*\\([+-]\\) \\([!#-~ ]+?:\\)$", nil, :ls_bullet, :notes_label)
#Styles.apply("^[ \t]*\\(\\+\\)\\( \\)", nil, :ls_bullet, :variable)
Styles.apply("^[ \t]*\\(x\\)\\( \\)\\(.+\\)", nil, :ls_bullet, :variable, :strike)
Styles.apply("^\\([ \t]*\\)\\([+-]\\) \\(.+?:\\) +\\(|.*\n\\)", nil, :default, :ls_bullet, :notes_label, :ls_quote)
Styles.apply("^ +\\(!.*\n\\)", nil, :ls_quote) # ^!... for commands
# - item exclamation! / todo
Styles.apply("^[ \t]*\\(-\\) \\(.+!\\)$", nil, :notes_exclamation, :notes_exclamation)
# - google:
Styles.apply "^ *\\(-\\) \\(g\\)\\(o\\)\\(o\\)\\(g\\)\\(l\\)\\(e:\\) .*", nil, :ls_bullet,
:notes_blue,
:notes_red,
:notes_yellow,
:notes_blue,
:notes_green,
:notes_red
end
# Startup
def self.init
defun(:notes_mouse_launch, :interactive => "e") do |e|
mouse_set_point(e)
# If search in progress
if TreeLs.search_going_or_interrupted and ! Line.blank?
TreeLs.search_going_or_interrupted = false
TreeLs.kill_siblings
end
LineLauncher.launch# :no_search => true
end
defun(:notes_mouse_double_click, :interactive => "e") do |e|
if Line.matches(/\/$/) # If dir, kill siblings first
TreeLs.kill_siblings
end
LineLauncher.launch
end
defun(:notes_mouse_toggle, :interactive => "e") do |e|
mouse_set_point(e)
Notes.mouse_toggle
end
defun(:notes_mode, :interactive => "", :docstring => "Apply notes styles, etc") {# |point|
el4r_lisp_eval "(setq font-lock-defaults '(nil t))"
TreeLs.apply_styles
Notes.apply_styles
use_local_map elvar.notes_mode_map
}
el4r_lisp_eval %q<
(progn
(add-to-list 'auto-mode-alist '("\\\\.notes\\\\'" . notes-mode))
(add-to-list 'auto-mode-alist '("\\\\.xik\\\\'" . notes-mode)))
>
# el4r_lisp_eval %q[(add-to-list 'auto-mode-alist '("\\\\.notes\\\\'" . notes-mode))]
# el4r_lisp_eval %q[(add-to-list 'auto-mode-alist '("\\\\.\\\\'" . notes-mode))]
end
def self.mode
notes_mode
end
def self.enter_label_bullet
Line.to_left
View.insert "- : "
Move.backward 2
end
def self.bullet bullet_text="- "
prefix = Keys.prefix
# If non-blank line
if ! Line.blank? # Line
if Line.matches(/^ *[|+-]/) # If bullet already, just make new line after
# Continue on below
else # If not bullet, make it a bullet
# Get line minus indent, and indent one deeper than previous
line = Line.value(1, :delete=>true).sub(/^ +/, '')
if prefix.is_a? Fixnum # If numeric prefix, indent by n
View.insert((" " * prefix) + "- #{line}")
else
prev_indent = Line.value(0)[/^ */]
View.insert "#{prev_indent} - #{line}"
end
return
end
# Make extra line if none there yet
Line.to_right
View.insert "\n"
end
if prefix.is_a? Fixnum # If numeric prefix, indent by n
View.insert (" " * prefix) + bullet_text
else # Get bullet indent of previous line
prev = Line.value(0)[/^( *)[+-]/, 1]
prev = prev ? " #{prev}#{bullet_text}" : bullet_text
prev.sub!(/^ /, '') if Keys.prefix_u? # Don't indent if U
View.insert prev
end
#ControlLock.disable
end
def self.help_wiki_format
View.to_buffer("*help wiki format*")
View.clear; Notes.mode
View.unindent %q<
| Headings
- syntax: put "| " at beginning
| | Foo
| Bullets
- syntax: put "- " at beginning:
| - foo
- looks like!
- foo
| Bullets with labels
- syntax: put "- " at beginning and colon after label:
| - foo: bar
- looks like!
- foo: bar
| Todo item
- syntax: put "- " at beginning and "!" at end:
| - foo!
- looks like!
- foo!
>
View.to_top
end
def self.mouse_toggle
# If next line is indented more, kill children
# If starts with plus or minus, and on plus or minus, launch
if Line.matches(/^\s*[+-]/) and View.char =~ /[+-]/
plus_or_minus = TreeLs.toggle_plus_and_minus
if plus_or_minus == '+' # If +, expand (launch
if TreeLs.dir? or ! TreeLs.is_tree_ls_path # If on a dir or code_tree
LineLauncher.launch
else # If on a file in a FileTree
TreeLs.enter_lines
end
else # If -, kill under
TreeLs.kill_under
Move.to_line_text_beginning
end
end
end
# returns an instance of BlockNotes representing the block the point is currently in
def self.get_block
left, after_header, right = View.block_positions "^| "
NotesBlock.new(left, after_header, right)
end
private
class NotesBlock
include ElMixin
attr_accessor :left, :after_header, :right
attr_accessor :header, :text
def initialize(left, after_header, right)
@left, @after_header, @right = left, after_header, right
@header = buffer_substring left, after_header
@text = buffer_substring after_header, right
end
def positions
[left, after_header, right]
end
def content
header + text
end
def to_s
content
end
def blink
Effects.blink :left => after_header, :right => right
end
def delete_content
delete_region left, right
end
# initialize an overlay for this notes block
# it has a special hook that updates name to be header always
# this way we can always find the overlay corresponding to header
def show_text
@header_overlay ||= Overlay.find_or_make(left, after_header - 1)
@header_overlay.before_string = ''
@header_overlay.after_string = ''
@body_overlay ||= Overlay.find_or_make(after_header, right)
@body_overlay.invisible = false
end
def hide_text
@header_overlay ||= Overlay.find_or_make(left, after_header - 1)
@header_overlay.before_string = ''
@header_overlay.after_string = ' (more...)'
@body_overlay ||= Overlay.find_or_make(after_header, right)
@body_overlay.invisible = true
end
# cuts the block, and stores it in archive.file.notes
# example: ruby.notes -> archive.ruby.notes
def archive
delete_content
filename = 'archive.' + $el.file_name_nondirectory(buffer_file_name)
timestamp = "--- archived on #{Time.now.strftime('%Y-%m-%d at %H:%M')} --- \n"
append_to_file timestamp, nil, filename
append_to_file content, nil, filename
end
end
end
Notes.define_styles
#Notes.keys # Define local keys
Notes.init
Notes.keys # Define local keys
|
module GitService
class Commit
attr_reader :commit_oid, :rugged_repo
def initialize(rugged_repo, commit_oid)
@commit_oid = commit_oid
@rugged_repo = rugged_repo
end
def diff(other_ref = parent_oids.first)
Diff.new(rugged_diff(other_ref))
end
def parent_oids
@parent_oids ||= rugged_commit.parent_oids
end
def rugged_commit
@rugged_commit ||= Rugged::Commit.lookup(rugged_repo, commit_oid)
end
def rugged_diff(other_ref = parent_oids.first)
other_commit = Rugged::Commit.lookup(rugged_repo, other_ref)
other_commit.diff(rugged_commit)
end
def full_message
message = "commit #{commit_oid}\n"
message << "Merge: #{parent_oids.join(" ")}\n" if parent_oids.length > 1
message << "Author: #{rugged_commit.author[:name]} <#{rugged_commit.author[:email]}>\n"
message << "AuthorDate: #{rugged_commit.author[:time].to_time.strftime("%c %z")}>\n"
message << "Commit: #{rugged_commit.author[:name]} <#{rugged_commit.author[:email]}>\n"
message << "CommitDate: #{rugged_commit.author[:time].to_time.strftime("%c %z")}>\n"
message << "\n"
rugged_commit.message.each_line { |line| message << " #{line}" }
message << "\n"
diff.file_status.each do |file, stats|
message << " #{file} | #{stats[:additions].to_i + stats[:deletions].to_i} #{"+" * stats[:additions]}#{"-" * stats[:deletions]}\n"
end
message << " #{diff.status_summary}"
message
end
end
end
Prefer String#indent
module GitService
class Commit
attr_reader :commit_oid, :rugged_repo
def initialize(rugged_repo, commit_oid)
@commit_oid = commit_oid
@rugged_repo = rugged_repo
end
def diff(other_ref = parent_oids.first)
Diff.new(rugged_diff(other_ref))
end
def parent_oids
@parent_oids ||= rugged_commit.parent_oids
end
def rugged_commit
@rugged_commit ||= Rugged::Commit.lookup(rugged_repo, commit_oid)
end
def rugged_diff(other_ref = parent_oids.first)
other_commit = Rugged::Commit.lookup(rugged_repo, other_ref)
other_commit.diff(rugged_commit)
end
def full_message
message = "commit #{commit_oid}\n"
message << "Merge: #{parent_oids.join(" ")}\n" if parent_oids.length > 1
message << "Author: #{rugged_commit.author[:name]} <#{rugged_commit.author[:email]}>\n"
message << "AuthorDate: #{rugged_commit.author[:time].to_time.strftime("%c %z")}>\n"
message << "Commit: #{rugged_commit.author[:name]} <#{rugged_commit.author[:email]}>\n"
message << "CommitDate: #{rugged_commit.author[:time].to_time.strftime("%c %z")}>\n"
message << "\n"
message << rugged_commit.message.indent(4)
message << "\n"
diff.file_status.each do |file, stats|
message << " #{file} | #{stats[:additions].to_i + stats[:deletions].to_i} #{"+" * stats[:additions]}#{"-" * stats[:deletions]}\n"
end
message << " #{diff.status_summary}"
message
end
end
end
|
# == License
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2013 Brice Texier
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
require_dependency 'procedo'
module Backend
class InterventionsController < Backend::BaseController
manage_restfully t3e: { procedure_name: '(RECORD.procedure ? RECORD.procedure.human_name : nil)'.c },
continue: %i[nature procedure_name]
respond_to :pdf, :odt, :docx, :xml, :json, :html, :csv
unroll
# params:
# :q Text search
# :cultivable_zone_id
# :campaign_id
# :product_nature_id
# :support_id
# Builds — as a String of Ruby code, compiled at the end by String#c — the
# filtering conditions for the index list. The generated code is evaluated
# later in a context where +params+, +current_period+,
# +current_period_interval+ and +current_campaign+ are available, and must
# leave the conditions array +c+ as its last expression.
def self.list_conditions
  conn = Intervention.connection
  # , productions: [:name], campaigns: [:name], activities: [:name], products: [:name]
  expressions = []
  # Lets text search match the localized procedure label, not only the raw
  # procedure_name column value.
  expressions << 'CASE ' + Procedo.selection.map { |l, n| "WHEN procedure_name = #{conn.quote(n)} THEN #{conn.quote(l)}" }.join(' ') + " ELSE '' END"
  code = search_conditions({ interventions: %i[state procedure_name number] }, expressions: expressions) + " ||= []\n"
  code << "unless params[:state].blank?\n"
  code << " c[0] << ' AND #{Intervention.table_name}.state IN (?)'\n"
  code << " c << params[:state]\n"
  code << "end\n"
  code << "unless params[:nature].blank?\n"
  code << " c[0] << ' AND #{Intervention.table_name}.nature IN (?)'\n"
  code << " c << params[:nature]\n"
  code << "end\n"
  # Always shown: records, plus requests that are not rejected and not yet
  # realized by an existing record intervention.
  code << "c[0] << ' AND ((#{Intervention.table_name}.nature = ? AND #{Intervention.table_name}.state != ? AND (#{Intervention.table_name}.request_intervention_id IS NULL OR #{Intervention.table_name}.request_intervention_id NOT IN (SELECT id from #{Intervention.table_name})) OR #{Intervention.table_name}.nature = ?))'\n"
  code << "c << 'request'\n"
  code << "c << '#{Intervention.state.rejected}'\n"
  code << "c << 'record'\n"
  code << "unless params[:procedure_name].blank?\n"
  code << " c[0] << ' AND #{Intervention.table_name}.procedure_name IN (?)'\n"
  code << " c << params[:procedure_name]\n"
  code << "end\n"
  # select the interventions according to the user current period
  code << "unless current_period_interval.blank? && current_period.blank?\n"
  code << " if current_period_interval.to_sym == :day\n"
  code << " c[0] << ' AND EXTRACT(DAY FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(MONTH FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
  code << " c << current_period.to_date.day\n"
  code << " c << current_period.to_date.month\n"
  code << " c << current_period.to_date.year\n"
  code << " elsif current_period_interval.to_sym == :week\n"
  code << " c[0] << ' AND #{Intervention.table_name}.started_at >= ? AND #{Intervention.table_name}.stopped_at <= ?'\n"
  code << " c << current_period.to_date.at_beginning_of_week.to_time.beginning_of_day\n"
  code << " c << current_period.to_date.at_end_of_week.to_time.end_of_day\n"
  code << " elsif current_period_interval.to_sym == :month\n"
  code << " c[0] << ' AND EXTRACT(MONTH FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
  code << " c << current_period.to_date.month\n"
  code << " c << current_period.to_date.year\n"
  code << " elsif current_period_interval.to_sym == :year\n"
  code << " c[0] << ' AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
  code << " c << current_period.to_date.year\n"
  code << " end\n"
  # Cultivable zones
  # NOTE(review): params[:cultivable_zone_id] is concatenated directly into
  # the SQL string here (unlike the other filters, which use '?'
  # placeholders) — potential SQL injection; should be parameterized.
  code << " if params[:cultivable_zone_id].to_i > 0\n"
  code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM activity_productions_interventions INNER JOIN #{ActivityProduction.table_name} ON #{ActivityProduction.table_name}.id = activity_production_id INNER JOIN #{CultivableZone.table_name} ON #{CultivableZone.table_name}.id = #{ActivityProduction.table_name}.cultivable_zone_id WHERE #{CultivableZone.table_name}.id = ' + params[:cultivable_zone_id] + ')'\n"
  code << " c \n"
  code << " end\n"
  # Current campaign
  code << " if current_campaign\n"
  code << " c[0] << \" AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?\"\n"
  code << " c << current_campaign.harvest_year\n"
  code << " end\n"
  code << "end\n"
  # Support
  code << "if params[:product_id].to_i > 0\n"
  code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (?))'\n"
  code << " c << params[:product_id].to_i\n"
  code << "end\n"
  # Label
  code << "if params[:label_id].to_i > 0\n"
  code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_labellings WHERE label_id IN (?))'\n"
  code << " c << params[:label_id].to_i\n"
  code << "end\n"
  # ActivityProduction || Activity
  code << "if params[:production_id].to_i > 0\n"
  code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (SELECT target_id FROM target_distributions WHERE activity_production_id = ?))'\n"
  code << " c << params[:production_id].to_i\n"
  code << "elsif params[:activity_id].to_i > 0\n"
  code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (SELECT target_id FROM target_distributions WHERE activity_id = ?))'\n"
  code << " c << params[:activity_id].to_i\n"
  code << "end\n"
  code << "c\n "
  code.c
end
# INDEX
# @TODO conditions: list_conditions, joins: [:production, :activity, :campaign, :support]
# conditions: list_conditions,
# Main index table: rows filtered by self.list_conditions, newest first,
# row CSS class driven by the intervention status.
list(conditions: list_conditions, order: { started_at: :desc }, line_class: :status) do |t|
  t.action :purchase, on: :both, method: :post
  t.action :sell, on: :both, method: :post
  t.action :edit, if: :updateable?
  t.action :destroy, if: :destroyable?
  t.column :name, sort: :procedure_name, url: true
  t.column :procedure_name, hidden: true
  # t.column :production, url: true, hidden: true
  # t.column :campaign, url: true
  t.column :human_activities_names
  t.column :started_at
  t.column :stopped_at, hidden: true
  t.column :human_working_duration, on_select: :sum, value_method: 'working_duration.in(:second).in(:hour)'
  t.status
  t.column :human_target_names
  t.column :human_working_zone_area, on_select: :sum
  t.column :total_cost, label_method: :human_total_cost, currency: true, on_select: :sum
  t.column :nature
  t.column :issue, url: true
  t.column :trouble_encountered, hidden: true
  # t.column :casting
  # t.column :human_target_names, hidden: true
end
# SHOW
# Parameters attached to the intervention being shown.
list(:product_parameters, model: :intervention_product_parameters, conditions: { intervention_id: 'params[:id]'.c }, order: { created_at: :desc }) do |t|
  t.column :name, sort: :reference_name
  t.column :product, url: true
  # t.column :human_roles, sort: :roles, label: :roles
  t.column :quantity_population
  t.column :unit_name, through: :variant
  # t.column :working_zone, hidden: true
  t.column :variant, url: true
end
# Record interventions created from the request intervention being shown.
list(:record_interventions, model: :interventions, conditions: { request_intervention_id: 'params[:id]'.c }, order: 'interventions.started_at DESC') do |t|
  # t.column :roles, hidden: true
  t.column :name, sort: :reference_name
  t.column :started_at, datatype: :datetime
  t.column :stopped_at, datatype: :datetime
  t.column :human_activities_names, through: :intervention
  t.column :human_working_duration, through: :intervention
  t.column :human_working_zone_area, through: :intervention
end
# Show one intervention with params_id
# Renders the intervention in any of the registered formats, serializing
# its leaf parameters (with product details) and the attached prescription;
# the XML/builder responses additionally get the record URL appended.
def show
  return unless @intervention = find_and_check
  t3e @intervention, procedure_name: @intervention.procedure.human_name
  respond_with(@intervention, methods: %i[cost earn status name duration human_working_zone_area human_actions_names],
                              include: [
                                { leaves_parameters: {
                                  methods: %i[reference_name default_name working_zone_svg human_quantity human_working_zone_area],
                                  include: {
                                    product: {
                                      methods: %i[picture_path nature_name unit_name]
                                    }
                                  }
                                } }, {
                                  prescription: {
                                    include: %i[prescriptor attachments]
                                  }
                                }
                              ],
                              procs: proc { |options| options[:builder].tag!(:url, backend_intervention_url(@intervention)) })
end
# Prefills the new-intervention form.
# Scalar attributes are copied straight from params; nested
# group_parameters/targets attributes come from permitted_params, with
# targets filtered down to products that were available one hour ago.
# A keeper preference (params[:keeper_id]) can inject target product ids
# and is erased once consumed. If params[:request_intervention_id] points
# to an existing request, the form is initialized as its record instead.
def new
  options = {}
  %i[actions custom_fields description event_id issue_id
     nature number prescription_id procedure_name
     request_intervention_id started_at state
     stopped_at trouble_description trouble_encountered
     whole_duration working_duration].each do |param|
    options[param] = params[param]
  end
  # , :doers, :inputs, :outputs, :tools
  %i[group_parameters targets].each do |param|
    next unless params.include? :intervention
    options[:"#{param}_attributes"] = permitted_params["#{param}_attributes"] || []
    next unless options[:targets_attributes]
    next if permitted_params.include? :working_periods
    # targets_attributes can be an Array of hashes or a Hash of index => hash.
    targets = if options[:targets_attributes].is_a? Array
                options[:targets_attributes].collect { |k, _| k[:product_id] }
              else
                options[:targets_attributes].collect { |_, v| v[:product_id] }
              end
    availables = Product.where(id: targets).at(Time.zone.now - 1.hour).collect(&:id)
    # Keep only entries whose product is among the available ids.
    options[:targets_attributes].select! do |k, v|
      obj = k.is_a?(Hash) ? k : v
      obj.include?(:product_id) && availables.include?(obj[:product_id].to_i)
    end
  end
  # consume preference and erase
  if params[:keeper_id] && (p = current_user.preferences.get(params[:keeper_id])) && p.value.present?
    options[:targets_attributes] = p.value.split(',').collect do |v|
      hash = {}
      hash[:product_id] = v if Product.find_by(id: v)
      if params[:reference_name]
        # Only the 'animal' reference is accepted; anything else drops the entry.
        next unless params[:reference_name] == 'animal'
        hash[:reference_name] = params[:reference_name]
      end
      if params[:new_group] && (g = Product.find_by(id: params[:new_group]))
        hash[:new_group_id] = g.id
      end
      if params[:new_container] && (c = Product.find_by(id: params[:new_container]))
        hash[:new_container_id] = c.id
      end
      hash
    end.compact
    p.set! nil
  end
  @intervention = Intervention.new(options)
  from_request = Intervention.find_by(id: params[:request_intervention_id])
  @intervention = from_request.initialize_record if from_request
  render(locals: { cancel_url: { action: :index }, with_continue: true })
end
# Redirects to a new sale prefilled with the selected intervention ids
# (params[:id] is a comma-separated list), or back to the index when the
# selection is empty.
def sell
  intervention_ids = params[:id].split(',')
  # String#split never returns nil, so the previous `return unless` guard
  # and truthiness check were dead code and the index fallback was
  # unreachable; test emptiness instead.
  if intervention_ids.any?
    redirect_to new_backend_sale_path(intervention_ids: intervention_ids)
  else
    redirect_to action: :index
  end
end
# Redirects to a new purchase prefilled with the selected intervention ids
# (params[:id] is a comma-separated list), or back to the index when the
# selection is empty.
def purchase
  intervention_ids = params[:id].split(',')
  # String#split never returns nil (always an Array, truthy even when
  # empty), so the old `if interventions` made the else branch dead; test
  # emptiness so the index fallback is reachable.
  if intervention_ids.any?
    redirect_to new_backend_purchase_path(intervention_ids: intervention_ids)
  else
    redirect_to action: :index
  end
end
# Computes impacts of a updated value in an intervention input context.
# AJAX endpoint: rebuilds a Procedo engine intervention from
# params[:intervention], replays params[:updater] against it, and returns
# the recomputed intervention plus handler/procedure states as JSON.
# Responses: 422 when :intervention is missing, 404 for an unknown
# procedure, 500 with the message when the engine raises Procedo::Error.
def compute
  unless params[:intervention]
    head(:unprocessable_entity)
    return
  end
  intervention_params = params[:intervention].deep_symbolize_keys
  procedure = Procedo.find(intervention_params[:procedure_name])
  unless procedure
    head(:not_found)
    return
  end
  intervention = Procedo::Engine.new_intervention(intervention_params)
  begin
    intervention.impact_with!(params[:updater])
    # Derive the DOM id of the form field that triggered the update, e.g.
    # "targets[0]working_zone" -> "intervention_targets_attributes_0_working_zone".
    updater_id = 'intervention_' + params[:updater].gsub('[', '_attributes_').tr(']', '_')
    # raise intervention.to_hash.inspect
    respond_to do |format|
      # format.xml { render xml: intervention.to_xml }
      format.json { render json: { updater_id: updater_id, intervention: intervention, handlers: intervention.handlers_states, procedure_states: intervention.procedure_states }.to_json }
    end
  rescue Procedo::Error => e
    respond_to do |format|
      # format.xml { render xml: { errors: e.message }, status: 500 }
      format.json { render json: { errors: e.message }, status: 500 }
    end
  end
end
# Renders one of the intervention modal partials depending on the params:
# a details modal for :intervention_id, or a delete/change-state modal for
# a comma-separated :interventions_ids list.
def modal
  if params[:intervention_id]
    @intervention = Intervention.find(params[:intervention_id])
    render partial: 'backend/interventions/details_modal', locals: { intervention: @intervention }
  end
  return unless params[:interventions_ids]
  @interventions = Intervention.find(params[:interventions_ids].split(','))
  partial = params[:modal_type] == 'delete' ? 'backend/interventions/delete_modal' : 'backend/interventions/change_state_modal'
  render partial: partial, locals: { interventions: @interventions }
end
# Applies a state change to a batch of interventions inside one
# transaction. Ids arrive as a JSON-encoded array in the permitted params.
#
# Rules implemented below:
# * record + :rejected  -> destroy the record; its originating request is
#   either destroyed too or updated with the record's parameters, depending
#   on :delete_option.
# * request + :rejected -> just mark the request as rejected.
# * request + other     -> duplicate the request into a new record linked
#   back to it, then apply the new state.
# * record + other      -> update the record in place.
def change_state
  unless state_change_permitted_params
    head :unprocessable_entity
    return
  end
  interventions_ids = JSON.parse(state_change_permitted_params[:interventions_ids]).to_a
  new_state = state_change_permitted_params[:state].to_sym
  @interventions = Intervention.find(interventions_ids)
  Intervention.transaction do
    @interventions.each do |intervention|
      if intervention.nature == :record && new_state == :rejected
        unless intervention.request_intervention_id.nil?
          intervention_request = Intervention.find(intervention.request_intervention_id)
          if state_change_permitted_params[:delete_option].to_sym == :delete_request
            intervention_request.destroy!
          else
            # Keep the request but sync it with the record being removed.
            intervention_request.parameters = intervention.parameters
            intervention_request.save!
          end
        end
        intervention.destroy!
        next
      end
      if intervention.nature == :request && new_state == :rejected
        intervention.state = new_state
        # NOTE(review): invalid interventions are skipped silently, with no
        # feedback to the user — confirm this is intentional.
        next unless intervention.valid?
        intervention.save!
        next
      end
      new_intervention = intervention
      if intervention.nature == :request
        # Turn the request into a fresh record pointing back at it.
        new_intervention = intervention.dup
        new_intervention.parameters = intervention.parameters
        new_intervention.request_intervention_id = intervention.id
      end
      new_intervention.state = new_state
      new_intervention.nature = :record
      next unless new_intervention.valid?
      new_intervention.save!
    end
  end
  redirect_to_back
end
private
# Resolves params[:id] (comma-separated) into Intervention records,
# silently dropping unknown ids. When nothing resolves, notifies the user,
# redirects (to params[:redirect] or the index) and returns nil.
def find_interventions
  found = params[:id].split(',').map { |id| Intervention.find_by(id: id) }.compact
  return found if found.any?
  notify_error :no_interventions_given
  redirect_to(params[:redirect] || { action: :index })
  nil
end
# Strong-parameters whitelist for #change_state: :interventions_ids is a
# JSON-encoded array string, :state the target state, and :delete_option
# controls what happens to the originating request when a record is rejected.
def state_change_permitted_params
  params.require(:intervention).permit(:interventions_ids, :state, :delete_option)
end
end
end
Fix alignment of summed totals by declaring datatype: :decimal on the list columns that use on_select: :sum
# == License
# Ekylibre - Simple agricultural ERP
# Copyright (C) 2013 Brice Texier
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
require_dependency 'procedo'
module Backend
class InterventionsController < Backend::BaseController
manage_restfully t3e: { procedure_name: '(RECORD.procedure ? RECORD.procedure.human_name : nil)'.c },
continue: %i[nature procedure_name]
respond_to :pdf, :odt, :docx, :xml, :json, :html, :csv
unroll
# params:
# :q Text search
# :cultivable_zone_id
# :campaign_id
# :product_nature_id
# :support_id
def self.list_conditions
conn = Intervention.connection
# , productions: [:name], campaigns: [:name], activities: [:name], products: [:name]
expressions = []
expressions << 'CASE ' + Procedo.selection.map { |l, n| "WHEN procedure_name = #{conn.quote(n)} THEN #{conn.quote(l)}" }.join(' ') + " ELSE '' END"
code = search_conditions({ interventions: %i[state procedure_name number] }, expressions: expressions) + " ||= []\n"
code << "unless params[:state].blank?\n"
code << " c[0] << ' AND #{Intervention.table_name}.state IN (?)'\n"
code << " c << params[:state]\n"
code << "end\n"
code << "unless params[:nature].blank?\n"
code << " c[0] << ' AND #{Intervention.table_name}.nature IN (?)'\n"
code << " c << params[:nature]\n"
code << "end\n"
code << "c[0] << ' AND ((#{Intervention.table_name}.nature = ? AND #{Intervention.table_name}.state != ? AND (#{Intervention.table_name}.request_intervention_id IS NULL OR #{Intervention.table_name}.request_intervention_id NOT IN (SELECT id from #{Intervention.table_name})) OR #{Intervention.table_name}.nature = ?))'\n"
code << "c << 'request'\n"
code << "c << '#{Intervention.state.rejected}'\n"
code << "c << 'record'\n"
code << "unless params[:procedure_name].blank?\n"
code << " c[0] << ' AND #{Intervention.table_name}.procedure_name IN (?)'\n"
code << " c << params[:procedure_name]\n"
code << "end\n"
# select the interventions according to the user current period
code << "unless current_period_interval.blank? && current_period.blank?\n"
code << " if current_period_interval.to_sym == :day\n"
code << " c[0] << ' AND EXTRACT(DAY FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(MONTH FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
code << " c << current_period.to_date.day\n"
code << " c << current_period.to_date.month\n"
code << " c << current_period.to_date.year\n"
code << " elsif current_period_interval.to_sym == :week\n"
code << " c[0] << ' AND #{Intervention.table_name}.started_at >= ? AND #{Intervention.table_name}.stopped_at <= ?'\n"
code << " c << current_period.to_date.at_beginning_of_week.to_time.beginning_of_day\n"
code << " c << current_period.to_date.at_end_of_week.to_time.end_of_day\n"
code << " elsif current_period_interval.to_sym == :month\n"
code << " c[0] << ' AND EXTRACT(MONTH FROM #{Intervention.table_name}.started_at) = ? AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
code << " c << current_period.to_date.month\n"
code << " c << current_period.to_date.year\n"
code << " elsif current_period_interval.to_sym == :year\n"
code << " c[0] << ' AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?'\n"
code << " c << current_period.to_date.year\n"
code << " end\n"
# Cultivable zones
code << " if params[:cultivable_zone_id].to_i > 0\n"
code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM activity_productions_interventions INNER JOIN #{ActivityProduction.table_name} ON #{ActivityProduction.table_name}.id = activity_production_id INNER JOIN #{CultivableZone.table_name} ON #{CultivableZone.table_name}.id = #{ActivityProduction.table_name}.cultivable_zone_id WHERE #{CultivableZone.table_name}.id = ' + params[:cultivable_zone_id] + ')'\n"
code << " c \n"
code << " end\n"
# Current campaign
code << " if current_campaign\n"
code << " c[0] << \" AND EXTRACT(YEAR FROM #{Intervention.table_name}.started_at) = ?\"\n"
code << " c << current_campaign.harvest_year\n"
code << " end\n"
code << "end\n"
# Support
code << "if params[:product_id].to_i > 0\n"
code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (?))'\n"
code << " c << params[:product_id].to_i\n"
code << "end\n"
# Label
code << "if params[:label_id].to_i > 0\n"
code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_labellings WHERE label_id IN (?))'\n"
code << " c << params[:label_id].to_i\n"
code << "end\n"
# ActivityProduction || Activity
code << "if params[:production_id].to_i > 0\n"
code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (SELECT target_id FROM target_distributions WHERE activity_production_id = ?))'\n"
code << " c << params[:production_id].to_i\n"
code << "elsif params[:activity_id].to_i > 0\n"
code << " c[0] << ' AND #{Intervention.table_name}.id IN (SELECT intervention_id FROM intervention_parameters WHERE type = \\'InterventionTarget\\' AND product_id IN (SELECT target_id FROM target_distributions WHERE activity_id = ?))'\n"
code << " c << params[:activity_id].to_i\n"
code << "end\n"
code << "c\n "
code.c
end
# INDEX
# @TODO conditions: list_conditions, joins: [:production, :activity, :campaign, :support]
# conditions: list_conditions,
list(conditions: list_conditions, order: { started_at: :desc }, line_class: :status) do |t|
t.action :purchase, on: :both, method: :post
t.action :sell, on: :both, method: :post
t.action :edit, if: :updateable?
t.action :destroy, if: :destroyable?
t.column :name, sort: :procedure_name, url: true
t.column :procedure_name, hidden: true
# t.column :production, url: true, hidden: true
# t.column :campaign, url: true
t.column :human_activities_names
t.column :started_at
t.column :stopped_at, hidden: true
t.column :human_working_duration, on_select: :sum, value_method: 'working_duration.in(:second).in(:hour)', datatype: :decimal
t.status
t.column :human_target_names
t.column :human_working_zone_area, on_select: :sum, datatype: :decimal
t.column :total_cost, label_method: :human_total_cost, currency: true, on_select: :sum, datatype: :decimal
t.column :nature
t.column :issue, url: true
t.column :trouble_encountered, hidden: true
# t.column :casting
# t.column :human_target_names, hidden: true
end
# SHOW
list(:product_parameters, model: :intervention_product_parameters, conditions: { intervention_id: 'params[:id]'.c }, order: { created_at: :desc }) do |t|
t.column :name, sort: :reference_name
t.column :product, url: true
# t.column :human_roles, sort: :roles, label: :roles
t.column :quantity_population
t.column :unit_name, through: :variant
# t.column :working_zone, hidden: true
t.column :variant, url: true
end
list(:record_interventions, model: :interventions, conditions: { request_intervention_id: 'params[:id]'.c }, order: 'interventions.started_at DESC') do |t|
# t.column :roles, hidden: true
t.column :name, sort: :reference_name
t.column :started_at, datatype: :datetime
t.column :stopped_at, datatype: :datetime
t.column :human_activities_names, through: :intervention
t.column :human_working_duration, through: :intervention
t.column :human_working_zone_area, through: :intervention
end
# Show one intervention with params_id
def show
return unless @intervention = find_and_check
t3e @intervention, procedure_name: @intervention.procedure.human_name
respond_with(@intervention, methods: %i[cost earn status name duration human_working_zone_area human_actions_names],
include: [
{ leaves_parameters: {
methods: %i[reference_name default_name working_zone_svg human_quantity human_working_zone_area],
include: {
product: {
methods: %i[picture_path nature_name unit_name]
}
}
} }, {
prescription: {
include: %i[prescriptor attachments]
}
}
],
procs: proc { |options| options[:builder].tag!(:url, backend_intervention_url(@intervention)) })
end
def new
options = {}
%i[actions custom_fields description event_id issue_id
nature number prescription_id procedure_name
request_intervention_id started_at state
stopped_at trouble_description trouble_encountered
whole_duration working_duration].each do |param|
options[param] = params[param]
end
# , :doers, :inputs, :outputs, :tools
%i[group_parameters targets].each do |param|
next unless params.include? :intervention
options[:"#{param}_attributes"] = permitted_params["#{param}_attributes"] || []
next unless options[:targets_attributes]
next if permitted_params.include? :working_periods
targets = if options[:targets_attributes].is_a? Array
options[:targets_attributes].collect { |k, _| k[:product_id] }
else
options[:targets_attributes].collect { |_, v| v[:product_id] }
end
availables = Product.where(id: targets).at(Time.zone.now - 1.hour).collect(&:id)
options[:targets_attributes].select! do |k, v|
obj = k.is_a?(Hash) ? k : v
obj.include?(:product_id) && availables.include?(obj[:product_id].to_i)
end
end
# consume preference and erase
if params[:keeper_id] && (p = current_user.preferences.get(params[:keeper_id])) && p.value.present?
options[:targets_attributes] = p.value.split(',').collect do |v|
hash = {}
hash[:product_id] = v if Product.find_by(id: v)
if params[:reference_name]
next unless params[:reference_name] == 'animal'
hash[:reference_name] = params[:reference_name]
end
if params[:new_group] && (g = Product.find_by(id: params[:new_group]))
hash[:new_group_id] = g.id
end
if params[:new_container] && (c = Product.find_by(id: params[:new_container]))
hash[:new_container_id] = c.id
end
hash
end.compact
p.set! nil
end
@intervention = Intervention.new(options)
from_request = Intervention.find_by(id: params[:request_intervention_id])
@intervention = from_request.initialize_record if from_request
render(locals: { cancel_url: { action: :index }, with_continue: true })
end
# Redirects to a new sale prefilled with the selected intervention ids
# (params[:id] is a comma-separated list), or back to the index when the
# selection is empty.
def sell
  intervention_ids = params[:id].split(',')
  # String#split never returns nil, so the previous `return unless` guard
  # and truthiness check were dead code and the index fallback was
  # unreachable; test emptiness instead.
  if intervention_ids.any?
    redirect_to new_backend_sale_path(intervention_ids: intervention_ids)
  else
    redirect_to action: :index
  end
end
# Redirects to a new purchase prefilled with the selected intervention ids
# (params[:id] is a comma-separated list), or back to the index when the
# selection is empty.
def purchase
  intervention_ids = params[:id].split(',')
  # String#split never returns nil (always an Array, truthy even when
  # empty), so the old `if interventions` made the else branch dead; test
  # emptiness so the index fallback is reachable.
  if intervention_ids.any?
    redirect_to new_backend_purchase_path(intervention_ids: intervention_ids)
  else
    redirect_to action: :index
  end
end
# Computes impacts of a updated value in an intervention input context
def compute
unless params[:intervention]
head(:unprocessable_entity)
return
end
intervention_params = params[:intervention].deep_symbolize_keys
procedure = Procedo.find(intervention_params[:procedure_name])
unless procedure
head(:not_found)
return
end
intervention = Procedo::Engine.new_intervention(intervention_params)
begin
intervention.impact_with!(params[:updater])
updater_id = 'intervention_' + params[:updater].gsub('[', '_attributes_').tr(']', '_')
# raise intervention.to_hash.inspect
respond_to do |format|
# format.xml { render xml: intervention.to_xml }
format.json { render json: { updater_id: updater_id, intervention: intervention, handlers: intervention.handlers_states, procedure_states: intervention.procedure_states }.to_json }
end
rescue Procedo::Error => e
respond_to do |format|
# format.xml { render xml: { errors: e.message }, status: 500 }
format.json { render json: { errors: e.message }, status: 500 }
end
end
end
def modal
if params[:intervention_id]
@intervention = Intervention.find(params[:intervention_id])
render partial: 'backend/interventions/details_modal', locals: { intervention: @intervention }
end
if params[:interventions_ids]
@interventions = Intervention.find(params[:interventions_ids].split(','))
if params[:modal_type] == 'delete'
render partial: 'backend/interventions/delete_modal', locals: { interventions: @interventions }
else
render partial: 'backend/interventions/change_state_modal', locals: { interventions: @interventions }
end
end
end
def change_state
unless state_change_permitted_params
head :unprocessable_entity
return
end
interventions_ids = JSON.parse(state_change_permitted_params[:interventions_ids]).to_a
new_state = state_change_permitted_params[:state].to_sym
@interventions = Intervention.find(interventions_ids)
Intervention.transaction do
@interventions.each do |intervention|
if intervention.nature == :record && new_state == :rejected
unless intervention.request_intervention_id.nil?
intervention_request = Intervention.find(intervention.request_intervention_id)
if state_change_permitted_params[:delete_option].to_sym == :delete_request
intervention_request.destroy!
else
intervention_request.parameters = intervention.parameters
intervention_request.save!
end
end
intervention.destroy!
next
end
if intervention.nature == :request && new_state == :rejected
intervention.state = new_state
next unless intervention.valid?
intervention.save!
next
end
new_intervention = intervention
if intervention.nature == :request
new_intervention = intervention.dup
new_intervention.parameters = intervention.parameters
new_intervention.request_intervention_id = intervention.id
end
new_intervention.state = new_state
new_intervention.nature = :record
next unless new_intervention.valid?
new_intervention.save!
end
end
redirect_to_back
end
private
def find_interventions
intervention_ids = params[:id].split(',')
interventions = intervention_ids.map { |id| Intervention.find_by(id: id) }.compact
unless interventions.any?
notify_error :no_interventions_given
redirect_to(params[:redirect] || { action: :index })
return nil
end
interventions
end
def state_change_permitted_params
params.require(:intervention).permit(:interventions_ids, :state, :delete_option)
end
end
end
|
require 'forwardable'
require 'twitter/error/configuration_error'
module Twitter
  # Mixin holding the configurable options of the Twitter client:
  # OAuth/bearer credentials, endpoint, connection options, identity map
  # and middleware stack.
  module Configurable
    extend Forwardable
    attr_writer :consumer_key, :consumer_secret, :oauth_token, :oauth_token_secret, :bearer_token
    attr_accessor :endpoint, :connection_options, :identity_map, :middleware
    # Two configurations hash equally when their option sets do.
    def_delegator :options, :hash
    class << self
      # @return [Array<Symbol>] every configurable option name
      def keys
        @keys ||= [
          :consumer_key,
          :consumer_secret,
          :oauth_token,
          :oauth_token_secret,
          :bearer_token,
          :endpoint,
          :connection_options,
          :identity_map,
          :middleware,
        ]
      end
    end
    # Convenience method to allow configuration options to be set in a block
    #
    # @raise [Twitter::Error::ConfigurationError] Error is raised when supplied
    #   twitter credentials are not a String or Symbol.
    def configure
      yield self
      validate_credential_type!
      self
    end
    # @return [Boolean] true when all four OAuth credentials are present, or
    #   when an application-only bearer token is set
    def credentials?
      credentials.values.all? || @bearer_token
    end
    # Restores every configurable option to its library default.
    # @return [self]
    def reset!
      Twitter::Configurable.keys.each do |key|
        instance_variable_set(:"@#{key}", Twitter::Default.options[key])
      end
      self
    end
    alias setup reset!
    private
    # @return [Boolean] whether an application-only bearer token is configured
    def application_only_auth?
      !!@bearer_token
    end
    # @return [Hash] the four user OAuth credentials
    def credentials
      {
        :consumer_key => @consumer_key,
        :consumer_secret => @consumer_secret,
        :token => @oauth_token,
        :token_secret => @oauth_token_secret,
      }
    end
    # @return [Hash] every configurable option mapped to its current value
    def options
      Hash[Twitter::Configurable.keys.map{|key| [key, instance_variable_get(:"@#{key}")]}]
    end
    # Ensures that all credentials set during configuration are of a
    # valid type. Valid types are String and Symbol.
    #
    # @raise [Twitter::Error::ConfigurationError] Error is raised when
    #   supplied twitter credentials are not a String or Symbol.
    def validate_credential_type!
      credentials.each do |credential, value|
        next if value.nil?
        unless value.is_a?(String) || value.is_a?(Symbol)
          raise(Error::ConfigurationError, "Invalid #{credential} specified: #{value} must be a string or symbol.")
        end
      end
    end
  end
end
Expose the credentials hash publicly, and add boolean methods for the presence of user/bearer tokens
require 'forwardable'
require 'twitter/error/configuration_error'
module Twitter
module Configurable
extend Forwardable
attr_writer :consumer_key, :consumer_secret, :oauth_token, :oauth_token_secret, :bearer_token
attr_accessor :endpoint, :connection_options, :identity_map, :middleware
def_delegator :options, :hash
class << self
def keys
@keys ||= [
:consumer_key,
:consumer_secret,
:oauth_token,
:oauth_token_secret,
:bearer_token,
:endpoint,
:connection_options,
:identity_map,
:middleware,
]
end
end
# Convenience method to allow configuration options to be set in a block
#
# @raise [Twitter::Error::ConfigurationError] Error is raised when supplied
# twitter credentials are not a String or Symbol.
def configure
yield self
validate_credential_type!
self
end
# @return [Boolean]
def credentials?
credentials.values.all? || @bearer_token
end
def reset!
Twitter::Configurable.keys.each do |key|
instance_variable_set(:"@#{key}", Twitter::Default.options[key])
end
self
end
alias setup reset!
# @return [Hash]
def credentials
{ :consumer_key => @consumer_key,
:consumer_secret => @consumer_secret,
:token => @oauth_token,
:token_secret => @oauth_token_secret }
end
protected
# @return [Boolean]
def bearer_token?
!!@bearer_token
end
# @return [Boolean]
def user_token?
!!@oauth_token
end
private
# @return [Hash]
def options
Hash[Twitter::Configurable.keys.map{|key| [key, instance_variable_get(:"@#{key}")]}]
end
# Ensures that all credentials set during configuration are of a
# valid type. Valid types are String and Symbol.
#
# @raise [Twitter::Error::ConfigurationError] Error is raised when
# supplied twitter credentials are not a String or Symbol.
def validate_credential_type!
credentials.each do |credential, value|
next if value.nil?
unless value.is_a?(String) || value.is_a?(Symbol)
raise(Error::ConfigurationError, "Invalid #{credential} specified: #{value} must be a string or symbol.")
end
end
end
end
end
|
module GitHub
  class Ldap
    require 'ladle'
    # Preconfigured user fixtures. If you want to use them for your own tests.
    DEFAULT_FIXTURES_PATH = File.expand_path('fixtures.ldif', File.dirname(__FILE__))
    DEFAULT_SERVER_OPTIONS = {
      user_fixtures: DEFAULT_FIXTURES_PATH,
      user_domain: 'dc=github,dc=com',
      admin_user: 'uid=admin,dc=github,dc=com',
      admin_password: 'secret',
      quiet: true,
      port: 3897
    }
    class << self
      # server_options: is the options used to start the server,
      # useful to know in development.
      attr_reader :server_options
      # ldap_server: is the instance of the testing ldap server,
      # you should never interact with it,
      # but it's used to gracefully stop it after your tests finalize.
      attr_reader :ldap_server
    end
    # Start a testing server.
    #
    # options: is a hash with the custom options for the server.
    #
    # NOTE(review): despite the original comment, this always (re)starts a
    # server — it does not check whether @ldap_server already exists.
    def self.start_server(options = {})
      @server_options = DEFAULT_SERVER_OPTIONS.merge(options)
      # The default quiet: true silences Ladle even when the caller asks for
      # verbose output, so drop :quiet when :verbose is set.
      @server_options.delete(:quiet) if @server_options[:verbose]
      @server_options[:allow_anonymous] ||= false
      @server_options[:ldif] = @server_options[:user_fixtures]
      @server_options[:domain] = @server_options[:user_domain]
      @server_options[:tmpdir] ||= server_tmp
      @ldap_server = Ladle::Server.new(@server_options)
      @ldap_server.start
    end
    # Stop the testing server.
    # If there is no server started this method doesn't do anything.
    def self.stop_server
      ldap_server && ldap_server.stop
    end
    # Determine the temporary directory where the ldap server lives.
    # If there is no temporary directory in the environment we create one in the base path.
    #
    # Returns the path to the temporary directory.
    def self.server_tmp
      tmp = ENV['TMPDIR'] || ENV['TEMPDIR']
      if tmp.nil?
        tmp = 'tmp'
        Dir.mkdir(tmp) unless File.directory?('tmp')
      end
      tmp
    end
  end
end
Remove quiet option when verbose is set
Helpful for debugging since quiet will override verbose in practice.
module GitHub
  # Helpers for booting and stopping a throwaway LDAP server for tests.
  class Ldap
    require 'ladle'

    # Preconfigured user fixtures, available for your own tests too.
    DEFAULT_FIXTURES_PATH = File.expand_path('fixtures.ldif', File.dirname(__FILE__))

    # Baseline server configuration; any key can be overridden through the
    # options hash handed to `start_server`.
    DEFAULT_SERVER_OPTIONS = {
      user_fixtures: DEFAULT_FIXTURES_PATH,
      user_domain: 'dc=github,dc=com',
      admin_user: 'uid=admin,dc=github,dc=com',
      admin_password: 'secret',
      quiet: true,
      port: 3897
    }

    class << self
      # The options the server was started with — handy in development.
      attr_reader :server_options

      # The running test-server instance. Do not interact with it directly;
      # it is kept only so it can be gracefully stopped after the tests.
      attr_reader :ldap_server
    end

    # Boots a test LDAP server using the defaults merged with `options`.
    def self.start_server(options = {})
      opts = DEFAULT_SERVER_OPTIONS.merge(options)
      opts[:allow_anonymous] ||= false
      opts[:ldif] = opts[:user_fixtures]
      opts[:domain] = opts[:user_domain]
      opts[:tmpdir] ||= server_tmp
      # `quiet` would otherwise win over `verbose`; drop it when debugging.
      opts[:quiet] = false if opts[:verbose]
      @server_options = opts
      @ldap_server = Ladle::Server.new(@server_options)
      @ldap_server.start
    end

    # Stops the test server, if one was started; otherwise does nothing.
    def self.stop_server
      ldap_server.stop if ldap_server
    end

    # Resolves the temp directory the server should live in, creating a
    # local ./tmp fallback when the environment supplies none.
    #
    # Returns the path to the temp directory.
    def self.server_tmp
      env_tmp = ENV['TMPDIR'] || ENV['TEMPDIR']
      return env_tmp if env_tmp
      Dir.mkdir('tmp') unless File.directory?('tmp')
      'tmp'
    end
  end
end
|
# Wizard controller guiding a manager through creating/editing a site page
# one step at a time (position, title, type, dataset, ...). Intermediate
# state lives in the session until the final save.
class Management::PageStepsController < ManagementController
  include Wicked::Wizard
  # The order of prepend is the opposite of its declaration
  prepend_before_action :set_steps
  prepend_before_action :build_current_page_state, only: [:show, :update]
  prepend_before_action :set_site, only: [:new, :edit, :show, :update]
  before_action :setup_wizard
  # TODO: Authenticate user per site
  # before_action :authenticate_user_for_site!, only: [:index, :new, :create]
  # before_action :set_content_type_variables, only: [:new, :edit]
  helper_method :form_steps
  attr_accessor :steps_names
  attr_accessor :invalid_steps
  CONTINUE = 'CONTINUE'.freeze
  SAVE = 'SAVE CHANGES'.freeze

  # This action cleans the session
  def new
    # TODO: change this when the pages are unified
    if params[:position] && params[:parent_id]
      # BUGFIX: was `DateTime.new.to_id` — no such method; the sibling
      # branch below uses `.to_i`, which is what was intended.
      session[:page] = {uri: "test-#{DateTime.new.to_i}", parent_id: params[:parent_id], position: params[:position]}
    else
      session[:page] = {uri: "test-#{DateTime.new.to_i}", parent_id: @site.root.id, position: @site.site_pages.where(parent_id: @site.root.id).length}
    end
    session[:dataset_setting] = {}
    # TODO: The next line should be used. While developing this feature...
    # ... there will be a direct jump to datasets
    # redirect_to management_page_step_path(id: :position)
    redirect_to management_site_page_step_path(id: 'position')
  end

  # This action cleans the session
  def edit
    session[:page] = {}
    session[:dataset_setting] = {}
    redirect_to next_wizard_path
    #redirect_to management_site_page_step_path(page: params[:page_id], id: 'dataset')
  end

  # Prepares the view data required by the current wizard step.
  def show
    case step
    when 'position'
    when 'title'
    when 'type'
    when 'dataset'
      @context_datasets = current_user.get_context_datasets
    when 'filters'
      build_current_dataset_setting
      @fields = @dataset_setting.get_fields
      gon.fields = @fields
    when 'columns'
      build_current_dataset_setting
      @fields = @dataset_setting.get_fields
    when 'customization'
    when 'preview'
      build_current_dataset_setting
      gon.analysis_user_filters = @dataset_setting.columns_changeable.blank? ? {} : (JSON.parse @dataset_setting.columns_changeable)
      gon.analysis_graphs = @dataset_setting.default_graphs.blank? ? {} : (JSON.parse @dataset_setting.default_graphs)
      gon.analysis_map = @dataset_setting.default_map.blank? ? {} : (JSON.parse @dataset_setting.default_map)
      gon.analysis_data = @dataset_setting.get_filtered_dataset
      gon.analysis_timestamp = @dataset_setting.fields_last_modified
    # OPEN CONTENT PATH
    when 'open_content'
    when 'open_content_preview'
    # DYNAMIC INDICATOR PATH
    when 'widget'
    when 'dynamic_indicator_dashboard'
    when 'dynamic_indicator_dashboard_preview'
    end
    # TODO: Is this supposed to have only page creation?
    @breadcrumbs = ['Page creation']
    render_wizard
  end

  # TODO: REFACTOR
  # Persists the current step's input (session for intermediate steps,
  # database on the final preview steps) and advances the wizard.
  def update
    case step
    when 'position'
      # BUGFIX: persist the posted position/parent into the session —
      # previously this step's input was silently discarded.
      set_current_page_state
      redirect_to next_wizard_path
    when 'title'
      # BUGFIX: persist the posted title into the session (see above).
      set_current_page_state
      redirect_to next_wizard_path
    when 'type'
      set_current_page_state
      redirect_to next_wizard_path
    # ANALYSIS DASHBOARD PATH
    when 'dataset'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        @context_datasets = current_user.get_context_datasets
        render_wizard
      end
    when 'filters'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'columns'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'customization'
      build_current_dataset_setting
      set_current_dataset_setting_state
      set_current_page_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'preview'
      build_current_dataset_setting
      set_current_dataset_setting_state
      @page.dataset_setting = @dataset_setting
      if @page.save
        redirect_to management_site_site_pages_path params[:site_slug]
      else
        render_wizard
      end
    # OPEN CONTENT PATH
    when 'open_content'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'open_content_preview'
      # TODO : When validations are done, put this back
      #if @page.save
      redirect_to management_site_site_pages_path params[:site_slug]
      #else
      #  render_wizard
      #end
    # DYNAMIC INDICATOR DASHBOARD PATH
    when 'widget'
      redirect_to next_wizard_path
    when 'dynamic_indicator_dashboard'
      redirect_to next_wizard_path
    when 'dynamic_indicator_dashboard_preview'
      # TODO: When the validations are done, put this back
      #if @page.save
      redirect_to management_site_site_pages_path params[:site_slug]
      #else
      #  render_wizard
      #end
    # LINK PATH
    when 'link'
      if @page.save
        redirect_to management_site_site_pages_path params[:site_slug]
      else
        render_wizard
      end
    end
  end

  private

  # Strong-parameters whitelist for the page form.
  def page_params
    # TODO: To have different permissions for different steps
    # BUGFIX: :position and :parent_id were missing, so the position step
    # could never actually update those attributes.
    params.require(:site_page).permit(:name, :description, :position, :parent_id, :content_type, content: [:url, :target_blank], dataset_setting: [:context_id_dataset_id, :filters, visible_fields: []])
  end

  def set_site
    @site = Site.find_by({slug: params[:site_slug]})
  end

  # Rebuilds @page from the database (edit) or fresh (new), then layers the
  # session state and any posted attributes on top.
  def build_current_page_state
    # Verify if the manager is editing a page or creating a new one
    @page = params[:page_id] ? SitePage.find(params[:page_id]) : (SitePage.new site_id: @site.id)
    # Update the page with the attributes saved on the session
    @page.assign_attributes session[:page] if session[:page]
    @page.assign_attributes page_params.to_h.except(:dataset_setting) if params[:site_page] && page_params.to_h.except(:dataset_setting)
  end

  def set_current_page_state
    session[:page] = @page
  end

  # Rebuilds @dataset_setting from posted params plus session state.
  def build_current_dataset_setting
    ds_params = {}
    ds_params = page_params.to_h[:dataset_setting] if params[:site_page] && page_params.to_h && page_params.to_h[:dataset_setting]
    @dataset_setting = nil
    if ds_params[:id]
      @dataset_setting = DatasetSetting.find(ds_params[:id])
    else
      @dataset_setting = DatasetSetting.new
      @page.dataset_setting = @dataset_setting
    end
    @dataset_setting.assign_attributes session[:dataset_setting] if session[:dataset_setting]
    # If the user changed the id of the dataset, the entity is reset
    if ids = ds_params[:context_id_dataset_id]
      ids = ids.split(' ')
      @dataset_setting = DatasetSetting.new(context_id: ids[0], dataset_id: ids[1])
      @dataset_setting.api_table_name = @dataset_setting.get_table_name
    end
    if fields = ds_params[:filters]
      fields = JSON.parse fields
      filters = []
      changeables = []
      fields.each do |field|
        name = field['name']
        from = field['from']
        to = field['to']
        changeables << field['name'] if field['variable'] == 'true'
        filters << "#{name} between #{from} and #{to}"
      end
      filters = filters.blank? ? '' : filters.to_json
      changeables = changeables.blank? ? '' : changeables.to_json
      @dataset_setting.assign_attributes({filters: filters, columns_changeable: changeables})
    end
    if fields = ds_params[:visible_fields]
      columns_visible = fields.to_json
      @dataset_setting.columns_visible = columns_visible
    end
  end

  def set_current_dataset_setting_state
    session[:dataset_setting] = @dataset_setting
  end

  # Chooses the wizard step list: generic steps until the page has a
  # content type, then the type-specific steps declared by the page.
  def set_steps
    invalid_steps = []
    unless @page && @page.content_type
      steps = { pages: %w[position title type],
                names: %w[Position Title Type] }
      self.steps = steps[:pages]
      self.steps_names = steps[:names]
    else
      steps = @page.form_steps
      self.steps = steps[:pages]
      self.steps_names = steps[:names]
      invalid_steps = ['title']
    end
    set_invalid_steps invalid_steps
  end

  def form_steps
    self.steps
  end

  def set_invalid_steps(steps)
    self.invalid_steps = steps
  end
end
Saving position and title of the page
# Wizard controller guiding a manager through creating/editing a site page
# one step at a time (position, title, type, dataset, ...). Intermediate
# state lives in the session until the final save.
class Management::PageStepsController < ManagementController
  include Wicked::Wizard
  # The order of prepend is the opposite of its declaration
  prepend_before_action :set_steps
  prepend_before_action :build_current_page_state, only: [:show, :update]
  prepend_before_action :set_site, only: [:new, :edit, :show, :update]
  before_action :setup_wizard
  # TODO: Authenticate user per site
  # before_action :authenticate_user_for_site!, only: [:index, :new, :create]
  # before_action :set_content_type_variables, only: [:new, :edit]
  helper_method :form_steps
  attr_accessor :steps_names
  attr_accessor :invalid_steps
  CONTINUE = 'CONTINUE'.freeze
  SAVE = 'SAVE CHANGES'.freeze

  # This action cleans the session
  def new
    # TODO: change this when the pages are unified
    if params[:position] && params[:parent_id]
      # BUGFIX: was `DateTime.new.to_id` — no such method; the sibling
      # branch below uses `.to_i`, which is what was intended.
      session[:page] = {uri: "test-#{DateTime.new.to_i}", parent_id: params[:parent_id], position: params[:position]}
    else
      session[:page] = {uri: "test-#{DateTime.new.to_i}", parent_id: @site.root.id, position: @site.site_pages.where(parent_id: @site.root.id).length}
    end
    session[:dataset_setting] = {}
    # TODO: The next line should be used. While developing this feature...
    # ... there will be a direct jump to datasets
    # redirect_to management_page_step_path(id: :position)
    redirect_to management_site_page_step_path(id: 'position')
  end

  # This action cleans the session
  def edit
    session[:page] = {}
    session[:dataset_setting] = {}
    redirect_to next_wizard_path
    #redirect_to management_site_page_step_path(page: params[:page_id], id: 'dataset')
  end

  # Prepares the view data required by the current wizard step.
  def show
    case step
    when 'position'
    when 'title'
    when 'type'
    when 'dataset'
      @context_datasets = current_user.get_context_datasets
    when 'filters'
      build_current_dataset_setting
      @fields = @dataset_setting.get_fields
      gon.fields = @fields
    when 'columns'
      build_current_dataset_setting
      @fields = @dataset_setting.get_fields
    when 'customization'
    when 'preview'
      build_current_dataset_setting
      gon.analysis_user_filters = @dataset_setting.columns_changeable.blank? ? {} : (JSON.parse @dataset_setting.columns_changeable)
      gon.analysis_graphs = @dataset_setting.default_graphs.blank? ? {} : (JSON.parse @dataset_setting.default_graphs)
      gon.analysis_map = @dataset_setting.default_map.blank? ? {} : (JSON.parse @dataset_setting.default_map)
      gon.analysis_data = @dataset_setting.get_filtered_dataset
      gon.analysis_timestamp = @dataset_setting.fields_last_modified
    # OPEN CONTENT PATH
    when 'open_content'
    when 'open_content_preview'
    # DYNAMIC INDICATOR PATH
    when 'widget'
    when 'dynamic_indicator_dashboard'
    when 'dynamic_indicator_dashboard_preview'
    end
    # TODO: Is this supposed to have only page creation?
    @breadcrumbs = ['Page creation']
    render_wizard
  end

  # TODO: REFACTOR
  # Persists the current step's input (session for intermediate steps,
  # database on the final preview steps) and advances the wizard.
  def update
    case step
    when 'position'
      set_current_page_state
      redirect_to next_wizard_path
    when 'title'
      set_current_page_state
      redirect_to next_wizard_path
    when 'type'
      set_current_page_state
      redirect_to next_wizard_path
    # ANALYSIS DASHBOARD PATH
    when 'dataset'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        @context_datasets = current_user.get_context_datasets
        render_wizard
      end
    when 'filters'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'columns'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'customization'
      build_current_dataset_setting
      set_current_dataset_setting_state
      set_current_page_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'preview'
      build_current_dataset_setting
      set_current_dataset_setting_state
      @page.dataset_setting = @dataset_setting
      if @page.save
        redirect_to management_site_site_pages_path params[:site_slug]
      else
        render_wizard
      end
    # OPEN CONTENT PATH
    when 'open_content'
      build_current_dataset_setting
      set_current_dataset_setting_state
      if @page.valid?
        redirect_to next_wizard_path
      else
        render_wizard
      end
    when 'open_content_preview'
      # TODO : When validations are done, put this back
      #if @page.save
      redirect_to management_site_site_pages_path params[:site_slug]
      #else
      #  render_wizard
      #end
    # DYNAMIC INDICATOR DASHBOARD PATH
    when 'widget'
      redirect_to next_wizard_path
    when 'dynamic_indicator_dashboard'
      redirect_to next_wizard_path
    when 'dynamic_indicator_dashboard_preview'
      # TODO: When the validations are done, put this back
      #if @page.save
      redirect_to management_site_site_pages_path params[:site_slug]
      #else
      #  render_wizard
      #end
    # LINK PATH
    when 'link'
      if @page.save
        redirect_to management_site_site_pages_path params[:site_slug]
      else
        render_wizard
      end
    end
  end

  private

  # Strong-parameters whitelist for the page form.
  def page_params
    # TODO: To have different permissions for different steps
    params.require(:site_page).permit(:name, :description, :position, :parent_id, :content_type, content: [:url, :target_blank], dataset_setting: [:context_id_dataset_id, :filters, visible_fields: []])
  end

  def set_site
    @site = Site.find_by({slug: params[:site_slug]})
  end

  # Rebuilds @page from the database (edit) or fresh (new), then layers the
  # session state and any posted attributes on top.
  def build_current_page_state
    # Verify if the manager is editing a page or creating a new one
    @page = params[:page_id] ? SitePage.find(params[:page_id]) : (SitePage.new site_id: @site.id)
    # Update the page with the attributes saved on the session
    @page.assign_attributes session[:page] if session[:page]
    @page.assign_attributes page_params.to_h.except(:dataset_setting) if params[:site_page] && page_params.to_h.except(:dataset_setting)
  end

  def set_current_page_state
    session[:page] = @page
  end

  # Rebuilds @dataset_setting from posted params plus session state.
  def build_current_dataset_setting
    ds_params = {}
    ds_params = page_params.to_h[:dataset_setting] if params[:site_page] && page_params.to_h && page_params.to_h[:dataset_setting]
    @dataset_setting = nil
    if ds_params[:id]
      @dataset_setting = DatasetSetting.find(ds_params[:id])
    else
      @dataset_setting = DatasetSetting.new
      @page.dataset_setting = @dataset_setting
    end
    @dataset_setting.assign_attributes session[:dataset_setting] if session[:dataset_setting]
    # If the user changed the id of the dataset, the entity is reset
    if ids = ds_params[:context_id_dataset_id]
      ids = ids.split(' ')
      @dataset_setting = DatasetSetting.new(context_id: ids[0], dataset_id: ids[1])
      @dataset_setting.api_table_name = @dataset_setting.get_table_name
    end
    if fields = ds_params[:filters]
      fields = JSON.parse fields
      filters = []
      changeables = []
      fields.each do |field|
        name = field['name']
        from = field['from']
        to = field['to']
        changeables << field['name'] if field['variable'] == 'true'
        filters << "#{name} between #{from} and #{to}"
      end
      filters = filters.blank? ? '' : filters.to_json
      changeables = changeables.blank? ? '' : changeables.to_json
      @dataset_setting.assign_attributes({filters: filters, columns_changeable: changeables})
    end
    if fields = ds_params[:visible_fields]
      columns_visible = fields.to_json
      @dataset_setting.columns_visible = columns_visible
    end
  end

  def set_current_dataset_setting_state
    session[:dataset_setting] = @dataset_setting
  end

  # Chooses the wizard step list: generic steps until the page has a
  # content type, then the type-specific steps declared by the page.
  def set_steps
    invalid_steps = []
    unless @page && @page.content_type
      steps = { pages: %w[position title type],
                names: %w[Position Title Type] }
      self.steps = steps[:pages]
      self.steps_names = steps[:names]
    else
      steps = @page.form_steps
      self.steps = steps[:pages]
      self.steps_names = steps[:names]
      invalid_steps = ['title']
    end
    set_invalid_steps invalid_steps
  end

  def form_steps
    self.steps
  end

  def set_invalid_steps(steps)
    self.invalid_steps = steps
  end
end
|
module U2i
  # Utilities for continuous-integration tasks.
  module CiUtils
    # Gem version. Frozen so the constant's string cannot be mutated.
    VERSION = '0.3.2'.freeze
  end
end
Bump version to 1.0.0.
module U2i
  # Utilities for continuous-integration tasks.
  module CiUtils
    # Gem version. Frozen so the constant's string cannot be mutated.
    VERSION = '1.0.0'.freeze
  end
end
|
require 'yaml'
# Loads a YAML configuration describing repositories to merge.
class Merge
  VERSION = "1.0.0".freeze

  # file:    path to the YAML configuration file
  # options: hash of behaviour flags (not read here yet)
  def initialize(file, options)
    @file = file
    @options = options
  end

  # Loads and returns the parsed YAML configuration.
  #
  # NOTE(review): YAML.load_file can instantiate arbitrary Ruby objects;
  # consider YAML.safe_load_file if the config file is not trusted.
  def local!
    YAML.load_file @file
  end
end
Clone repos based on YAML config
require 'yaml'
# Loads a YAML configuration and clones the repositories it describes.
class Merge
  VERSION = "1.0.0".freeze

  # file:    path to the YAML configuration file
  # options: hash of behaviour flags (not read here yet)
  def initialize(file, options)
    @file = file
    @options = options
  end

  # Loads the YAML configuration and clones every configured repository.
  # Returns the directory the repositories were cloned into.
  def local!
    config = YAML.load_file @file
    clone_repos(config)
  end

  # Clones each repository listed under config["repositories"] into ./out.
  # Returns the absolute path of that directory.
  def clone_repos(config)
    require 'fileutils' # BUGFIX: FileUtils was used without being required

    repo_dir = "#{Dir.pwd}/out"
    FileUtils.mkdir_p repo_dir
    Dir.chdir repo_dir do
      config["repositories"].each do |repo|
        # SECURITY: list-form system avoids shell interpolation of the
        # config-supplied url / "sub directory" values (the previous
        # backtick form was vulnerable to shell injection).
        system(*['git', 'clone', repo["url"], repo["sub directory"]].compact)
      end
    end
    repo_dir
  end
end
|
# Handles visitor requests for a site invitation while the site is at
# capacity. NOTE(review): class name is misspelled ("Reguested") — renaming
# would change routes/file names, so it is left as-is here.
class ReguestedInvitationsController < ApplicationController
  # NOTE(review): this filter returns a boolean and never redirects/halts
  # explicitly; confirm it actually blocks unauthorised users under the
  # Rails version in use (pre-Rails-5 halts on `false` only).
  before_filter :only_logged_and_not_authorised
  # Records at most one invitation request per user, then redirects to root
  # with an appropriate notice.
  def create
    request = RequestedInvitation.find_by_user_id(current_user.id)
    if request.nil?
      RequestedInvitation.create!(:user_id => current_user.id, :invitation_sent_date => Time.now)
      redirect_to root_url, :notice => "Your request has been received. The site is currently at capacity. Your invite will be dispatched as soon as possible."
    else
      redirect_to root_url, :notice => "You have already requested an invitation. It will be dispatched as soon as possible."
    end
  end
  private
  # True when a user is signed in and their person record is authorised.
  # NOTE(review): despite the "_not_authorised" name this passes only users
  # that ARE authorised — confirm the intended semantics.
  def only_logged_and_not_authorised
    current_user && current_user.person.authorised?
  end
end
Shrank the notices
# Handles visitor requests for a site invitation.
class ReguestedInvitationsController < ApplicationController
  before_filter :only_logged_and_not_authorised

  # Records at most one invitation request per user, then redirects to the
  # root page with an appropriate notice.
  def create
    existing = RequestedInvitation.find_by_user_id(current_user.id)
    if existing
      redirect_to root_url, :notice => "You have already requested an invitation."
    else
      RequestedInvitation.create!(:user_id => current_user.id, :invitation_sent_date => Time.now)
      redirect_to root_url, :notice => "Your invite will be dispatched as soon as possible."
    end
  end

  private

  # True when a user is signed in and their person record is authorised.
  # NOTE(review): despite the "_not_authorised" name this passes only users
  # that ARE authorised — confirm the intended filter semantics.
  def only_logged_and_not_authorised
    current_user && current_user.person.authorised?
  end
end
|
# coding: UTF-8
require 'securerandom'
require_dependency 'google_plus_api'
require_dependency 'carto/strong_password_validator'
# This class is quite coupled to UserCreation.
module CartoDB
  # Builder that assembles and validates a new ::User account (optionally
  # inside an organization) and enqueues the background creation job.
  class UserAccountCreator
    PARAM_USERNAME = :username
    PARAM_EMAIL = :email
    PARAM_PASSWORD = :password
    # For user creations from orgs
    PARAM_SOFT_GEOCODING_LIMIT = :soft_geocoding_limit
    PARAM_SOFT_HERE_ISOLINES_LIMIT = :soft_here_isolines_limit
    PARAM_SOFT_TWITTER_DATASOURCE_LIMIT = :soft_twitter_datasource_limit
    PARAM_QUOTA_IN_BYTES = :quota_in_bytes

    # created_via: signup-channel marker (e.g.
    # Carto::UserCreation::CREATED_VIA_LDAP); also gates password rules.
    def initialize(created_via)
      @built = false
      @organization = nil
      @google_user_data = nil
      @user = ::User.new
      @user_params = {}
      @custom_errors = {}
      @created_via = created_via
    end

    def with_username(value)
      with_param(PARAM_USERNAME, value)
    end

    def with_email(value)
      with_param(PARAM_EMAIL, value)
    end

    def with_password(value)
      with_param(PARAM_PASSWORD, value)
    end

    def with_soft_geocoding_limit(value)
      with_param(PARAM_SOFT_GEOCODING_LIMIT, value)
    end

    def with_soft_here_isolines_limit(value)
      with_param(PARAM_SOFT_HERE_ISOLINES_LIMIT, value)
    end

    def with_soft_twitter_datasource_limit(value)
      with_param(PARAM_SOFT_TWITTER_DATASOURCE_LIMIT, value)
    end

    def with_quota_in_bytes(value)
      with_param(PARAM_QUOTA_IN_BYTES, value)
    end

    def with_organization(organization)
      @built = false
      @organization = organization
      @user = ::User.new_with_organization(organization)
      self
    end

    def with_invitation_token(invitation_token)
      @invitation_token = invitation_token
      self
    end

    # Derives username from the email local part and a random password.
    def with_email_only(email)
      with_email(email)
      with_username(email.split('@')[0])
      with_password(SecureRandom.hex)
      self
    end

    def user
      @user
    end

    def with_google_token(google_access_token)
      @built = false
      # get_user_data can return nil
      @google_user_data = GooglePlusAPI.new.get_user_data(google_access_token)
      self
    end

    # Builds the user and runs organization + model validations.
    def valid?
      build
      if @organization
        if @organization.owner.nil?
          if !promote_to_organization_owner?
            @custom_errors[:organization] = ["Organization owner is not set. Administrator must login first."]
          end
        else
          validate_organization_soft_limits
        end
        # BUGFIX: only enforce the strong-password policy when the
        # organization has it enabled; LDAP-created users are exempt because
        # their passwords are managed externally.
        if @organization.strong_passwords_enabled && @created_via != Carto::UserCreation::CREATED_VIA_LDAP
          password_validator = Carto::StrongPasswordValidator.new
          password_errors = password_validator.validate(@user.password)
          unless password_errors.empty?
            @custom_errors[:password] = [password_validator.formatted_error_message(password_errors)]
          end
        end
      end
      @user.valid? && @user.validate_credentials_not_taken_in_central && @custom_errors.empty?
    end

    def validation_errors
      @user.errors.merge!(@custom_errors)
    end

    # Persists the UserCreation record and enqueues the signup job.
    def enqueue_creation(current_controller)
      user_creation = build_user_creation
      user_creation.save
      common_data_url = CartoDB::Visualization::CommonDataService.build_url(current_controller)
      ::Resque.enqueue(::Resque::UserJobs::Signup::NewUser,
                       user_creation.id,
                       common_data_url,
                       promote_to_organization_owner?)
      { id: user_creation.id, username: user_creation.username }
    end

    def build_user_creation
      build
      Carto::UserCreation.new_user_signup(@user, @created_via).with_invitation_token(@invitation_token)
    end

    # Copies the accumulated params (or Google profile data) onto @user.
    # Idempotent: subsequent calls are no-ops until a with_* resets @built.
    def build
      return if @built
      if @google_user_data
        @google_user_data.set_values(@user)
      else
        @user.email = @user_params[PARAM_EMAIL]
        @user.password = @user_params[PARAM_PASSWORD]
        @user.password_confirmation = @user_params[PARAM_PASSWORD]
      end
      @user.invitation_token = @invitation_token
      @user.username = @user_params[PARAM_USERNAME] if @user_params[PARAM_USERNAME]
      @user.soft_geocoding_limit = @user_params[PARAM_SOFT_GEOCODING_LIMIT] == 'true'
      @user.soft_here_isolines_limit = @user_params[PARAM_SOFT_HERE_ISOLINES_LIMIT] == 'true'
      @user.soft_twitter_datasource_limit = @user_params[PARAM_SOFT_TWITTER_DATASOURCE_LIMIT] == 'true'
      @user.quota_in_bytes = @user_params[PARAM_QUOTA_IN_BYTES] if @user_params[PARAM_QUOTA_IN_BYTES]
      @built = true
      @user
    end

    private

    # This is coupled to OrganizationUserController soft limits validations.
    def validate_organization_soft_limits
      owner = @organization.owner
      if @user_params[PARAM_SOFT_GEOCODING_LIMIT] == 'true' && !owner.soft_geocoding_limit
        @custom_errors[:soft_geocoding_limit] = ["Owner can't assign soft geocoding limit"]
      end
      if @user_params[PARAM_SOFT_HERE_ISOLINES_LIMIT] == 'true' && !owner.soft_here_isolines_limit
        @custom_errors[:soft_here_isolines_limit] = ["Owner can't assign soft here isolines limit"]
      end
      if @user_params[PARAM_SOFT_TWITTER_DATASOURCE_LIMIT] == 'true' && !owner.soft_twitter_datasource_limit
        @custom_errors[:soft_twitter_datasource_limit] = ["Owner can't assign soft twitter datasource limit"]
      end
    end

    def with_param(key, value)
      @built = false
      @user_params[key] = value
      self
    end

    def promote_to_organization_owner?
      # INFO: Custom installs convention: org owner always has `<orgname>-admin` format
      !!(@organization && !@organization.owner_id && @user_params[PARAM_USERNAME] &&
         @user_params[PARAM_USERNAME] == "#{@organization.name}-admin")
    end
  end
end
Add check for strong passwords
# coding: UTF-8
require 'securerandom'
require_dependency 'google_plus_api'
require_dependency 'carto/strong_password_validator'
# This class is quite coupled to UserCreation.
module CartoDB
  # Builder that assembles and validates a new ::User account (optionally
  # inside an organization) and enqueues the background creation job.
  class UserAccountCreator
    PARAM_USERNAME = :username
    PARAM_EMAIL = :email
    PARAM_PASSWORD = :password
    # For user creations from orgs
    PARAM_SOFT_GEOCODING_LIMIT = :soft_geocoding_limit
    PARAM_SOFT_HERE_ISOLINES_LIMIT = :soft_here_isolines_limit
    PARAM_SOFT_TWITTER_DATASOURCE_LIMIT = :soft_twitter_datasource_limit
    PARAM_QUOTA_IN_BYTES = :quota_in_bytes

    # created_via: signup-channel marker (e.g.
    # Carto::UserCreation::CREATED_VIA_LDAP); also gates password rules.
    def initialize(created_via)
      @built = false
      @organization = nil
      @google_user_data = nil
      @user = ::User.new
      @user_params = {}
      @custom_errors = {}
      @created_via = created_via
    end

    # --- Fluent setters: each stores a param, resets @built, returns self.
    def with_username(value)
      with_param(PARAM_USERNAME, value)
    end

    def with_email(value)
      with_param(PARAM_EMAIL, value)
    end

    def with_password(value)
      with_param(PARAM_PASSWORD, value)
    end

    def with_soft_geocoding_limit(value)
      with_param(PARAM_SOFT_GEOCODING_LIMIT, value)
    end

    def with_soft_here_isolines_limit(value)
      with_param(PARAM_SOFT_HERE_ISOLINES_LIMIT, value)
    end

    def with_soft_twitter_datasource_limit(value)
      with_param(PARAM_SOFT_TWITTER_DATASOURCE_LIMIT, value)
    end

    def with_quota_in_bytes(value)
      with_param(PARAM_QUOTA_IN_BYTES, value)
    end

    # Rebinds @user to a fresh user pre-wired to the organization.
    def with_organization(organization)
      @built = false
      @organization = organization
      @user = ::User.new_with_organization(organization)
      self
    end

    def with_invitation_token(invitation_token)
      @invitation_token = invitation_token
      self
    end

    # Derives username from the email local part and a random password.
    def with_email_only(email)
      with_email(email)
      with_username(email.split('@')[0])
      with_password(SecureRandom.hex)
      self
    end

    def user
      @user
    end

    def with_google_token(google_access_token)
      @built = false
      # get_user_data can return nil
      @google_user_data = GooglePlusAPI.new.get_user_data(google_access_token)
      self
    end

    # Builds the user and runs organization + model validations. Strong
    # passwords are only enforced when the organization enables them and the
    # account is not LDAP-managed.
    def valid?
      build
      if @organization
        if @organization.owner.nil?
          if !promote_to_organization_owner?
            @custom_errors[:organization] = ["Organization owner is not set. Administrator must login first."]
          end
        else
          validate_organization_soft_limits
        end
        if @organization.strong_passwords_enabled && @created_via != Carto::UserCreation::CREATED_VIA_LDAP
          password_validator = Carto::StrongPasswordValidator.new
          password_errors = password_validator.validate(@user.password)
          unless password_errors.empty?
            @custom_errors[:password] = [password_validator.formatted_error_message(password_errors)]
          end
        end
      end
      @user.valid? && @user.validate_credentials_not_taken_in_central && @custom_errors.empty?
    end

    def validation_errors
      @user.errors.merge!(@custom_errors)
    end

    # Persists the UserCreation record and enqueues the signup job.
    def enqueue_creation(current_controller)
      user_creation = build_user_creation
      user_creation.save
      common_data_url = CartoDB::Visualization::CommonDataService.build_url(current_controller)
      ::Resque.enqueue(::Resque::UserJobs::Signup::NewUser,
                       user_creation.id,
                       common_data_url,
                       promote_to_organization_owner?)
      { id: user_creation.id, username: user_creation.username }
    end

    def build_user_creation
      build
      Carto::UserCreation.new_user_signup(@user, @created_via).with_invitation_token(@invitation_token)
    end

    # Copies the accumulated params (or Google profile data) onto @user.
    # Idempotent: subsequent calls are no-ops until a with_* resets @built.
    def build
      return if @built
      if @google_user_data
        @google_user_data.set_values(@user)
      else
        @user.email = @user_params[PARAM_EMAIL]
        @user.password = @user_params[PARAM_PASSWORD]
        @user.password_confirmation = @user_params[PARAM_PASSWORD]
      end
      @user.invitation_token = @invitation_token
      @user.username = @user_params[PARAM_USERNAME] if @user_params[PARAM_USERNAME]
      @user.soft_geocoding_limit = @user_params[PARAM_SOFT_GEOCODING_LIMIT] == 'true'
      @user.soft_here_isolines_limit = @user_params[PARAM_SOFT_HERE_ISOLINES_LIMIT] == 'true'
      @user.soft_twitter_datasource_limit = @user_params[PARAM_SOFT_TWITTER_DATASOURCE_LIMIT] == 'true'
      @user.quota_in_bytes = @user_params[PARAM_QUOTA_IN_BYTES] if @user_params[PARAM_QUOTA_IN_BYTES]
      @built = true
      @user
    end

    private

    # This is coupled to OrganizationUserController soft limits validations.
    def validate_organization_soft_limits
      owner = @organization.owner
      if @user_params[PARAM_SOFT_GEOCODING_LIMIT] == 'true' && !owner.soft_geocoding_limit
        @custom_errors[:soft_geocoding_limit] = ["Owner can't assign soft geocoding limit"]
      end
      if @user_params[PARAM_SOFT_HERE_ISOLINES_LIMIT] == 'true' && !owner.soft_here_isolines_limit
        @custom_errors[:soft_here_isolines_limit] = ["Owner can't assign soft here isolines limit"]
      end
      if @user_params[PARAM_SOFT_TWITTER_DATASOURCE_LIMIT] == 'true' && !owner.soft_twitter_datasource_limit
        @custom_errors[:soft_twitter_datasource_limit] = ["Owner can't assign soft twitter datasource limit"]
      end
    end

    # Stores a builder param and invalidates the built user.
    def with_param(key, value)
      @built = false
      @user_params[key] = value
      self
    end

    def promote_to_organization_owner?
      # INFO: Custom installs convention: org owner always has `<orgname>-admin` format
      !!(@organization && !@organization.owner_id && @user_params[PARAM_USERNAME] &&
         @user_params[PARAM_USERNAME] == "#{@organization.name}-admin")
    end
  end
end
module ActionDispatch::Routing
  class Mapper
    # Draws the Givey session/password/profile routes into the host app's
    # route set; call `givey_routes` from config/routes.rb.
    def givey_routes
      # SESSIONS
      match 'sign_in' => 'givey_rails/sessions#new', as: :new_session, via: :get
      match 'sign_in/facebook' => 'givey_rails/sessions#new_facebook', as: :facebook_session, via: :get
      match 'sign_in/twitter' => 'givey_rails/sessions#new_twitter', as: :twitter_session, via: :get
      # Same path as the GET form above, but POST creates the session.
      match 'sign_in' => 'givey_rails/sessions#create', as: :session, via: :post
      match 'callback_facebook' => 'givey_rails/sessions#create_facebook', as: :facebook_callback, via: :get
      match 'callback_twitter' => 'givey_rails/sessions#create_twitter', as: :twitter_callback, via: :get
      # GET is accepted alongside DELETE so plain links can sign out.
      match 'sign_out' => 'givey_rails/sessions#destroy', as: :destroy_session, via: [:delete, :get]
      # PASSWORD
      resource :password, only: [:new, :create, :edit, :update], controller: "givey_rails/passwords"
      # ME
      match 'sign_up' => 'givey_rails/me#new', via: :get
      resource :me, controller: "givey_rails/me", only: [:new, :create] do
        get 'paypal_link', 'paypal_link_callback'
      end
    end
  end
end
Remove reset password link
module ActionDispatch::Routing
  class Mapper
    # Draws the Givey session/profile routes into the host app's route set;
    # call `givey_routes` from config/routes.rb. (Password-reset routes were
    # intentionally removed.)
    def givey_routes
      # SESSIONS
      match 'sign_in' => 'givey_rails/sessions#new', as: :new_session, via: :get
      match 'sign_in/facebook' => 'givey_rails/sessions#new_facebook', as: :facebook_session, via: :get
      match 'sign_in/twitter' => 'givey_rails/sessions#new_twitter', as: :twitter_session, via: :get
      # Same path as the GET form above, but POST creates the session.
      match 'sign_in' => 'givey_rails/sessions#create', as: :session, via: :post
      match 'callback_facebook' => 'givey_rails/sessions#create_facebook', as: :facebook_callback, via: :get
      match 'callback_twitter' => 'givey_rails/sessions#create_twitter', as: :twitter_callback, via: :get
      # GET is accepted alongside DELETE so plain links can sign out.
      match 'sign_out' => 'givey_rails/sessions#destroy', as: :destroy_session, via: [:delete, :get]
      # ME
      match 'sign_up' => 'givey_rails/me#new', via: :get
      resource :me, controller: "givey_rails/me", only: [:new, :create] do
        get 'paypal_link', 'paypal_link_callback'
      end
    end
  end
end
|
require_dependency 'renalware/drugs'
module Renalware
  module Drugs
    # CRUD + JSON lookup endpoints for the drugs catalogue.
    class DrugsController < BaseController
      include Renalware::Concerns::Pageable
      before_filter :prepare_drugs_search, only: :index
      before_filter :prepare_paging, only: :index

      # Drugs matching the given medication switch, as HTML or JSON.
      def selected_drugs
        @selected_drugs = Drug.for(params[:medication_switch])
        authorize @selected_drugs
        respond_to do |format|
          format.html
          format.json { render :json => @selected_drugs.as_json(:only => [:id, :name]) }
        end
      end

      def new
        @drug = Drug.new
        authorize @drug
      end

      def create
        @drug = Drug.new(drug_params)
        authorize @drug
        if @drug.save
          redirect_to drugs_drugs_path,
                      notice: t(".success", model_name: "drug")
        else
          # Surface a failure message; the form previously re-rendered silently.
          flash[:error] = t(".failed", model_name: "drug")
          render :new
        end
      end

      def index
        @drugs = @drugs_search.result(distinct: true)
        authorize @drugs
        @drugs = @drugs.page(@page).per(@per_page) if request.format.html?
        respond_to do |format|
          format.html
          format.json { render json: @drugs }
        end
      end

      def edit
        @drug = Drug.find(params[:id])
        authorize @drug
      end

      def update
        @drug = Drug.find(params[:id])
        authorize @drug
        if @drug.update(drug_params)
          redirect_to drugs_drugs_path,
                      notice: t(".success", model_name: "drug")
        else
          # Surface a failure message; the form previously re-rendered silently.
          flash[:error] = t(".failed", model_name: "drug")
          render :edit
        end
      end

      # BUGFIX: previously `authorize Drug.destroy(params[:id])` deleted the
      # record BEFORE the authorization check ran, making the check useless.
      # Find, authorize, then destroy.
      def destroy
        drug = Drug.find(params[:id])
        authorize drug
        drug.destroy
        redirect_to drugs_drugs_path,
                    notice: t(".success", model_name: "drug")
      end

      private

      # Strong-parameters whitelist for the drug form.
      def drug_params
        params.require(:drugs_drug).permit(
          :name, :deleted_at, drug_type_ids: []
        )
      end

      # Builds the Ransack-style search object for the index action.
      def prepare_drugs_search
        search_params = params.fetch(:q, {})
        @drugs_search = Drug.search(search_params)
        @drugs_search.sorts = 'name'
      end
    end
  end
end
Added missing notices for drugs controller.
require_dependency 'renalware/drugs'
module Renalware
module Drugs
# CRUD + search for drugs. Authorization appears to use Pundit-style
# `authorize` calls — TODO confirm the policy framework in BaseController.
class DrugsController < BaseController
  include Renalware::Concerns::Pageable

  # NOTE(review): before_filter is deprecated in favour of before_action
  # in Rails >= 4.0 — confirm the app's Rails version before switching.
  before_filter :prepare_drugs_search, only: :index
  before_filter :prepare_paging, only: :index

  # Drugs matching a medication "switch"; responds with HTML or a slim
  # JSON payload of id/name pairs.
  def selected_drugs
    @selected_drugs = Drug.for(params[:medication_switch])
    authorize @selected_drugs
    respond_to do |format|
      format.html
      format.json { render :json => @selected_drugs.as_json(:only => [:id, :name]) }
    end
  end

  def new
    @drug = Drug.new
    authorize @drug
  end

  def create
    @drug = Drug.new(drug_params)
    authorize @drug
    if @drug.save
      redirect_to drugs_drugs_path,
                  notice: t(".success", model_name: "drug")
    else
      flash[:error] = t(".failed", model_name: "drug")
      render :new
    end
  end

  # Searchable listing; paging is applied for HTML responses only.
  def index
    @drugs = @drugs_search.result(distinct: true)
    authorize @drugs
    @drugs = @drugs.page(@page).per(@per_page) if request.format.html?
    respond_to do |format|
      format.html
      format.json { render json: @drugs }
    end
  end

  def edit
    @drug = Drug.find(params[:id])
    authorize @drug
  end

  def update
    @drug = Drug.find(params[:id])
    authorize @drug
    if @drug.update(drug_params)
      redirect_to drugs_drugs_path,
                  notice: t(".success", model_name: "drug")
    else
      flash[:error] = t(".failed", model_name: "drug")
      render :edit
    end
  end

  # BUGFIX: previously `authorize Drug.destroy(params[:id])` deleted the
  # record *before* the authorization check ran, so an unauthorized user
  # could still destroy drugs. Load, authorize, then destroy.
  def destroy
    @drug = Drug.find(params[:id])
    authorize @drug
    @drug.destroy
    redirect_to drugs_drugs_path,
                notice: t(".success", model_name: "drug")
  end

  private

  # Strong parameters for create/update.
  def drug_params
    params.require(:drugs_drug).permit(
      :name, :deleted_at, drug_type_ids: []
    )
  end

  # Ransack-style search object for index, sorted by name.
  def prepare_drugs_search
    search_params = params.fetch(:q, {})
    @drugs_search = Drug.search(search_params)
    @drugs_search.sorts = 'name'
  end
end
end
end
|
# Decides whether a raw User-Agent string belongs to a supported browser,
# a bot, or a known snippet-collecting crawler.
class UserAgentValidator
  Browser = Struct.new(:browser, :version)

  # Minimum supported browser versions; compared against the parsed agent
  # via the useragent gem's comparison support (see is_supported_browser).
  SupportedBrowsers = [
    Browser.new('Safari', '10'),
    Browser.new('Firefox', '60'),
    Browser.new('Internet Explorer', '11'),
    Browser.new('Chrome', '66'),
    Browser.new('Opera', '51'),
    Browser.new('Edge', '15')
  ].freeze

  # Link-preview crawlers that are always allowed through.
  SupportedSnippetCollectors = [
    Regexp.new("facebookexternalhit"),
    Regexp.new("https://developers.google.com/\\+/web/snippet/")
  ].freeze

  # Convenience entry point for one-shot checks.
  def self.user_agent_supported?(user_agent)
    new(user_agent).user_agent_supported?
  end

  def initialize(user_agent)
    @user_agent = user_agent
  end

  # True-ish for supported browsers, bots and snippet collectors.
  def user_agent_supported?
    is_supported_browser || user_agent.bot? || is_snippet_collector
  end

  # Matches the raw UA string against the snippet-collector patterns;
  # returns MatchData or nil.
  def is_snippet_collector
    @user_agent.match(Regexp.union(SupportedSnippetCollectors))
  end

  # Delegate unknown messages to the parsed user agent when it responds.
  # BUGFIX: the old `user_agent.send(method) rescue super(method)` modifier
  # swallowed *any* StandardError raised inside the delegated call and
  # turned it into NoMethodError; it also dropped arguments and blocks.
  def method_missing(method, *args, &block)
    parsed = user_agent
    if parsed.respond_to?(method)
      parsed.send(method, *args, &block)
    else
      super
    end
  end

  # Keep respond_to? consistent with method_missing delegation.
  def respond_to_missing?(method, include_private = false)
    user_agent.respond_to?(method, include_private) || super
  end

  private

  def is_supported_browser
    SupportedBrowsers.detect { |browser| user_agent >= browser }
  end

  # Parse once and memoize — previously the string was re-parsed on
  # every call.
  def user_agent
    @parsed_user_agent ||= UserAgent.parse(@user_agent)
  end
end
lower browser support versions
# Decides whether a raw User-Agent string belongs to a supported browser,
# a bot, or a known snippet-collecting crawler.
class UserAgentValidator
  Browser = Struct.new(:browser, :version)

  # Minimum supported browser versions; compared against the parsed agent
  # via the useragent gem's comparison support (see is_supported_browser).
  SupportedBrowsers = [
    Browser.new('Safari', '10'),
    Browser.new('Firefox', '55'),
    Browser.new('Internet Explorer', '11'),
    Browser.new('Chrome', '55'),
    Browser.new('Opera', '51'),
    Browser.new('Edge', '15')
  ].freeze

  # Link-preview crawlers that are always allowed through.
  SupportedSnippetCollectors = [
    Regexp.new("facebookexternalhit"),
    Regexp.new("https://developers.google.com/\\+/web/snippet/")
  ].freeze

  # Convenience entry point for one-shot checks.
  def self.user_agent_supported?(user_agent)
    new(user_agent).user_agent_supported?
  end

  def initialize(user_agent)
    @user_agent = user_agent
  end

  # True-ish for supported browsers, bots and snippet collectors.
  def user_agent_supported?
    is_supported_browser || user_agent.bot? || is_snippet_collector
  end

  # Matches the raw UA string against the snippet-collector patterns;
  # returns MatchData or nil.
  def is_snippet_collector
    @user_agent.match(Regexp.union(SupportedSnippetCollectors))
  end

  # Delegate unknown messages to the parsed user agent when it responds.
  # BUGFIX: the old `user_agent.send(method) rescue super(method)` modifier
  # swallowed *any* StandardError raised inside the delegated call and
  # turned it into NoMethodError; it also dropped arguments and blocks.
  def method_missing(method, *args, &block)
    parsed = user_agent
    if parsed.respond_to?(method)
      parsed.send(method, *args, &block)
    else
      super
    end
  end

  # Keep respond_to? consistent with method_missing delegation.
  def respond_to_missing?(method, include_private = false)
    user_agent.respond_to?(method, include_private) || super
  end

  private

  def is_supported_browser
    SupportedBrowsers.detect { |browser| user_agent >= browser }
  end

  # Parse once and memoize — previously the string was re-parsed on
  # every call.
  def user_agent
    @parsed_user_agent ||= UserAgent.parse(@user_agent)
  end
end
|
require 'google/apis/gmail_v1'
require 'googleauth'
require 'googleauth/stores/file_token_store'
require 'launchy'
require 'forwardable'
# Retry if rate-limit.
Google::Apis::RequestOptions.default.retries = 5
module Glima
# Thin wrapper around Google::Auth::UserAuthorizer backed by a
# file-based token store.
class Authorizer
  def initialize(client_id, client_secret, scope, token_store_path)
    id    = Google::Auth::ClientId.new(client_id, client_secret)
    store = Google::Auth::Stores::FileTokenStore.new(file: token_store_path)
    @authorizer = Google::Auth::UserAuthorizer.new(id, scope, store)
  end

  # Stored credentials for +user_id+, or nil when none exist yet.
  def credentials(user_id = "default")
    @authorizer.get_credentials(user_id)
  end

  # Run the OAuth out-of-band flow: open the consent URL in a browser
  # (falling back to printing it), ask the user for the resulting code,
  # then exchange and store the credentials.
  def auth_interactively(user_id = "default", shell = Thor.new.shell)
    oob_uri = "urn:ietf:wg:oauth:2.0:oob"
    url = @authorizer.get_authorization_url(base_url: oob_uri)
    begin
      Launchy.open(url)
    rescue StandardError
      puts "Open URL in your browser:\n #{url}"
    end
    code = shell.ask "Enter the resulting code:"
    @authorizer.get_and_store_credentials_from_code(user_id:  user_id,
                                                    code:     code,
                                                    base_url: oob_uri)
  end
end # Authorizer
# High-level Gmail client: delegates raw API calls to the underlying
# GmailService and adds datastore-aware fetch/scan helpers.
class GmailClient
  extend Forwardable

  def_delegators :@client,
                 # Users.history
                 :list_user_histories,
                 # Users.labels
                 :list_user_labels,
                 :get_user_label,
                 :patch_user_label,
                 # Users.messages
                 :get_user_message,
                 :insert_user_message,
                 :list_user_messages,
                 :modify_message,
                 :trash_user_message,
                 # Users.threads
                 :get_user_thread,
                 # Users getProfile
                 :get_user_profile,
                 # Non-resources
                 :batch

  # Find nearby messages from pivot_mail, yielding each wrapped in
  # Glima::Resource::Mail.
  # A `nearby' message:
  #   + has same From: address
  #   + has near Date: field (+-1day)
  # with the pivot_mail.
  def nearby_mails(pivot_mail)
    from  = "from:#{pivot_mail.from}"
    date1 = (pivot_mail.date.to_date - 1).strftime("after:%Y/%m/%d")
    date2 = (pivot_mail.date.to_date + 1).strftime("before:%Y/%m/%d")
    # BUGFIX: exclude trashed messages, otherwise already-deleted
    # duplicates keep showing up as "nearby".
    query = "#{from} -in:trash #{date1} #{date2}"
    scan_batch("+all", query) do |message|
      next if pivot_mail.id == message.id
      yield Glima::Resource::Mail.new(message)
    end
  end

  # * message types by format:
  # | field/format:   | list | minimal | raw | value type      |
  # |-----------------+------+---------+-----+-----------------|
  # | id              | ○   | ○      | ○  | string          |
  # | threadId        | ○   | ○      | ○  | string          |
  # | labelIds        |      | ○      | ○  | string[]        |
  # | snippet         |      | ○      | ○  | string          |
  # | historyId       |      | ○      | ○  | unsigned long   |
  # | internalDate    |      | ○      | ○  | long            |
  # | sizeEstimate    |      | ○      | ○  | int             |
  # |-----------------+------+---------+-----+-----------------|
  # | payload         |      |         |     | object          |
  # | payload.headers |      |         |     | key/value pairs |
  # | raw             |      |         | ○  | bytes           |
  #
  # Fetch message +id+, using the cheap "minimal" format when the local
  # datastore already holds the raw body, "raw" otherwise. Yields
  # (mail, err) and returns the wrapped mail (nil on error).
  def get_user_smart_message(id)
    fmt = if @datastore.exist?(id) then "minimal" else "raw" end
    mail = nil
    @client.get_user_message('me', id, format: fmt) do |m, err|
      mail = Glima::Resource::Mail.new(@datastore.update(m)) if m
      yield(mail, err)
    end
    return mail
  end

  # True when any non-loopback IPv4 interface has an address.
  def online?
    Socket.getifaddrs.select {|i|
      i.addr.ipv4? and ! i.addr.ipv4_loopback?
    }.map(&:addr).map(&:ip_address).length > 0
  end

  # Build the authorized Gmail service.
  # (The previous `return @client` was dropped — Ruby's `new` ignores
  # initialize's return value, so it was dead code.)
  def initialize(config, datastore)
    authorizer = Authorizer.new(config.client_id,
                                config.client_secret,
                                Google::Apis::GmailV1::AUTH_SCOPE,
                                config.token_store)
    credentials = authorizer.credentials(config.default_user) ||
                  authorizer.auth_interactively(config.default_user)
    @datastore = datastore
    @client = Google::Apis::GmailV1::GmailService.new
    @client.client_options.application_name = 'glima'
    @client.authorization = credentials
    @client.authorization.username = config.default_user # for IMAP
  end

  # Block until the IMAP IDLE watch fires for +label+.
  # label == nil means "[Gmail]/All Mail"
  def wait(label = nil)
    @imap ||= Glima::ImapWatch.new("imap.gmail.com", @client.authorization)
    @imap.wait(label)
  end

  # List messages in +folder+ matching +search_or_range+ and fetch them
  # in one batch, yielding each datastore-updated message.
  def scan_batch(folder, search_or_range = nil, &block)
    qp = Glima::QueryParameter.new(folder, search_or_range)
    list_user_messages('me', qp.to_hash) do |res, error|
      fail "#{error}" if error
      ids = (res.messages || []).map(&:id)
      unless ids.empty?
        batch_on_messages(ids) do |message|
          yield message if block
        end
        # context.save_page_token(res.next_page_token)
      end
    end
  rescue Glima::QueryParameter::FormatError => e
    STDERR.print "Error: " + e.message + "\n"
  end

  private

  # Batched get_user_message over +ids+, preferring "minimal" format for
  # messages the datastore already caches.
  def batch_on_messages(ids, &block)
    @client.batch do |batch_client|
      ids.each do |id|
        fmt = if @datastore.exist?(id) then "minimal" else "raw" end
        batch_client.get_user_message('me', id, format: fmt) do |m, err|
          fail "#{err}" if err
          message = @datastore.update(m)
          yield message
        end
      end
    end
  end
end # class GmailClient
end # module Glima
Ignore trash on searching nearby mails
require 'google/apis/gmail_v1'
require 'googleauth'
require 'googleauth/stores/file_token_store'
require 'launchy'
require 'forwardable'
# Retry if rate-limit.
Google::Apis::RequestOptions.default.retries = 5
module Glima
# Thin wrapper around Google::Auth::UserAuthorizer backed by a
# file-based token store.
class Authorizer
  def initialize(client_id, client_secret, scope, token_store_path)
    id    = Google::Auth::ClientId.new(client_id, client_secret)
    store = Google::Auth::Stores::FileTokenStore.new(file: token_store_path)
    @authorizer = Google::Auth::UserAuthorizer.new(id, scope, store)
  end

  # Stored credentials for +user_id+, or nil when none exist yet.
  def credentials(user_id = "default")
    @authorizer.get_credentials(user_id)
  end

  # Run the OAuth out-of-band flow: open the consent URL in a browser
  # (falling back to printing it), ask the user for the resulting code,
  # then exchange and store the credentials.
  def auth_interactively(user_id = "default", shell = Thor.new.shell)
    oob_uri = "urn:ietf:wg:oauth:2.0:oob"
    url = @authorizer.get_authorization_url(base_url: oob_uri)
    begin
      Launchy.open(url)
    rescue StandardError
      puts "Open URL in your browser:\n #{url}"
    end
    code = shell.ask "Enter the resulting code:"
    @authorizer.get_and_store_credentials_from_code(user_id:  user_id,
                                                    code:     code,
                                                    base_url: oob_uri)
  end
end # Authorizer
# High-level Gmail client: delegates raw API calls to the underlying
# GmailService and adds datastore-aware fetch/scan helpers.
class GmailClient
  extend Forwardable

  def_delegators :@client,
                 # Users.history
                 :list_user_histories,
                 # Users.labels
                 :list_user_labels,
                 :get_user_label,
                 :patch_user_label,
                 # Users.messages
                 :get_user_message,
                 :insert_user_message,
                 :list_user_messages,
                 :modify_message,
                 :trash_user_message,
                 # Users.threads
                 :get_user_thread,
                 # Users getProfile
                 :get_user_profile,
                 # Non-resources
                 :batch

  # Find nearby messages from pivot_mail, yielding each wrapped in
  # Glima::Resource::Mail. Trashed messages are excluded.
  # A `nearby' message:
  #   + has same From: address
  #   + has near Date: field (+-1day)
  # with the pivot_mail.
  def nearby_mails(pivot_mail)
    from  = "from:#{pivot_mail.from}"
    date1 = (pivot_mail.date.to_date - 1).strftime("after:%Y/%m/%d")
    date2 = (pivot_mail.date.to_date + 1).strftime("before:%Y/%m/%d")
    query = "#{from} -in:trash #{date1} #{date2}"
    scan_batch("+all", query) do |message|
      next if pivot_mail.id == message.id
      yield Glima::Resource::Mail.new(message)
    end
  end

  # * message types by format:
  # | field/format:   | list | minimal | raw | value type      |
  # |-----------------+------+---------+-----+-----------------|
  # | id              | ○   | ○      | ○  | string          |
  # | threadId        | ○   | ○      | ○  | string          |
  # | labelIds        |      | ○      | ○  | string[]        |
  # | snippet         |      | ○      | ○  | string          |
  # | historyId       |      | ○      | ○  | unsigned long   |
  # | internalDate    |      | ○      | ○  | long            |
  # | sizeEstimate    |      | ○      | ○  | int             |
  # |-----------------+------+---------+-----+-----------------|
  # | payload         |      |         |     | object          |
  # | payload.headers |      |         |     | key/value pairs |
  # | raw             |      |         | ○  | bytes           |
  #
  # Fetch message +id+, using the cheap "minimal" format when the local
  # datastore already holds the raw body, "raw" otherwise. Yields
  # (mail, err) and returns the wrapped mail (nil on error).
  def get_user_smart_message(id)
    fmt = if @datastore.exist?(id) then "minimal" else "raw" end
    mail = nil
    @client.get_user_message('me', id, format: fmt) do |m, err|
      mail = Glima::Resource::Mail.new(@datastore.update(m)) if m
      yield(mail, err)
    end
    return mail
  end

  # True when any non-loopback IPv4 interface has an address.
  def online?
    Socket.getifaddrs.select {|i|
      i.addr.ipv4? and ! i.addr.ipv4_loopback?
    }.map(&:addr).map(&:ip_address).length > 0
  end

  # Build the authorized Gmail service.
  # BUGFIX (idiom): dropped the trailing `return @client` — Ruby's `new`
  # ignores initialize's return value, so it was dead and misleading.
  def initialize(config, datastore)
    authorizer = Authorizer.new(config.client_id,
                                config.client_secret,
                                Google::Apis::GmailV1::AUTH_SCOPE,
                                config.token_store)
    credentials = authorizer.credentials(config.default_user) ||
                  authorizer.auth_interactively(config.default_user)
    @datastore = datastore
    @client = Google::Apis::GmailV1::GmailService.new
    @client.client_options.application_name = 'glima'
    @client.authorization = credentials
    @client.authorization.username = config.default_user # for IMAP
  end

  # Block until the IMAP IDLE watch fires for +label+.
  # label == nil means "[Gmail]/All Mail"
  def wait(label = nil)
    @imap ||= Glima::ImapWatch.new("imap.gmail.com", @client.authorization)
    @imap.wait(label)
  end

  # List messages in +folder+ matching +search_or_range+ and fetch them
  # in one batch, yielding each datastore-updated message.
  def scan_batch(folder, search_or_range = nil, &block)
    qp = Glima::QueryParameter.new(folder, search_or_range)
    list_user_messages('me', qp.to_hash) do |res, error|
      fail "#{error}" if error
      ids = (res.messages || []).map(&:id)
      unless ids.empty?
        batch_on_messages(ids) do |message|
          yield message if block
        end
        # context.save_page_token(res.next_page_token)
      end
    end
  rescue Glima::QueryParameter::FormatError => e
    STDERR.print "Error: " + e.message + "\n"
  end

  private

  # Batched get_user_message over +ids+, preferring "minimal" format for
  # messages the datastore already caches.
  def batch_on_messages(ids, &block)
    @client.batch do |batch_client|
      ids.each do |id|
        fmt = if @datastore.exist?(id) then "minimal" else "raw" end
        batch_client.get_user_message('me', id, format: fmt) do |m, err|
          fail "#{err}" if err
          message = @datastore.update(m)
          yield message
        end
      end
    end
  end
end # class GmailClient
end # module Glima
|
module ReportController::Reports::Editor
extend ActiveSupport::Concern
CHARGEBACK_ALLOWED_FIELD_SUFFIXES = %w(_cost -owner_name _metric -provider_name -provider_uid -project_uid -archived -chargeback_rates).freeze
# Entry point for creating a brand-new report; delegates to the common
# edit flow.
def miq_report_new
  assert_privileges("miq_report_new")
  # Drop any :id left over when Add was pressed from a report's show screen.
  @_params.delete(:id)
  miq_report_edit
end
# Start a copy of an existing report: load it, blank its id so it will
# save as a new record, then drop into the edit screens (read-only tree).
def miq_report_copy
  assert_privileges("miq_report_copy")
  @report = nil # discard any cached report object
  if params[:tab]
    # Tab switch within the copy workflow
    check_tabs
  else
    @sb[:miq_tab] = "edit_1"
    @rpt = MiqReport.find(params[:id])
    @rpt.id = nil # treat the loaded report as a brand-new one
    set_form_vars
  end
  build_edit_screen
  @ina_form = @lock_tree = true
  replace_right_cell
end
# Add/edit a report. Dispatches on the pressed button: Cancel, Add/Save,
# or the Reset / initial-entry flow.
def miq_report_edit
  assert_privileges("miq_report_edit")
  case params[:button]
  when "cancel"
    @edit[:rpt_id] ?
      add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport"), :name => @edit[:rpt_title]}) :
      add_flash(_("Add of new %{model} was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport")})
    @edit = session[:edit] = nil # clean out the saved info
    replace_right_cell
  when "add", "save"
    id = params[:id] ? params[:id] : "new"
    return unless load_edit("report_edit__#{id}", "replace_cell__explorer")
    get_form_vars
    @changed = (@edit[:new] != @edit[:current])
    @rpt = @edit[:rpt_id] ? find_by_id_filtered(MiqReport, params[:id]) :
           MiqReport.new
    set_record_vars(@rpt)
    unless valid_report?(@rpt)
      build_edit_screen
      replace_right_cell
      return
    end
    # Charts require a sort field to group on
    if @edit[:new][:graph_type] && (@edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING)
      add_flash(_("Report can not be saved unless sort field has been configured for Charts"), :error)
      @sb[:miq_tab] = "edit_4"
      build_edit_screen
      replace_right_cell
      return
    end
    if @rpt.save
      # update report name in menu if name is edited
      menu_repname_update(@edit[:current][:name], @edit[:new][:name]) if @edit[:current][:name] != @edit[:new][:name]
      AuditEvent.success(build_saved_audit(@rpt, @edit))
      @edit[:rpt_id] ?
        add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name}) :
        add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name})
      # only do this for new reports
      unless @edit[:rpt_id]
        self.x_node = "xx-#{@sb[:rpt_menu].length}_xx-#{@sb[:rpt_menu].length}-0"
        setnode_for_customreport
      end
      @edit = session[:edit] = nil # clean out the saved info
      if role_allows?(:feature => "miq_report_widget_editor")
        # all widgets for this report
        get_all_widgets("report", from_cid(x_node.split('_').last))
      end
      replace_right_cell(:replace_trees => [:reports])
    else
      # BUGFIX: was `rpt.errors` — an undefined local that raised
      # NameError instead of flashing the validation errors.
      @rpt.errors.each do |field, msg|
        add_flash("#{field.to_s.capitalize} #{msg}", :error)
      end
      @in_a_form = true
      session[:changed] = @changed ? true : false
      @changed = true
      replace_right_cell
    end
  else
    # Reset button, tab change, or first entry into the editor
    add_flash(_("All changes have been reset"), :warning) if params[:button] == "reset"
    @in_a_form = true
    @report = nil # Clear any saved report object
    if params[:tab] # Came in to change the tab
      @rpt = @edit[:rpt_id] ? MiqReport.find(@edit[:rpt_id]) :
             MiqReport.new
      check_tabs
      build_edit_screen
    else
      @sb[:miq_tab] = "edit_1"
      @rpt = params[:id] && params[:id] != "new" ? MiqReport.find(params[:id]) :
             MiqReport.new
      if @rpt.rpt_type == "Default"
        flash = "Default reports can not be edited"
        redirect_to :action => "show", :id => @rpt.id, :flash_msg => flash, :flash_error => true
        return
      end
      set_form_vars
      build_edit_screen
    end
    @changed = (@edit[:new] != @edit[:current])
    session[:changed] = @changed
    @lock_tree = true
    replace_right_cell
  end
end
# AJAX driven routine to check for changes in ANY field on the form.
# Re-reads the form vars, rebuilds the screen state, then pushes only
# the affected partials back to the browser.
def form_field_changed
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  get_form_vars
  build_edit_screen
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    # Refresh flash messages unless only the column lists changed
    page.replace("flash_msg_div", :partial => "layouts/flash_msg") unless @refresh_div && @refresh_div != "column_lists"
    page.replace(@refresh_div, :partial => @refresh_partial) if @refresh_div
    # Chart/timeline sample previews piggyback on their settings divs
    page.replace("chart_sample_div", :partial => "form_chart_sample") if @refresh_div == "chart_div"
    page.replace("tl_sample_div", :partial => "form_tl_sample") if @refresh_div == "tl_settings_div"
    page.replace_html("calc_#{@calc_div}_div", :text => @calc_val) if @calc_div
    page << "miqSparkle(false);"
    page << javascript_for_miq_button_visibility_changed(@changed)
    if @tl_changed # Reload the screen if the timeline data was changed
      page.replace_html("tl_sample_div", :partial => "form_tl_sample") if @tl_field != NOTHING_STRING
    elsif @formatting_changed # Reload the screen if the formatting pulldowns need to be reset
      page.replace_html("formatting_div", :partial => "form_formatting")
    elsif @tl_repaint
      # page << "tl.paint();"
      page << javascript_hide("notification")
    end
  end
end
# AJAX handler: the user toggled between the Record Filter and Display
# Filter sections; remember which one is active and re-render the
# filter partial.
def filter_change
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  # Use an explicit MatchData instead of the cryptic $& global; the
  # regexp is fully anchored so match[0] is the whole button name.
  if (match = /^(record|display)_filter$/.match(params[:button].to_s))
    @expkey = match[0].to_sym
  end
  render :update do |page|
    page << javascript_prologue
    page.replace("filter_div", :partial => "form_filter")
    page << "miqSparkle(false);"
  end
end
private
# Build the edit-screen state for the currently selected tab.
# Tab numbers: 1=Columns, 2=Formatting, 3=Filter, 4=Summary, 5=Charts,
# 6=Timeline, 7=Preview, 8=Consolidation (9=Styling has no setup here).
def build_edit_screen
  build_tabs
  get_time_profiles # Get time profiles list (global and user specific)
  case @sb[:miq_tab].split("_")[1]
  when "1" # Select columns
    @edit[:models] ||= reportable_models
    # Add the blank choice if no table chosen yet
    # @edit[:models].insert(0,["<Choose>", "<Choose>"]) if @edit[:new][:model] == nil && @edit[:models][0][0] != "<Choose>"
    if @edit[:new][:model].nil?
      if @edit[:models][0][0] != "<Choose>"
        @edit[:models].insert(0, ["<Choose>", "<Choose>"])
      end
    else
      if @edit[:models][0][0] == "<Choose>"
        @edit[:models].delete_at(0)
      end
    end
  when "8" # Consolidate
    # Build group chooser arrays
    @edit[:new][:pivot].options = @edit[:new][:fields].dup
    @pivot = @edit[:new][:pivot]
  when "2" # Formatting
    # @edit[:calc_xml] = build_calc_combo_xml # Get the combobox XML for any numeric fields
  when "3" # Filter
    # Build record filter expression
    if @edit[:miq_exp] || # Is this stored as an MiqExp object
       ["new", "copy", "create"].include?(request.parameters["action"]) # or it's a new condition
      @edit[:record_filter][:exp_idx] ||= 0 # Start at first exp
      new_record_filter = @edit[:new][:record_filter]
      @edit[:record_filter][:expression] = copy_hash(new_record_filter) unless new_record_filter.blank?
      @expkey = :record_filter
      # Initialize the exp array
      exp_array(:init, @edit[:record_filter][:expression]) if @edit[:record_filter][:exp_array].nil?
      @edit[:record_filter][:exp_table] = exp_build_table(@edit[:record_filter][:expression])
      exp_get_prefill_types # Build prefill lists
      @edit[:record_filter][:exp_model] = @edit[:new][:model] # Set the model for the expression editor
    end
    # Build display filter expression
    @edit[:display_filter][:exp_idx] ||= 0 # Start at first exp
    new_display_filter = @edit[:new][:display_filter]
    @edit[:display_filter][:expression] = copy_hash(new_display_filter) unless new_display_filter.blank?
    @expkey = :display_filter
    # Initialize the exp array
    exp_array(:init, @edit[:display_filter][:expression]) if @edit[:display_filter][:exp_array].nil?
    @edit[:display_filter][:exp_table] = exp_build_table(@edit[:display_filter][:expression])
    cols = @edit[:new][:field_order]
    @edit[:display_filter][:exp_available_fields] = MiqReport.display_filter_details(cols, :field)
    cols = @edit[:new][:fields]
    @edit[:display_filter][:exp_available_tags] = MiqReport.display_filter_details(cols, :tag)
    @edit[:display_filter][:exp_model] = "_display_filter_" # Set model for display filter
    @expkey = :record_filter # Start with Record Filter showing
    if @edit[:new][:perf_interval] && !@edit[:new][:time_profile]
      set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
    end
  when "4" # Summarize
    # Build sort chooser arrays(@edit[:new][:fields], :field)
    @sortby1 = @edit[:new][:sortby1]
    @sortby2 = @edit[:new][:sortby2]
    @sort1 = @edit[:new][:field_order].dup
    # Second sort chooser excludes the field already picked for sort 1
    @sort2 = @sort1.dup.delete_if { |s| s[1] == @sortby1.split("__").first }
  when "5" # Charts
    options = chart_fields_options
    if options.empty?
      @edit[:new][:chart_column] = nil
    else
      # NOTE(review): this expression's value is discarded — it looks
      # like it was meant to reset :chart_column when the saved choice
      # is no longer among the options; confirm intent before changing.
      options[0][1] unless options.detect { |_, v| v == @edit[:new][:chart_column] }
    end
  when "6" # Timeline
    # Only datetime columns can drive the timeline
    @tl_fields = []
    @edit[:new][:fields].each do |field|
      if MiqReport.get_col_type(field[1]) == :datetime
        @tl_fields.push(field)
      end
    end
    @tl_field = @edit[:new][:tl_field]
    if @edit[:new][:tl_position] == "Last"
      @position_time = format_timezone(Time.now, "UTC", nil)
    else
      @position_time = format_timezone(Time.now - 1.year, "UTC", nil)
    end
    @timeline = true if @tl_field != NOTHING_STRING
    @tl_json = sample_timeline
  when "7" # Preview
    # generate preview report when
  end
  @in_a_form = true
  if ["new", "copy", "create"].include?(request.parameters["action"])
    # drop_breadcrumb( {:name=>"Add Report", :url=>"/report/new"} )
    @gtl_url = "/new"
  else
    # drop_breadcrumb( {:name=>"Edit Report", :url=>"/report/edit"} )
    @gtl_url = "/edit"
  end
end
# [human-readable label, model name] pairs for the base-model drop-down.
def reportable_models
  MiqReport.reportable_models.map do |model|
    label = Dictionary.gettext(model, :type => :model, :notfound => :titleize, :plural => true)
    [label, model]
  end
end
# Fill in default start/end offsets for the chosen C&U interval, leaving
# any values the user already set untouched.
def ensure_perf_interval_defaults
  interval = @edit[:new][:perf_interval]
  if interval == "hourly"
    @edit[:new][:perf_end]   ||= "0"
    @edit[:new][:perf_start] ||= 1.day.to_s
  elsif interval == "daily"
    @edit[:new][:perf_end]   ||= "0"
    @edit[:new][:perf_start] ||= 2.days.to_s
  end
end
# Reset report column fields if model or interval was changed — all
# column-derived edit state (fields, sorting, filters, charts, trend
# and limit settings) is cleared back to defaults.
def reset_report_col_fields
  @edit[:new][:fields] = [] # Clear fields array
  @edit[:new][:headers] = {} # Clear headers hash
  @edit[:new][:pivot] = ReportController::PivotOptions.new
  @edit[:new][:sortby1] = NOTHING_STRING # Clear sort fields
  @edit[:new][:sortby2] = NOTHING_STRING
  @edit[:new][:filter_operator] = nil
  @edit[:new][:filter_string] = nil
  @edit[:new][:categories] = []
  @edit[:new][:graph_type] = nil # Clear graph field
  @edit[:new][:chart_mode] = nil
  @edit[:new][:chart_column] = nil
  @edit[:new][:perf_trend_col] = nil
  @edit[:new][:perf_trend_db] = nil
  @edit[:new][:perf_trend_pct1] = nil
  @edit[:new][:perf_trend_pct2] = nil
  @edit[:new][:perf_trend_pct3] = nil
  @edit[:new][:perf_limit_col] = nil
  @edit[:new][:perf_limit_val] = nil
  @edit[:new][:record_filter] = nil # Clear record filter
  @edit[:new][:display_filter] = nil # Clear display filter
  @edit[:miq_exp] = true
end
# Build @tabs for the editor; which tabs appear depends on the model:
# trend and chargeback models get a reduced set.
def build_tabs
  req = "edit"
  raw_tabs =
    if @edit[:new][:model] == TREND_MODEL
      [["1", _("Columns")],
       ["3", _("Filter")],
       ["7", _("Preview")]]
    elsif Chargeback.db_is_chargeback?(@edit[:new][:model].to_s)
      [["1", _("Columns")],
       ["2", _("Formatting")],
       ["3", _("Filter")],
       ["7", _("Preview")]]
    else
      [["1", _("Columns")],
       ["8", _("Consolidation")],
       ["2", _("Formatting")],
       ["9", _("Styling")],
       ["3", _("Filter")],
       ["4", _("Summary")],
       ["5", _("Charts")],
       ["6", _("Timeline")],
       ["7", _("Preview")]]
    end
  @tabs = raw_tabs.map { |num, label| ["#{req}_#{num}", label] }
  # Mark the active tab from the number stored in the sandbox
  @active_tab = "#{req}_#{@sb[:miq_tab].split("_")[1]}"
end
# Get variables from edit form. Each form section is handled by its own
# gfv_* helper; the trailing loop sweeps remaining params by key prefix
# (hdr_, fmt_, calc_, pivotcalc_, style*).
def get_form_vars
  @assigned_filters = []
  gfv_report_fields # Global report fields
  gfv_move_cols_buttons # Move cols buttons
  gfv_model # Model changes
  gfv_trend # Trend fields
  gfv_performance # Performance fields
  gfv_chargeback # Chargeback fields
  gfv_charts # Charting fields
  gfv_pivots # Consolidation fields
  gfv_sort # Summary fields
  gfv_timeline # Timeline fields
  # Check for key prefixes (params starting with certain keys)
  params.each do |key, value|
    # See if any headers were sent in
    @edit[:new][:headers][key.split("_")[1..-1].join("_")] = value if key.split("_").first == "hdr"
    # See if any formats were sent in
    if key.split("_").first == "fmt"
      key2 = key.gsub("___", ".") # Put period sub table separator back into the key
      @edit[:new][:col_formats][key2.split("_")[1..-1].join("_")] = value.blank? ? nil : value.to_sym
      @formatting_changed = value.blank?
    end
    # See if any group calculation checkboxes were sent in
    gfv_key_group_calculations(key, value) if key.split("_").first == "calc"
    # See if any pivot calculation checkboxes were sent in
    gfv_key_pivot_calculations(key, value) if key.split("_").first == "pivotcalc"
    # Check for style fields
    prefix = key.split("_").first
    gfv_key_style(key, value) if prefix && prefix.starts_with?("style")
  end
end
# Handle params starting with "calc": per-column group (summary)
# calculation checkboxes, keyed by the field's position in field_order.
def gfv_key_group_calculations(key, value)
  idx = key.split("_").last.to_i
  field = @edit[:new][:field_order][idx].last
  groupings = value.split(",").sort.map(&:to_sym).reject { |g| g == :null }
  @edit[:new][:col_options][field_to_col(field)] = {:grouping => groupings}
end
# Handle params starting with "pivotcalc": pivot aggregate checkboxes.
# Records the chosen aggregates for the field and derives a header for
# each aggregated pseudo-column, then rebuilds the field order.
def gfv_key_pivot_calculations(key, value)
  idx = key.split("_").last.to_i
  field = @edit[:new][:fields][idx].last
  aggregates = value.split(',').sort.map(&:to_sym)
  @edit[:pivot_cols][field] = aggregates
  aggregates.each do |agg|
    # Header = original header + aggregate function name
    @edit[:new][:headers][field + "__#{agg}"] = @edit[:new][:headers][field] + " (#{agg.to_s.titleize})"
  end
  build_field_order
end
# Handle params starting with "style": per-column conditional styling.
# Key format is "<parm>_<field index>_<style index>".
def gfv_key_style(key, value)
  parm, f_idx, s_idx = key.split("_") # Get the parm type, field index, and style index
  f_idx = f_idx.to_i
  s_idx = s_idx.to_i
  f = @edit[:new][:field_order][f_idx] # Get the field element
  field_sub_type = MiqExpression.get_col_info(f.last)[:format_sub_type]
  field_data_type = MiqExpression.get_col_info(f.last)[:data_type]
  field_name = f.last.include?(".") ? f.last.split(".").last.tr("-", ".") : f.last.split("-").last
  case parm
  when "style" # New CSS class chosen
    if value.blank?
      @edit[:new][:col_options][field_name][:style].delete_at(s_idx)
      @edit[:new][:col_options][field_name].delete(:style) if @edit[:new][:col_options][field_name][:style].empty?
      @edit[:new][:col_options].delete(field_name) if @edit[:new][:col_options][field_name].empty?
    else
      @edit[:new][:col_options][field_name] ||= {}
      @edit[:new][:col_options][field_name][:style] ||= []
      @edit[:new][:col_options][field_name][:style][s_idx] ||= {}
      @edit[:new][:col_options][field_name][:style][s_idx][:class] = value.to_sym
      # Default operator/value/suffix depend on the column's data type
      ovs = case field_data_type
            when :boolean
              ["DEFAULT", "true"]
            when :integer, :float
              ["DEFAULT", "", MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units) ? MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units).first : nil]
            else
              ["DEFAULT", ""]
            end
      op ||= ovs[0]
      val ||= ovs[1]
      suffix ||= ovs[2]
      @edit[:new][:col_options][field_name][:style][s_idx][:operator] ||= op
      @edit[:new][:col_options][field_name][:style][s_idx][:value] ||= val
      @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] ||= suffix if suffix
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleop" # New operator chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:operator] = value
    if value == "DEFAULT"
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
      # BUGFIX: remove all style entries *after* this one. The previous
      # loop called delete_at with indexes starting at 0 while the array
      # shifted, deleting the wrong elements; slice! removes the
      # intended tail in one step.
      @edit[:new][:col_options][field_name][:style].slice!((s_idx + 1)..-1)
    elsif value.include?("NIL") || value.include?("EMPTY")
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
    elsif [:datetime, :date].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = EXP_TODAY # Set default date value
    elsif [:boolean].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = true # Set default boolean value
    else
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = "" # Set default value
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleval" # New value chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value] = value
  when "stylesuffix" # New suffix chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] = value.to_sym
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  end
end
# Copy the global report fields (page size, queue timeout, row limit,
# name, title) from params into the edit hash when present.
def gfv_report_fields
  @edit[:new][:pdf_page_size] = params[:pdf_page_size] if params[:pdf_page_size]
  if params[:chosen_queue_timeout]
    timeout = params[:chosen_queue_timeout]
    @edit[:new][:queue_timeout] = timeout.blank? ? nil : timeout.to_i
  end
  if params[:row_limit]
    @edit[:new][:row_limit] = params[:row_limit].blank? ? "" : params[:row_limit]
  end
  @edit[:new][:name] = params[:name] if params[:name]
  @edit[:new][:title] = params[:title] if params[:title]
end
# Dispatch the column-move buttons (right/left/up/down/top/bottom).
# NOTE(review): `case ... end && build_field_order` rebuilds the field
# order only when a button matched AND the move_cols_* helper returned
# truthy — confirm the helpers' return values before restructuring.
def gfv_move_cols_buttons
  case params[:button]
  when 'right' then move_cols_right
  when 'left' then move_cols_left
  when 'up' then move_cols_up
  when 'down' then move_cols_down
  when 'top' then move_cols_top
  when 'bottom' then move_cols_bottom
  end && build_field_order
end
# Handle a change of the report's base model (db table). All
# column/filter state is reset since the old fields no longer apply;
# performance/trend and chargeback models also get interval defaults.
def gfv_model
  if params[:chosen_model] && # Check for db table changed
     params[:chosen_model] != @edit[:new][:model]
    @edit[:new][:model] = params[:chosen_model]
    @edit[:new][:perf_interval] = nil # Clear performance interval setting
    @edit[:new][:tz] = nil
    # C&U / trend models need an interval, averaging mode and timezone
    if [:performance, :trend].include?(model_report_type(@edit[:new][:model]))
      @edit[:new][:perf_interval] ||= "daily" # Default to Daily
      @edit[:new][:perf_avgs] ||= "time_interval"
      @edit[:new][:tz] = session[:user_tz]
      ensure_perf_interval_defaults
    end
    # Chargeback models get chargeback interval/grouping defaults
    if Chargeback.db_is_chargeback?(@edit[:new][:model])
      @edit[:new][:cb_model] = Chargeback.report_cb_model(@edit[:new][:model])
      @edit[:new][:cb_interval] ||= "daily" # Default to Daily
      @edit[:new][:cb_interval_size] ||= 1
      @edit[:new][:cb_end_interval_offset] ||= 1
      @edit[:new][:cb_groupby] ||= "date" # Default to Date grouping
      @edit[:new][:tz] = session[:user_tz]
    end
    reset_report_col_fields
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  end
end
# Handle trend-report form fields: the trend column, the limit
# column/value, and the three target-percent thresholds. Exactly one
# of the params is expected per request (elsif chain).
def gfv_trend
  if params[:chosen_trend_col]
    @edit[:new][:perf_interval] ||= "daily" # Default to Daily
    @edit[:new][:perf_target_pct1] ||= 100 # Default to 100%
    if params[:chosen_trend_col] == "<Choose>"
      @edit[:new][:perf_trend_db] = nil
      @edit[:new][:perf_trend_col] = nil
    else
      @edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col] = params[:chosen_trend_col].split("-")
      # Percent-based trend columns get a fixed 100% limit, no limit col
      if MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |af| af.last == params[:chosen_trend_col] }.first.include?("(%)")
        @edit[:new][:perf_limit_val] = 100
        @edit[:new][:perf_limit_col] = nil
        @edit[:percent_col] = true
      else
        @edit[:percent_col] = false
        @edit[:new][:perf_limit_val] = nil
      end
      ensure_perf_interval_defaults
      @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
    # @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
  elsif params[:chosen_limit_col]
    if params[:chosen_limit_col] == "<None>"
      @edit[:new][:perf_limit_col] = nil
    else
      @edit[:new][:perf_limit_col] = params[:chosen_limit_col]
      @edit[:new][:perf_limit_val] = nil
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
  elsif params[:chosen_limit_val]
    @edit[:new][:perf_limit_val] = params[:chosen_limit_val]
  elsif params[:percent1]
    @edit[:new][:perf_target_pct1] = params[:percent1].to_i
  elsif params[:percent2]
    @edit[:new][:perf_target_pct2] = params[:percent2] == "<None>" ? nil : params[:percent2].to_i
  elsif params[:percent3]
    @edit[:new][:perf_target_pct3] = params[:percent3] == "<None>" ? nil : params[:percent3].to_i
  end
end
# Handle performance-report form fields sent from the edit screen.
# Exactly one branch fires per request, keyed off which param arrived.
def gfv_performance
  new_vals = @edit[:new]
  if params[:chosen_interval]
    new_vals[:perf_interval] = params[:chosen_interval]
    # Offsets from the previous interval no longer apply
    new_vals[:perf_start] = nil
    new_vals[:perf_end] = nil
    ensure_perf_interval_defaults
    reset_report_col_fields
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params[:perf_avgs]
    new_vals[:perf_avgs] = params[:perf_avgs]
  elsif params[:chosen_start]
    new_vals[:perf_start] = params[:chosen_start]
  elsif params[:chosen_end]
    new_vals[:perf_end] = params[:chosen_end]
  elsif params[:chosen_tz]
    new_vals[:tz] = params[:chosen_tz]
  elsif params.key?(:chosen_time_profile)
    chosen = params[:chosen_time_profile]
    new_vals[:time_profile] = chosen.blank? ? nil : chosen.to_i
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  end
end
# Handle chargeback-report form fields sent from the edit screen.
# Branch order matches the original param precedence; exactly one fires.
def gfv_chargeback
  new_vals = @edit[:new]
  # Normalize a blank param to nil, otherwise keep it as-is
  nil_if_blank = ->(val) { val.blank? ? nil : val }
  if params.key?(:cb_show_typ)
    new_vals[:cb_show_typ] = nil_if_blank.call(params[:cb_show_typ])
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params.key?(:cb_tag_cat)
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
    if params[:cb_tag_cat].blank?
      new_vals[:cb_tag_cat] = nil
      new_vals[:cb_tag_value] = nil
    else
      new_vals[:cb_tag_cat] = params[:cb_tag_cat]
      # Rebuild the tag value choices for the newly chosen category
      @edit[:cb_tags] = Classification.find_by_name(params[:cb_tag_cat]).entries.each_with_object({}) { |e, h| h[e.name] = e.description }
    end
  elsif params.key?(:cb_owner_id)
    new_vals[:cb_owner_id] = nil_if_blank.call(params[:cb_owner_id])
  elsif params.key?(:cb_tenant_id)
    new_vals[:cb_tenant_id] = params[:cb_tenant_id].blank? ? nil : params[:cb_tenant_id].to_i
  elsif params.key?(:cb_tag_value)
    new_vals[:cb_tag_value] = nil_if_blank.call(params[:cb_tag_value])
  elsif params.key?(:cb_entity_id)
    new_vals[:cb_entity_id] = nil_if_blank.call(params[:cb_entity_id])
  elsif params.key?(:cb_provider_id)
    new_vals[:cb_provider_id] = nil_if_blank.call(params[:cb_provider_id])
    new_vals[:cb_entity_id] = "all"
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params.key?(:cb_groupby)
    new_vals[:cb_groupby] = params[:cb_groupby]
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params.key?(:cb_groupby_tag)
    new_vals[:cb_groupby_tag] = params[:cb_groupby_tag]
  elsif params[:cb_interval]
    new_vals[:cb_interval] = params[:cb_interval]
    # Changing the interval resets the window to a single most-recent period
    new_vals[:cb_interval_size] = 1
    new_vals[:cb_end_interval_offset] = 1
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params[:cb_interval_size]
    new_vals[:cb_interval_size] = params[:cb_interval_size].to_i
  elsif params[:cb_end_interval_offset]
    new_vals[:cb_end_interval_offset] = params[:cb_end_interval_offset].to_i
  end
end
# Handle chart form fields sent from the edit screen. Unlike the other gfv_*
# handlers these checks are independent ifs, so several can fire per request.
def gfv_charts
  new_vals = @edit[:new]
  if params[:chosen_graph] && params[:chosen_graph] != new_vals[:graph_type]
    if params[:chosen_graph] == "<No chart>"
      new_vals[:graph_type] = nil
      # Choosing <No chart> reverts the remaining chart settings to their saved values
      [:graph_count, :graph_other, :chart_mode, :chart_column].each do |key|
        new_vals[key] = @edit[:current][key]
      end
    else
      new_vals[:graph_other] = true if new_vals[:graph_type].nil? # First chart chosen: reset "other"
      new_vals[:graph_type] = params[:chosen_graph]
      new_vals[:graph_count] ||= GRAPH_MAX_COUNT
      new_vals[:chart_mode] ||= 'counts'
      new_vals[:chart_column] ||= ''
    end
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_mode] && params[:chart_mode] != new_vals[:chart_mode]
    new_vals[:chart_mode] = params[:chart_mode]
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_column] && params[:chart_column] != new_vals[:chart_column]
    new_vals[:chart_column] = params[:chart_column]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_count] && params[:chosen_count] != new_vals[:graph_count]
    new_vals[:graph_count] = params[:chosen_count]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_other] # Checkbox is only present while a chart is showing
    chosen = (params[:chosen_other].to_s == "1")
    if new_vals[:graph_other] != chosen
      new_vals[:graph_other] = chosen
      @refresh_div = "chart_sample_div"
      @refresh_partial = "form_chart_sample"
    end
  end
end
# Handle pivot form fields sent from the edit screen. Delegates the param
# bookkeeping to PivotOptions, then keeps @edit[:pivot_cols] consistent with
# the chosen grouping fields.
def gfv_pivots
  @edit[:new][:pivot] ||= ReportController::PivotOptions.new
  pivot = @edit[:new][:pivot]
  pivot.update(params)
  return unless params[:chosen_pivot1] || params[:chosen_pivot2] || params[:chosen_pivot3]
  if pivot.by1 == NOTHING_STRING
    @edit[:pivot_cols] = {} # No grouping fields selected: nothing to pivot on
  else
    # Grouping fields themselves must not appear as pivot calc columns
    [pivot.by1, pivot.by2, pivot.by3].each { |by_field| @edit[:pivot_cols].delete(by_field) }
  end
  build_field_order
  @refresh_div = "consolidate_div"
  @refresh_partial = "form_consolidate"
end
# Handle sort-tab form fields sent from the edit screen: sort order, grouping,
# the two sort fields and their date/time break suffixes, the break format,
# and the default break label stored in @edit[:new][:col_options].
def gfv_sort
  @edit[:new][:order] = params[:sort_order] if params[:sort_order]
  if params[:sort_group] # If grouping changed,
    @edit[:new][:group] = params[:sort_group]
    @refresh_div = "sort_div" # Resend the sort tab
    @refresh_partial = "form_sort"
    # Bug fix: this condition used assignment (=) instead of comparison (==),
    # which clobbered :chart_mode with a boolean on every grouping change.
    if @edit[:new][:chart_mode] == 'values' && !chart_mode_values_allowed?
      @edit[:new][:chart_mode] = 'counts'
    end
  end
  @edit[:new][:hide_details] = (params[:hide_details].to_s == "1") if params[:hide_details]
  if params[:chosen_sort1] && params[:chosen_sort1] != @edit[:new][:sortby1].split("__").first
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = params[:chosen_sort1]
    # Sort 2 is cleared when it duplicates sort 1 or sort 1 was cleared
    @edit[:new][:sortby2] = NOTHING_STRING if params[:chosen_sort1] == NOTHING_STRING || params[:chosen_sort1] == @edit[:new][:sortby2].split("__").first
    @refresh_div = "sort_div"
    @refresh_partial = "form_sort"
  elsif params[:chosen_sort2] && params[:chosen_sort2] != @edit[:new][:sortby2].split("__").first
    @edit[:new][:sortby2] = params[:chosen_sort2]
  # Look at the 1st sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort1_suffix] && params[:sort1_suffix].to_s != @edit[:new][:sortby1].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = @edit[:new][:sortby1].split("__").first +
                            (params[:sort1_suffix].blank? ? "" : "__#{params[:sort1_suffix]}")
  # Look at the 2nd sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort2_suffix] && params[:sort2_suffix].to_s != @edit[:new][:sortby2].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby2].split("-").last) if @edit[:new][:sortby2].split("__")[1]
    # (A dead duplicate assignment was removed here -- its value was
    # immediately overwritten by the assignment below.)
    @edit[:new][:sortby2] = @edit[:new][:sortby2].split("__").first +
                            (params[:sort2_suffix].blank? ? "" : "__#{params[:sort2_suffix]}")
  # Look at the break format
  else
    co_key1 = @edit[:new][:sortby1].split("-").last
    if params[:break_format] &&
       params[:break_format].to_s != @edit[:new].fetch_path(:col_options, co_key1)
      if params[:break_format].blank? || # Remove format and col key (if empty)
         params[:break_format].to_sym == MiqReport.get_col_info(@edit[:new][:sortby1])[:default_format]
        if @edit[:new][:col_options][co_key1]
          @edit[:new][:col_options][co_key1].delete(:break_format)
          @edit[:new][:col_options].delete(co_key1) if @edit[:new][:col_options][co_key1].empty?
        end
      else # Add col and format to col_options
        @edit[:new][:col_options][co_key1] ||= {}
        @edit[:new][:col_options][co_key1][:break_format] = params[:break_format].to_sym
      end
    end
  end
  # Clear/set up the default break label
  sort1 = @edit[:new][:sortby1].split("-").last unless @edit[:new][:sortby1].blank?
  if @edit[:new][:group] == "No" # Clear any existing break label
    if @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1].delete(:break_label)
      @edit[:new][:col_options].delete(sort1) if @edit[:new][:col_options][sort1].empty?
    end
  else # Create a break label, if none there already
    unless @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1] ||= {}
      sort, suffix = @edit[:new][:sortby1].split("__")
      # Label is the field's display name plus the suffix's display name, e.g. "Date (Month): "
      @edit[:new][:col_options][sort1][:break_label] =
        @edit[:new][:field_order].collect { |f| f.first if f.last == sort }.compact.join.strip +
        (suffix ? " (#{MiqReport.date_time_break_suffixes.collect { |s| s.first if s.last == suffix }.compact.join})" : "") +
        ": "
    end
  end
  # TODO: Not allowing user to change break label until editor is changed to not use form observe
  # if params[:break_label]
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last] ||= Hash.new
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last][:break_label] == params[:break_label]
  # end
end
# Handle timeline form fields sent from the edit screen. Switching between
# "no field" and a real field redraws the settings area; changing between two
# real fields only repaints the timeline.
def gfv_timeline
  if params[:chosen_tl] && params[:chosen_tl] != @edit[:new][:tl_field]
    had_no_field = @edit[:new][:tl_field] == NOTHING_STRING
    clearing_field = params[:chosen_tl] == NOTHING_STRING
    if had_no_field || clearing_field
      @refresh_div = "tl_settings_div"
      @refresh_partial = "form_tl_settings"
      @tl_changed = true
    else
      @tl_repaint = true
    end
    @edit[:new][:tl_field] = params[:chosen_tl]
  elsif params[:chosen_position] && params[:chosen_position] != @edit[:new][:tl_position]
    @tl_changed = true
    @edit[:new][:tl_position] = params[:chosen_position]
  end
end
# Handle the "move selected fields to the report" button: append the columns
# picked in the available-fields list (params[:available_fields]) to
# @edit[:new][:fields], and derive a display header for each into
# @edit[:new][:headers]. On success sets @refresh_div/@refresh_partial to
# redraw the column lists; otherwise queues a flash error (nothing selected,
# or the MAX_REPORT_COLUMNS cap would be exceeded).
def move_cols_right
  if !params[:available_fields] || params[:available_fields].length == 0 || params[:available_fields][0] == ""
    add_flash(_("No fields were selected to move down"), :error)
  elsif params[:available_fields].length + @edit[:new][:fields].length > MAX_REPORT_COLUMNS
    add_flash(_("Fields not added: Adding the selected %{count} fields will exceed the maximum of %{max} fields") % {:count => params[:available_fields].length + @edit[:new][:fields].length, :max => MAX_REPORT_COLUMNS},
              :error)
  else
    # Iterate the full available-field list (not params) so added fields keep
    # the canonical [display_name, encoded_name] pairs and ordering
    MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).each do |af| # Go thru all available columns
      if params[:available_fields].include?(af[1]) # See if this column was selected to move
        unless @edit[:new][:fields].include?(af) # Only move if it's not there already
          @edit[:new][:fields].push(af) # Add it to the new fields list
          if af[0].include?(":") # Not a base column
            table = af[0].split(" : ")[0].split(".")[-1] # Get the table name
            table = table.singularize unless table == "OS" # Singularize, except "OS"
            temp = af[0].split(" : ")[1]
            # Avoid doubling the table name when the column's display name already starts with it
            temp_header = table == temp.split(" ")[0] ? af[0].split(" : ")[1] : table + " " + af[0].split(" : ")[1]
          else
            temp_header = af[0].strip # Base column, just use it without leading space
          end
          @edit[:new][:headers][af[1]] = temp_header # Add the column title to the headers hash
        end
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  end
end
# Handle the "remove selected fields from the report" button: delete the
# columns picked in params[:selected_fields] from @edit[:new][:fields], and
# scrub every piece of derived state that referenced them -- headers, column
# formats (including "<field>__<calc>" pivot keys), pivot selections, sort
# fields, and per-column options. Removal is refused (flash error) when a
# field is still referenced by the display filter expression.
def move_cols_left
  if !params[:selected_fields] || params[:selected_fields].length == 0 || params[:selected_fields][0] == ""
    add_flash(_("No fields were selected to move up"), :error)
  elsif display_filter_contains?(params[:selected_fields])
    add_flash(_("No fields were moved up"), :error)
  else
    @edit[:new][:fields].each do |nf| # Go thru all new fields
      if params[:selected_fields].include?(nf.last) # See if this col was selected to move
        # Clear out headers and formatting
        @edit[:new][:headers].delete(nf.last) # Delete the column name from the headers hash
        @edit[:new][:headers].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        @edit[:new][:col_formats].delete(nf.last) # Delete the column name from the col_formats hash
        @edit[:new][:col_formats].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        # Clear out pivot field options
        @edit[:new][:pivot].drop_from_selection(nf.last)
        @edit[:pivot_cols].delete(nf.last) # Delete the column name from the pivot_cols hash
        # Clear out sort options
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby1].split("__").first # If deleting the first sort field
          if MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) # If sort has a break suffix
            @edit[:new][:col_options].delete(field_to_col(@edit[:new][:sortby1])) # Remove the <col>__<suffix> from col_options
          end
          unless @edit[:new][:group] == "No" # If we were grouping, remove all col_options :group keys
            @edit[:new][:col_options].each do |co_key, co_val|
              co_val.delete(:grouping) # Remove :group key
              @edit[:new][:col_options].delete(co_key) if co_val.empty? # Remove the col, if empty
            end
          end
          @edit[:new][:sortby1] = NOTHING_STRING
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        # NOTE(review): this guard checks :sortby1 presence but compares/clears
        # :sortby2 -- presumably sortby2 only exists alongside sortby1; verify.
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby2].split("__").first # If deleting the second sort field
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        @edit[:new][:col_options].delete(field_to_col(nf.last)) # Remove this column from the col_options hash
      end
    end
    @edit[:new][:fields].delete_if { |nf| params[:selected_fields].include?(nf.last) } # Remove selected fields
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  end
end
# See if any of the fields passed in are present in the display filter
# expression. Queues a flash error per matching field and returns true when
# at least one flash message exists (i.e. removal should be blocked).
def display_filter_contains?(fields)
  filter = @edit[:new][:display_filter]
  return false if filter.nil? # No display filter defined
  exp_text = filter.inspect
  @edit[:new][:fields].each do |field| # Check every currently selected field
    next unless fields.include?(field.last) # Only fields being removed matter
    if exp_text.include?(field.last)
      add_flash(_("%{name} is currently being used in the Display Filter") %
        {:name => field.first}, :error)
    end
  end
  !@flash_array.nil?
end
# Determine whether the currently selected fields form one contiguous run
# inside @edit[:new][:fields]. Returns [consecutive?, first_idx, last_idx],
# where the indexes locate the first and last selected field.
def selected_consecutive?
  first_idx = last_idx = 0
  @edit[:new][:fields].each_with_index do |field, idx|
    first_idx = idx if field[1] == params[:selected_fields].first
    next unless field[1] == params[:selected_fields].last
    last_idx = idx
    break
  end
  span = last_idx - first_idx + 1
  [span <= params[:selected_fields].length, first_idx, last_idx]
end
# Set record variables to new values
# Copies everything the user configured in @edit[:new] onto the MiqReport
# record +rpt+: the simple attributes, graph settings, record/display filter
# expressions, performance/trend/chargeback db_options, timeline, grouping,
# and the column bookkeeping (cols/col_order/headers/sortby) built via
# add_field_to_col_order. Mutates +rpt+ in place; callers save it themselves.
def set_record_vars(rpt)
  # Set the simple string/number fields
  rpt.template_type = "report"
  rpt.name = @edit[:new][:name].to_s.strip
  rpt.title = @edit[:new][:title].to_s.strip
  rpt.db = @edit[:new][:model]
  rpt.rpt_group = @edit[:new][:rpt_group]
  rpt.rpt_type = @edit[:new][:rpt_type]
  rpt.priority = @edit[:new][:priority]
  rpt.categories = @edit[:new][:categories]
  rpt.col_options = @edit[:new][:col_options]
  rpt.order = @edit[:new][:sortby1].nil? ? nil : @edit[:new][:order] # Order is meaningless without a sort field
  # Set the graph fields
  if @edit[:new][:sortby1] == NOTHING_STRING || @edit[:new][:graph_type].nil?
    rpt.dims = nil
    rpt.graph = nil
  else
    if @edit[:new][:graph_type] =~ /^(Pie|Donut)/ # Pie and Donut charts must be set to 1 dimension
      rpt.dims = 1
    else
      rpt.dims = @edit[:new][:sortby2] == NOTHING_STRING ? 1 : 2 # Set dims to 1 or 2 based on presence of sortby2
    end
    # Values mode needs a data column; default to the first available choice
    if @edit[:new][:chart_mode] == 'values' && @edit[:new][:chart_column].blank?
      options = chart_fields_options
      @edit[:new][:chart_column] = options[0][1] unless options.empty?
    end
    rpt.graph = {
      :type   => @edit[:new][:graph_type],
      :mode   => @edit[:new][:chart_mode],
      :column => @edit[:new][:chart_column],
      :count  => @edit[:new][:graph_count],
      :other  => @edit[:new][:graph_other],
    }
  end
  # Set the conditions field (expression); the "???" key marks an unfinished
  # expression element, which must not be saved
  if !@edit[:new][:record_filter].nil? && @edit[:new][:record_filter]["???"].nil?
    rpt.conditions = MiqExpression.new(@edit[:new][:record_filter])
  else
    rpt.conditions = nil
  end
  # Set the display_filter field (expression)
  if !@edit[:new][:display_filter].nil? && @edit[:new][:display_filter]["???"].nil?
    rpt.display_filter = MiqExpression.new(@edit[:new][:display_filter])
  else
    rpt.display_filter = nil
  end
  # Set the performance options
  rpt.db_options = Hash.new
  if model_report_type(rpt.db) == :performance
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:calc_avgs_by] = @edit[:new][:perf_avgs]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    # Start offset is measured from now, so the user-chosen window length is
    # added on top of the end offset (presumed -- verify against perf back end)
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
  elsif model_report_type(rpt.db) == :trend
    rpt.db_options[:rpt_type] = "trend"
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
    rpt.db_options[:trend_db] = @edit[:new][:perf_trend_db]
    rpt.db_options[:trend_col] = @edit[:new][:perf_trend_col]
    rpt.db_options[:limit_col] = @edit[:new][:perf_limit_col] if @edit[:new][:perf_limit_col]
    rpt.db_options[:limit_val] = @edit[:new][:perf_limit_val] if @edit[:new][:perf_limit_val]
    rpt.db_options[:target_pcts] = []
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct1])
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct2]) if @edit[:new][:perf_target_pct2]
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct3]) if @edit[:new][:perf_target_pct3]
  elsif Chargeback.db_is_chargeback?(rpt.db)
    rpt.db_options[:rpt_type] = @edit[:new][:model]
    options = {} # CB options go in db_options[:options] key
    options[:interval] = @edit[:new][:cb_interval]
    options[:interval_size] = @edit[:new][:cb_interval_size]
    options[:end_interval_offset] = @edit[:new][:cb_end_interval_offset]
    # Exactly one "show" scope is stored, mirroring the edit-screen choice
    if @edit[:new][:cb_show_typ] == "owner"
      options[:owner] = @edit[:new][:cb_owner_id]
    elsif @edit[:new][:cb_show_typ] == "tenant"
      options[:tenant_id] = @edit[:new][:cb_tenant_id]
    elsif @edit[:new][:cb_show_typ] == "tag"
      if @edit[:new][:cb_tag_cat] && @edit[:new][:cb_tag_value]
        options[:tag] = "/managed/#{@edit[:new][:cb_tag_cat]}/#{@edit[:new][:cb_tag_value]}"
      end
    elsif @edit[:new][:cb_show_typ] == "entity"
      options[:provider_id] = @edit[:new][:cb_provider_id]
      options[:entity_id] = @edit[:new][:cb_entity_id]
    end
    options[:groupby] = @edit[:new][:cb_groupby]
    options[:groupby_tag] = @edit[:new][:cb_groupby] == 'tag' ? @edit[:new][:cb_groupby_tag] : nil
    rpt.db_options[:options] = options
  end
  rpt.time_profile_id = @edit[:new][:time_profile]
  if @edit[:new][:time_profile]
    time_profile = TimeProfile.find_by_id(@edit[:new][:time_profile])
    rpt.tz = time_profile.tz # Report timezone follows the chosen time profile
  end
  # Set the timeline field
  if @edit[:new][:tl_field] == NOTHING_STRING
    rpt.timeline = nil
  else
    rpt.timeline = Hash.new
    rpt.timeline[:field] = @edit[:new][:tl_field]
    rpt.timeline[:position] = @edit[:new][:tl_position]
  end
  # Set the line break group field
  if @edit[:new][:sortby1] == NOTHING_STRING # If no sort fields
    rpt.group = nil # Clear line break group
  else # Otherwise, check the setting
    case @edit[:new][:group]
    when "Yes"
      rpt.group = "y"
    when "Counts"
      rpt.group = "c"
    else
      rpt.group = nil
    end
  end
  # Set defaults, if not present
  rpt.rpt_group ||= "Custom"
  rpt.rpt_type ||= "Custom"
  # Column arrays are rebuilt from scratch below via add_field_to_col_order
  rpt.cols = []
  rpt.col_order = []
  rpt.col_formats = []
  rpt.headers = []
  rpt.include = Hash.new
  rpt.sortby = @edit[:new][:sortby1] == NOTHING_STRING ? nil : [] # Clear sortby if sortby1 not present, else set up array
  # Add in the chargeback static fields
  if Chargeback.db_is_chargeback?(rpt.db) # For chargeback, add in specific chargeback report options
    rpt = @edit[:new][:model].constantize.set_chargeback_report_options(rpt, @edit)
  end
  # Remove when we support user sorting of trend reports
  if rpt.db == TREND_MODEL
    rpt.sortby = ["resource_name"]
    rpt.order = "Ascending"
  end
  # Build column related report fields
  @pg1 = @pg2 = @pg3 = nil # Init the pivot group cols
  @edit[:new][:fields].each do |field_entry| # Go thru all of the fields
    field = field_entry[1] # Get the encoded fully qualified field name
    if @edit[:new][:pivot].by1 != NOTHING_STRING && # If we are doing pivoting and
       @edit[:pivot_cols].key?(field) # this is a pivot calc column
      @edit[:pivot_cols][field].each do |calc_typ| # Add header/format/col_order for each calc type
        rpt.headers.push(@edit[:new][:headers][field + "__#{calc_typ}"])
        rpt.col_formats.push(@edit[:new][:col_formats][field + "__#{calc_typ}"])
        add_field_to_col_order(rpt, field + "__#{calc_typ}")
      end
    else # Normal field, set header/format/col_order
      rpt.headers.push(@edit[:new][:headers][field])
      rpt.col_formats.push(@edit[:new][:col_formats][field])
      add_field_to_col_order(rpt, field)
    end
  end
  rpt.rpt_options ||= {}
  rpt.rpt_options.delete(:pivot)
  unless @pg1.nil? # Build the pivot group_cols array
    rpt.rpt_options[:pivot] = {}
    rpt.rpt_options[:pivot][:group_cols] = []
    rpt.rpt_options[:pivot][:group_cols].push(@pg1)
    rpt.rpt_options[:pivot][:group_cols].push(@pg2) unless @pg2.nil?
    rpt.rpt_options[:pivot][:group_cols].push(@pg3) unless @pg3.nil?
  end
  # Row limit only applies to ungrouped reports
  if @edit[:new][:group] != "No" || @edit[:new][:row_limit].blank?
    rpt.rpt_options.delete(:row_limit)
  else
    rpt.rpt_options[:row_limit] = @edit[:new][:row_limit].to_i
  end
  # Add pdf page size to rpt_options
  rpt.rpt_options ||= {}
  rpt.rpt_options[:pdf] ||= {}
  rpt.rpt_options[:pdf][:page_size] = @edit[:new][:pdf_page_size] || DEFAULT_PDF_PAGE_SIZE
  rpt.rpt_options[:queue_timeout] = @edit[:new][:queue_timeout]
  # Add hide detail rows option, if grouping
  if rpt.group.nil?
    rpt.rpt_options.delete(:summary)
  else
    rpt.rpt_options[:summary] ||= {}
    rpt.rpt_options[:summary][:hide_detail_rows] = @edit[:new][:hide_details]
  end
  user = current_user
  rpt.user = user
  rpt.miq_group = user.current_group
  rpt.add_includes_for_virtual_custom_attributes
end
# Append one encoded field to the report's column bookkeeping.
# +field+ is "Table.assoc-col" (joined/included column) or "Table-col" (main
# table column), optionally carrying a "__suffix" (date/time break or pivot
# calc). Builds the nested rpt.include hash for joined tables, pushes onto
# rpt.cols/col_order, inserts into rpt.sortby in the right position, and
# records pivot grouping columns in @pg1..@pg3 for the caller.
def add_field_to_col_order(rpt, field)
  # Get the sort columns, removing the suffix if it exists
  sortby1 = MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) ?
    @edit[:new][:sortby1].split("__").first :
    @edit[:new][:sortby1]
  sortby2 = MiqReport.is_break_suffix?(@edit[:new][:sortby2].split("__")[1]) ?
    @edit[:new][:sortby2].split("__").first :
    @edit[:new][:sortby2]
  # Has a period, so it's an include
  if field.include?(".") && !field.include?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
    tables = field.split("-")[0].split(".")[1..-1] # Get the list of tables from before the hyphen
    inc_hash = rpt.include # Start at the main hash
    tables.each_with_index do |table, idx|
      inc_hash[table] ||= {} # Create hash for the table, if it's not there already
      if idx == tables.length - 1 # We're at the end of the field name, so add the column
        inc_hash[table]["columns"] ||= [] # Create the columns array for this table
        f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
        inc_hash[table]["columns"].push(f) unless inc_hash[table]["columns"].include?(f) # Add the field to the columns, if not there
        table_field = tables.join('.') + "." + field.split("-")[1]
        rpt.col_order.push(table_field) # Add the table.field to the col_order array
        if field == sortby1 # Is this the first sort field?
          rpt.sortby = [table_field] + rpt.sortby # Put the field first in the sortby array
        # NOTE(review): this branch compares against the raw :sortby2 (suffix
        # intact) while the main-table branch below uses the stripped sortby2
        # local -- possibly inconsistent; verify intended behavior.
        elsif field == @edit[:new][:sortby2] # Is this the second sort field?
          rpt.sortby.push(table_field) # Add the field to the sortby array
        end
        if field == @edit[:new][:pivot].by1 # Save the group fields
          @pg1 = table_field
        elsif field == @edit[:new][:pivot].by2
          @pg2 = table_field
        elsif field == @edit[:new][:pivot].by3
          @pg3 = table_field
        end
      else # Set up for the next embedded include hash
        inc_hash[table]["include"] ||= {} # Create include hash for next level
        inc_hash = inc_hash[table]["include"] # Point to the new hash
      end
    end
  else # No period, this is a main table column
    if field.include?("__") # Check for pivot calculated field
      f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
      rpt.cols.push(f) unless rpt.cols.include?(f) # Add the original field, if not already there
    else
      rpt.cols.push(field.split("-")[1]) # Grab the field name after the hyphen
    end
    rpt.col_order.push(field.split("-")[1]) # Add the field to the col_order array
    if field == sortby1 # Is this the first sort field?
      rpt.sortby = [@edit[:new][:sortby1].split("-")[1]] + rpt.sortby # Put the field first in the sortby array
    elsif field == sortby2 # Is this the second sort field?
      rpt.sortby.push(@edit[:new][:sortby2].split("-")[1]) # Add the field to the sortby array
    end
    if field == @edit[:new][:pivot].by1 # Save the group fields
      @pg1 = field.split("-")[1]
    elsif field == @edit[:new][:pivot].by2
      @pg2 = field.split("-")[1]
    elsif field == @edit[:new][:pivot].by3
      @pg3 = field.split("-")[1]
    end
  end
end
# Set form variables for edit
def set_form_vars
@edit = {}
@edit[:rpt_id] = @rpt.id # Save a record id to use it later to look a record
@edit[:rpt_title] = @rpt.title
@edit[:rpt_name] = @rpt.name
@edit[:new] = {}
@edit[:key] = "report_edit__#{@rpt.id || "new"}"
if params[:pressed] == "miq_report_copy"
@edit[:new][:rpt_group] = "Custom"
@edit[:new][:rpt_type] = "Custom"
else
@edit[:new][:rpt_group] = @rpt.rpt_group
@edit[:new][:rpt_type] = @rpt.rpt_type
end
# Get the simple string/number fields
@edit[:new][:name] = @rpt.name
@edit[:new][:title] = @rpt.title
@edit[:new][:model] = @rpt.db
@edit[:new][:priority] = @rpt.priority
@edit[:new][:order] = @rpt.order.blank? ? "Ascending" : @rpt.order
# @edit[:new][:graph] = @rpt.graph
# Replaced above line to handle new graph settings Hash
if @rpt.graph.kind_of?(Hash)
@edit[:new][:graph_type] = @rpt.graph[:type]
@edit[:new][:graph_count] = @rpt.graph[:count]
@edit[:new][:chart_mode] = @rpt.graph[:mode]
@edit[:new][:chart_column] = @rpt.graph[:column]
@edit[:new][:graph_other] = @rpt.graph[:other] ? @rpt.graph[:other] : false
else
@edit[:new][:graph_type] = @rpt.graph
@edit[:new][:graph_count] = GRAPH_MAX_COUNT
@edit[:new][:chart_mode] = 'counts'
@edit[:new][:chart_column] = ''
@edit[:new][:graph_other] = true
end
@edit[:new][:dims] = @rpt.dims
@edit[:new][:categories] = @rpt.categories
@edit[:new][:categories] ||= []
@edit[:new][:col_options] = @rpt.col_options.blank? ? {} : @rpt.col_options
# Initialize options
@edit[:new][:perf_interval] = nil
@edit[:new][:perf_start] = nil
@edit[:new][:perf_end] = nil
@edit[:new][:tz] = nil
@edit[:new][:perf_trend_db] = nil
@edit[:new][:perf_trend_col] = nil
@edit[:new][:perf_limit_col] = nil
@edit[:new][:perf_limit_val] = nil
@edit[:new][:perf_target_pct1] = nil
@edit[:new][:perf_target_pct2] = nil
@edit[:new][:perf_target_pct3] = nil
@edit[:new][:cb_interval] = nil
@edit[:new][:cb_interval_size] = nil
@edit[:new][:cb_end_interval_offset] = nil
if [:performance, :trend].include?(model_report_type(@rpt.db))
@edit[:new][:perf_interval] = @rpt.db_options[:interval]
@edit[:new][:perf_avgs] = @rpt.db_options[:calc_avgs_by]
@edit[:new][:perf_end] = @rpt.db_options[:end_offset].to_s
@edit[:new][:perf_start] = (@rpt.db_options[:start_offset] - @rpt.db_options[:end_offset]).to_s
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
if @rpt.time_profile
@edit[:new][:time_profile] = @rpt.time_profile_id
@edit[:new][:time_profile_tz] = @rpt.time_profile.tz
else
set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
end
@edit[:new][:perf_trend_db] = @rpt.db_options[:trend_db]
@edit[:new][:perf_trend_col] = @rpt.db_options[:trend_col]
@edit[:new][:perf_limit_col] = @rpt.db_options[:limit_col]
@edit[:new][:perf_limit_val] = @rpt.db_options[:limit_val]
@edit[:new][:perf_target_pct1], @edit[:new][:perf_target_pct2], @edit[:new][:perf_target_pct3] = @rpt.db_options[:target_pcts]
elsif Chargeback.db_is_chargeback?(@rpt.db)
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
options = @rpt.db_options[:options]
if options.key?(:owner) # Get the owner options
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = options[:owner]
elsif options.key?(:tenant_id) # Get the tenant options
@edit[:new][:cb_show_typ] = "tenant"
@edit[:new][:cb_tenant_id] = options[:tenant_id]
elsif options.key?(:tag) # Get the tag options
@edit[:new][:cb_show_typ] = "tag"
@edit[:new][:cb_tag_cat] = options[:tag].split("/")[-2]
@edit[:new][:cb_tag_value] = options[:tag].split("/")[-1]
@edit[:cb_tags] = {}
cat = Classification.find_by_name(@edit[:new][:cb_tag_cat])
cat.entries.each { |e| @edit[:cb_tags][e.name] = e.description } if cat # Collect the tags, if category is valid
elsif options.key?(:entity_id)
@edit[:new][:cb_show_typ] = "entity"
@edit[:new][:cb_entity_id] = options[:entity_id]
@edit[:new][:cb_provider_id] = options[:provider_id]
end
@edit[:new][:cb_groupby_tag] = options[:groupby_tag] if options.key?(:groupby_tag)
@edit[:new][:cb_model] = Chargeback.report_cb_model(@rpt.db)
@edit[:new][:cb_interval] = options[:interval]
@edit[:new][:cb_interval_size] = options[:interval_size]
@edit[:new][:cb_end_interval_offset] = options[:end_interval_offset]
@edit[:new][:cb_groupby] = options[:groupby]
end
# Only show chargeback users choice if an admin
if admin_user?
@edit[:cb_users] = User.all.each_with_object({}) { |u, h| h[u.userid] = u.name }
@edit[:cb_tenant] = Tenant.all.each_with_object({}) { |t, h| h[t.id] = t.name }
else
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = session[:userid]
@edit[:cb_owner_name] = current_user.name
end
# Get chargeback tags
cats = Classification.categories.collect { |c| c if c.show }.compact # Get categories, sort by name, remove nils
cats.delete_if { |c| c.read_only? || c.entries.length == 0 } # Remove categories that are read only or have no entries
@edit[:cb_cats] = cats.each_with_object({}) { |c, h| h[c.name] = c.description }
@edit[:cb_providers] = {}
@edit[:cb_providers][:container_project] = {}
@edit[:cb_providers][:vm] = {} # Fill this in if entity show type it ever becomes relevent for VMs
@edit[:cb_entities_by_provider_id] = {}
ManageIQ::Providers::ContainerManager.all.each do |provider|
@edit[:cb_providers][:container_project][provider.name] = provider.id
@edit[:cb_entities_by_provider_id][provider.id] = {}
provider.container_projects.all.each do |project|
@edit[:cb_entities_by_provider_id][provider.id][project.id] = project.name
end
end
# Build trend limit cols array
if model_report_type(@rpt.db) == :trend
@edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
end
if [:performance, :trend].include?(model_report_type(@rpt.db))
ensure_perf_interval_defaults
end
expkey = :record_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:record_filter] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Get the conditions MiqExpression
if @rpt.conditions.kind_of?(MiqExpression)
@edit[:new][:record_filter] = @rpt.conditions.exp
@edit[:miq_exp] = true
elsif @rpt.conditions.nil?
@edit[:new][:record_filter] = nil
@edit[:new][:record_filter] = @edit[expkey][:expression] # Copy to new exp
@edit[:miq_exp] = true
end
# Get the display_filter MiqExpression
@edit[:new][:display_filter] = @rpt.display_filter.nil? ? nil : @rpt.display_filter.exp
expkey = :display_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:expression] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0 # Start at first exp
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Build display filter expression
@edit[:new][:display_filter] = @edit[expkey][:expression] if @edit[:new][:display_filter].nil? # Copy to new exp
# Get timeline fields
@edit[:new][:tl_field] = NOTHING_STRING
@edit[:new][:tl_position] = "Last"
if @rpt.timeline.kind_of?(Hash) # Timeline has any data
@edit[:new][:tl_field] = @rpt.timeline[:field] unless @rpt.timeline[:field].blank?
@edit[:new][:tl_position] = @rpt.timeline[:position] unless @rpt.timeline[:position].blank?
end
# Get the pdf page size, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:pdf]
@edit[:new][:pdf_page_size] = @rpt.rpt_options[:pdf][:page_size] || DEFAULT_PDF_PAGE_SIZE
else
@edit[:new][:pdf_page_size] = DEFAULT_PDF_PAGE_SIZE
end
# Get the hide details setting, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:summary]
@edit[:new][:hide_details] = @rpt.rpt_options[:summary][:hide_detail_rows]
else
@edit[:new][:hide_details] = false
end
# Get the timeout if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:queue_timeout]
@edit[:new][:queue_timeout] = @rpt.rpt_options[:queue_timeout]
else
@edit[:new][:queue_timeout] = nil
end
case @rpt.group
when "y"
@edit[:new][:group] = "Yes"
when "c"
@edit[:new][:group] = "Counts"
else
@edit[:new][:group] = "No"
@edit[:new][:row_limit] = @rpt.rpt_options[:row_limit].to_s if @rpt.rpt_options
end
# build selected fields array from the report record
@edit[:new][:sortby1] = NOTHING_STRING # Initialize sortby fields to nothing
@edit[:new][:sortby2] = NOTHING_STRING
@edit[:new][:pivot] = ReportController::PivotOptions.new
if params[:pressed] == "miq_report_new"
@edit[:new][:fields] = []
@edit[:new][:categories] = []
@edit[:new][:headers] = {}
@edit[:new][:col_formats] = {}
@edit[:pivot_cols] = {}
else
build_selected_fields(@rpt) # Create the field related @edit arrays and hashes
end
# Rebuild the tag descriptions in the new fields array to match the ones in available fields
@edit[:new][:fields].each do |nf|
tag = nf.first.split(':')
if nf.first.include?("Managed :")
entry = MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |a| a.last == nf.last }
nf[0] = entry ? entry.first : "#{tag} (Category not found)"
end
end
@edit[:current] = ["copy", "new"].include?(params[:action]) ? {} : copy_hash(@edit[:new])
# For trend reports, check for percent field chosen
if @rpt.db && @rpt.db == TREND_MODEL &&
MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find do|af|
af.last ==
@edit[:new][:perf_trend_db] + "-" + @edit[:new][:perf_trend_col]
end.first.include?("(%)")
@edit[:percent_col] = true
end
end
# Build the :fields array and :headers hash from the rpt record cols and includes hashes
def build_selected_fields(rpt)
  fields = []
  headers = {}
  col_formats = {}
  pivot_cols = {}
  rpt.col_formats ||= Array.new(rpt.col_order.length) # Create array of nils if col_formats not present (backward compat)
  rpt.col_order.each_with_index do |col, idx|
    # Build the internal field key ("db-col" or "db.include-col") and the
    # human-readable field value for each ordered column.
    if col.starts_with?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
      field_key = rpt.db + "-" + col
      field_value = col.gsub(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX, "")
    elsif !col.include?(".") # Main table field
      field_key = rpt.db + "-" + col
      field_value = friendly_model_name(rpt.db) +
                    Dictionary.gettext(rpt.db + "." + col.split("__").first, :type => :column, :notfound => :titleize)
    else # Included table field
      inc_string = find_includes(col.split("__").first, rpt.include) # Get the full include string
      field_key = rpt.db + "." + inc_string.to_s + "-" + col.split(".").last
      if inc_string.to_s.ends_with?(".managed") || inc_string.to_s == "managed"
        # don't titleize tag name, need it to lookup later to get description by tag name
        field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) + col.split(".").last
      else
        field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) +
                      Dictionary.gettext(col.split(".").last.split("__").first, :type => :column, :notfound => :titleize)
      end
    end
    if field_key.include?("__") # Check for calculated pivot column
      field_key1, calc_typ = field_key.split("__")
      pivot_cols[field_key1] ||= []
      pivot_cols[field_key1] << calc_typ.to_sym
      pivot_cols[field_key1].sort! # Sort the array
      fields.push([field_value, field_key1]) unless fields.include?([field_value, field_key1]) # Add original col to fields array
    else
      fields.push([field_value, field_key]) # Add to fields array
    end
    # Create the groupby keys if groupby array is present
    if rpt.rpt_options &&
       rpt.rpt_options[:pivot] &&
       rpt.rpt_options[:pivot][:group_cols] &&
       rpt.rpt_options[:pivot][:group_cols].kind_of?(Array)
      if rpt.rpt_options[:pivot][:group_cols].length > 0
        @edit[:new][:pivot].by1 = field_key if col == rpt.rpt_options[:pivot][:group_cols][0]
      end
      if rpt.rpt_options[:pivot][:group_cols].length > 1
        @edit[:new][:pivot].by2 = field_key if col == rpt.rpt_options[:pivot][:group_cols][1]
      end
      if rpt.rpt_options[:pivot][:group_cols].length > 2
        @edit[:new][:pivot].by3 = field_key if col == rpt.rpt_options[:pivot][:group_cols][2]
      end
    end
    # Create the sortby keys if sortby array is present
    if rpt.sortby.kind_of?(Array)
      if rpt.sortby.length > 0
        # If first sortby field as a break suffix, set up sortby1 with a suffix
        if MiqReport.is_break_suffix?(rpt.sortby[0].split("__")[1])
          sort1, suffix1 = rpt.sortby[0].split("__") # Get sort field and suffix, if present
          @edit[:new][:sortby1] = field_key + (suffix1 ? "__#{suffix1}" : "") if col == sort1
        else # Not a break suffix sort field, just copy the field name to sortby1
          @edit[:new][:sortby1] = field_key if col == rpt.sortby[0]
        end
      end
      if rpt.sortby.length > 1
        if MiqReport.is_break_suffix?(rpt.sortby[1].split("__")[1])
          sort2, suffix2 = rpt.sortby[1].split("__") # Get sort field and suffix, if present
          @edit[:new][:sortby2] = field_key + (suffix2 ? "__#{suffix2}" : "") if col == sort2
        else # Not a break suffix sort field, just copy the field name to sortby1
          @edit[:new][:sortby2] = field_key if col == rpt.sortby[1]
        end
      end
    end
    headers[field_key] = rpt.headers[idx] # Add col to the headers hash
    if field_key.include?("__") # if this a pivot calc field?
      headers[field_key.split("__").first] = field_value # Save the original field key as well
    end
    col_formats[field_key] = rpt.col_formats[idx] # Add col to the headers hash
  end
  # Remove the non-cost and owner columns from the arrays for Chargeback
  if Chargeback.db_is_chargeback?(rpt.db)
    f_len = fields.length
    for f_idx in 1..f_len # Go thru fields in reverse
      f_key = fields[f_len - f_idx].last
      # Keep only columns whose suffix is chargeback-relevant, managed/tag
      # columns, and custom attributes.
      next if f_key.ends_with?(*CHARGEBACK_ALLOWED_FIELD_SUFFIXES) || f_key.include?('managed') || f_key.include?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
      headers.delete(f_key)
      col_formats.delete(f_key)
      fields.delete_at(f_len - f_idx)
    end
  end
  @edit[:new][:fields] = fields
  @edit[:new][:headers] = headers
  @edit[:new][:col_formats] = col_formats
  @edit[:pivot_cols] = pivot_cols
  build_field_order
end
# Create the field_order hash from the fields and pivot_cols structures
def build_field_order
  # Pivoting is active when a pivot object exists and a first group-by
  # column has been chosen.
  pivoting = @edit[:new][:pivot] && @edit[:new][:pivot].by1 != NOTHING_STRING
  @edit[:new][:field_order] = @edit[:new][:fields].flat_map do |field|
    if pivoting && @edit[:pivot_cols].key?(field.last)
      # Expand a pivot-calculation column into one entry per selected
      # aggregate, in the canonical MiqReport::PIVOTS order.
      MiqReport::PIVOTS.each_with_object([]) do |pivot, expanded|
        calc = pivot.first
        next unless @edit[:pivot_cols][field.last].include?(calc)
        expanded << ["#{field.first} (#{calc.to_s.titleize})", "#{field.last}__#{calc}"]
      end
    else
      [field] # plain column: keep as-is
    end
  end
end
# Build the full includes string by finding the column in the includes hash
def find_includes(col, includes)
  # Split "assoc1.assoc2.column" into the association path and the column.
  *assoc_path, field_name = col.split(".")
  first_assoc = assoc_path.first

  direct = includes[first_assoc]
  # Direct hit: this include level lists the column itself.
  return first_assoc if direct && direct["columns"] && direct["columns"].include?(field_name)

  if direct && direct["include"]
    # Drop the leading table and search one level deeper.
    remainder = [assoc_path[1..-1], field_name].flatten.join('.')
    nested = find_includes(remainder, direct["include"])
    return "#{first_assoc}.#{nested}" if nested
  end

  # Fall back to scanning sibling includes for the full column path.
  includes.each_pair do |assoc_name, nested_spec|
    next unless nested_spec["include"]
    nested = find_includes(col, nested_spec["include"])
    return nil if nested.nil? # preserve original semantics: give up on first miss
    return "#{assoc_name}.#{nested}"
  end
  nil
end
# Position the explorer tree (x_node) on the just-saved custom report
# inside its group's "Custom" folder in @sb[:rpt_menu].
def setnode_for_customreport
  @sb[:rpt_menu].each_with_index do |level1_nodes, i|
    next unless level1_nodes[0] == @sb[:grp_title]
    level1_nodes[1].each_with_index do |level2_nodes, k|
      # Check for the existence of the Custom folder in the Reports tree and
      # check if at least one report exists underneath it
      next unless level2_nodes[0].downcase == "custom" && level2_nodes[1].count > 1
      # was: `each_with_index do |report|` -- the index was never bound, so
      # a plain `each` expresses the actual intent.
      level2_nodes[1].each do |report|
        self.x_node = "xx-#{i}_xx-#{i}-#{k}_rep-#{to_cid(@rpt.id)}" if report == @rpt.name
      end
    end
  end
end
# Validate the report being saved: trend settings, selected fields,
# chargeback filter settings, column styling expressions, and model
# errors. Queues flash errors, switches @sb[:miq_tab] to the offending
# tab, and returns true only when no flash errors were added.
def valid_report?(rpt)
  active_tab = 'edit_1'
  if @edit[:new][:model] == TREND_MODEL
    # Trend reports need a trend column and a numeric target limit.
    unless @edit[:new][:perf_trend_col]
      add_flash(_('Trending for is required'), :error)
    end
    unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
      add_flash(_('Trend Target Limit must be configured'), :error)
    end
    if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
      add_flash(_('Trend Target Limit must be numeric'), :error)
    end
  elsif @edit[:new][:fields].empty?
    add_flash(_('At least one Field must be selected'), :error)
  end
  if Chargeback.db_is_chargeback?(@edit[:new][:model])
    # Each "show costs by" choice requires its companion selection.
    msg = case @edit[:new][:cb_show_typ]
          when nil
            _('Show Costs by must be selected')
          when 'owner'
            _('An Owner must be selected') unless @edit[:new][:cb_owner_id]
          when 'tenant'
            _('A Tenant Category must be selected') unless @edit[:new][:cb_tenant_id]
          when 'tag'
            if !@edit[:new][:cb_tag_cat]
              _('A Tag Category must be selected')
            elsif !@edit[:new][:cb_tag_value]
              _('A Tag must be selected')
            end
          when 'entity'
            unless @edit[:new][:cb_entity_id]
              _("A specific #{ui_lookup(:model => @edit[:new][:cb_model])} or all must be selected")
            end
          end
    if @edit[:new][:cb_groupby] == "tag" && !@edit[:new][:cb_groupby_tag].present?
      msg = _('A Group by Tag must be selected')
    end
    if msg
      add_flash(msg, :error)
      active_tab = 'edit_3' # Filter tab holds the chargeback settings
    end
  end
  # Validate column styles
  unless rpt.col_options.blank? || @edit[:new][:field_order].nil?
    @edit[:new][:field_order].each do |f| # Go thru all of the cols in order
      col = f.last.split('.').last.split('-').last
      if val = rpt.col_options[col] # Skip if no options for this col
        next unless val.key?(:style) # Skip if no style options
        val[:style].each_with_index do |s, s_idx| # Go through all of the configured ifs
          if s[:value]
            if e = MiqExpression.atom_error(rpt.col_to_expression_col(col.split('__').first), # See if the value is in error
                                            s[:operator],
                                            s[:value])
              # NOTE(review): msg is assigned here but never read again --
              # only the add_flash side effect matters.
              msg = case s_idx + 1
                    when 1
                      add_flash(_("Styling for '%{item}', first value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    when 2
                      add_flash(_("Styling for '%{item}', second value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    when 3
                      add_flash(_("Styling for '%{item}', third value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    end
              active_tab = 'edit_9' # Styling tab
            end
          end
        end
      end
    end
  end
  unless rpt.valid? # Check the model for errors
    rpt.errors.each do |field, msg|
      add_flash("#{field.to_s.capitalize} #{msg}", :error)
    end
  end
  @sb[:miq_tab] = active_tab if flash_errors?
  @flash_array.nil? # true (valid) when nothing was flashed
end
# Check for valid report configuration in @edit[:new]
# Check if chargeback field is valid
def valid_chargeback_fields
  # Each "show costs by" type is valid only when its companion selection
  # is present; unknown/unset types are invalid. Returns a truthy value
  # (often the selected id) or false/nil, matching the original contract.
  options = @edit[:new]
  case options[:cb_show_typ]
  when 'owner'  then options[:cb_owner_id]
  when 'tenant' then options[:cb_tenant_id]
  when 'tag'    then options[:cb_tag_cat] && options[:cb_tag_value]
  when 'entity' then options[:cb_entity_id] && options[:cb_provider_id]
  else false
  end
end
# Check for tab switch error conditions
def check_tabs
  @sb[:miq_tab] = params[:tab]
  active_tab = 'edit_1'
  # Each tab has prerequisites; flash an error and fall back to a safe
  # tab when the requested one is not yet available.
  case @sb[:miq_tab].split('_')[1]
  when '8' # Consolidation
    if @edit[:new][:fields].empty?
      add_flash(_('Consolidation tab is not available until at least 1 field has been selected'), :error)
    end
  when '2' # Formatting
    if @edit[:new][:fields].empty?
      add_flash(_('Formatting tab is not available until at least 1 field has been selected'), :error)
    end
  when '3' # Filter
    if @edit[:new][:model] == TREND_MODEL
      unless @edit[:new][:perf_trend_col]
        add_flash(_('Filter tab is not available until Trending for field has been selected'), :error)
      end
      unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
        add_flash(_('Filter tab is not available until Trending Target Limit has been configured'), :error)
      end
      if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
        add_flash(_('Trend Target Limit must be numeric'), :error)
      end
    elsif @edit[:new][:fields].empty?
      add_flash(_('Filter tab is not available until at least 1 field has been selected'), :error)
    end
  when '4' # Summary
    if @edit[:new][:fields].empty?
      add_flash(_('Summary tab is not available until at least 1 field has been selected'), :error)
    end
  when '5' # Charts: also needs a primary sort field
    if @edit[:new][:fields].empty?
      add_flash(_('Charts tab is not available until at least 1 field has been selected'), :error)
    elsif @edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING
      add_flash(_('Charts tab is not available unless a sort field has been selected'), :error)
      active_tab = 'edit_4'
    end
  when '6' # Timeline: needs at least one datetime field
    if @edit[:new][:fields].empty?
      add_flash(_('Timeline tab is not available until at least 1 field has been selected'), :error)
    else
      found = false
      @edit[:new][:fields].each do |field|
        if MiqReport.get_col_type(field[1]) == :datetime
          found = true
          break
        end
      end
      unless found
        add_flash(_('Timeline tab is not available unless at least 1 time field has been selected'), :error)
      end
    end
  when '7' # Preview
    if @edit[:new][:model] == TREND_MODEL
      unless @edit[:new][:perf_trend_col]
        add_flash(_('Preview tab is not available until Trending for field has been selected'), :error)
      end
      unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
        add_flash(_('Preview tab is not available until Trend Target Limit has been configured'), :error)
      end
      if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
        add_flash(_('Trend Target Limit: Value must be numeric'), :error)
      end
    elsif @edit[:new][:fields].empty?
      add_flash(_('Preview tab is not available until at least 1 field has been selected'), :error)
    elsif Chargeback.db_is_chargeback?(@edit[:new][:model]) && !valid_chargeback_fields
      add_flash(_('Preview tab is not available until Chargeback Filters has been configured'), :error)
      active_tab = 'edit_3'
    end
  when '9' # Styling
    if @edit[:new][:fields].empty?
      add_flash(_('Styling tab is not available until at least 1 field has been selected'), :error)
    end
  end
  @sb[:miq_tab] = active_tab if flash_errors?
end
end
Add VM GUID to report fields for chargeback reports
+ fix related RuboCop issues
(transferred from ManageIQ/manageiq@acd6056234a5923b051dd715c3836471387f5bd7)
module ReportController::Reports::Editor
extend ActiveSupport::Concern
# Column-name suffixes that are kept for chargeback-based reports; all
# other columns (except managed/tag and custom-attribute columns) are
# stripped out in build_selected_fields.
CHARGEBACK_ALLOWED_FIELD_SUFFIXES = %w(
  _cost
  -owner_name
  _metric
  -provider_name
  -provider_uid
  -project_uid
  -archived
  -chargeback_rates
  -vm_guid
).freeze
# Explorer "Add a new Report" action; drops any stale record id and
# reuses the shared edit workflow.
def miq_report_new
  assert_privileges("miq_report_new")
  @_params.delete :id # in case the add button was pressed from the report show screen
  miq_report_edit
end
# Explorer "Copy Report" action: loads the source report, clears its id
# so saving creates a new record, and renders the edit screen.
def miq_report_copy
  assert_privileges("miq_report_copy")
  @report = nil # Clear any saved report object
  if params[:tab] # Came in to change the tab
    check_tabs
    build_edit_screen
  else
    @sb[:miq_tab] = "edit_1"
    @rpt = MiqReport.find(params[:id])
    @rpt.id = nil # Treat as a new report
    set_form_vars
    build_edit_screen
  end
  # was `@ina_form` -- typo; the rest of this concern (and the views)
  # read @in_a_form (see miq_report_edit / build_edit_screen).
  @in_a_form = @lock_tree = true
  replace_right_cell
end
# Main add/save/cancel/reset handler for the report editor. Validates
# via valid_report?, persists the MiqReport, updates the report menu and
# explorer tree for new reports, and re-renders the right cell.
def miq_report_edit
  assert_privileges("miq_report_edit")
  case params[:button]
  when "cancel"
    @edit[:rpt_id] ?
      add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport"), :name => @edit[:rpt_title]}) :
      add_flash(_("Add of new %{model} was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport")})
    @edit = session[:edit] = nil # clean out the saved info
    replace_right_cell
  when "add", "save"
    id = params[:id] ? params[:id] : "new"
    return unless load_edit("report_edit__#{id}", "replace_cell__explorer")
    get_form_vars
    @changed = (@edit[:new] != @edit[:current])
    @rpt = @edit[:rpt_id] ? find_by_id_filtered(MiqReport, params[:id]) :
           MiqReport.new
    set_record_vars(@rpt)
    unless valid_report?(@rpt)
      build_edit_screen
      replace_right_cell
      return
    end
    # Charts require a configured sort field before saving.
    if @edit[:new][:graph_type] && (@edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING)
      add_flash(_("Report can not be saved unless sort field has been configured for Charts"), :error)
      @sb[:miq_tab] = "edit_4"
      build_edit_screen
      replace_right_cell
      return
    end
    if @rpt.save
      # update report name in menu if name is edited
      menu_repname_update(@edit[:current][:name], @edit[:new][:name]) if @edit[:current][:name] != @edit[:new][:name]
      AuditEvent.success(build_saved_audit(@rpt, @edit))
      @edit[:rpt_id] ?
        add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name}) :
        add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name})
      # only do this for new reports
      unless @edit[:rpt_id]
        self.x_node = "xx-#{@sb[:rpt_menu].length}_xx-#{@sb[:rpt_menu].length}-0"
        setnode_for_customreport
      end
      @edit = session[:edit] = nil # clean out the saved info
      if role_allows?(:feature => "miq_report_widget_editor")
        # all widgets for this report
        get_all_widgets("report", from_cid(x_node.split('_').last))
      end
      replace_right_cell(:replace_trees => [:reports])
    else
      # BUGFIX: was `rpt.errors` -- `rpt` is not defined in this method
      # (the record lives in @rpt), so a failed save raised NameError
      # instead of flashing the validation errors.
      @rpt.errors.each do |field, msg|
        add_flash("#{field.to_s.capitalize} #{msg}", :error)
      end
      @in_a_form = true
      session[:changed] = @changed ? true : false
      @changed = true
      replace_right_cell
    end
  else
    add_flash(_("All changes have been reset"), :warning) if params[:button] == "reset"
    @in_a_form = true
    @report = nil # Clear any saved report object
    if params[:tab] # Came in to change the tab
      @rpt = @edit[:rpt_id] ? MiqReport.find(@edit[:rpt_id]) :
             MiqReport.new
      check_tabs
      build_edit_screen
    else
      @sb[:miq_tab] = "edit_1"
      @rpt = params[:id] && params[:id] != "new" ? MiqReport.find(params[:id]) :
             MiqReport.new
      if @rpt.rpt_type == "Default"
        flash = "Default reports can not be edited"
        redirect_to :action => "show", :id => @rpt.id, :flash_msg => flash, :flash_error => true
        return
      end
      set_form_vars
      build_edit_screen
    end
    @changed = (@edit[:new] != @edit[:current])
    session[:changed] = @changed
    @lock_tree = true
    replace_right_cell
  end
end
# AJAX driven routine to check for changes in ANY field on the form
def form_field_changed
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  get_form_vars
  build_edit_screen
  @changed = (@edit[:new] != @edit[:current])
  # Stream back only the page fragments the gfv_* helpers flagged for
  # refresh (@refresh_div / @refresh_partial and friends).
  render :update do |page|
    page << javascript_prologue
    page.replace("flash_msg_div", :partial => "layouts/flash_msg") unless @refresh_div && @refresh_div != "column_lists"
    page.replace(@refresh_div, :partial => @refresh_partial) if @refresh_div
    page.replace("chart_sample_div", :partial => "form_chart_sample") if @refresh_div == "chart_div"
    page.replace("tl_sample_div", :partial => "form_tl_sample") if @refresh_div == "tl_settings_div"
    page.replace_html("calc_#{@calc_div}_div", :text => @calc_val) if @calc_div
    page << "miqSparkle(false);"
    page << javascript_for_miq_button_visibility_changed(@changed)
    if @tl_changed # Reload the screen if the timeline data was changed
      page.replace_html("tl_sample_div", :partial => "form_tl_sample") if @tl_field != NOTHING_STRING
    elsif @formatting_changed # Reload the screen if the formatting pulldowns need to be reset
      page.replace_html("formatting_div", :partial => "form_formatting")
    elsif @tl_repaint
      # page << "tl.paint();"
      page << javascript_hide("notification")
    end
  end
end
# AJAX handler for switching between the Record and Display filter
# sections; remembers which expression is active in @expkey and
# re-renders the filter area.
def filter_change
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  # Explicit MatchData instead of the cryptic Perl global $&.
  match = params[:button].to_s.match(/^(record|display)_filter$/)
  @expkey = match[0].to_sym if match
  render :update do |page|
    page << javascript_prologue
    page.replace("filter_div", :partial => "form_filter")
    page << "miqSparkle(false);"
  end
end
private
# Prepare the instance variables needed to render the currently active
# edit tab (@sb[:miq_tab]); each case branch builds only its tab's data.
def build_edit_screen
  build_tabs
  get_time_profiles # Get time profiles list (global and user specific)
  case @sb[:miq_tab].split("_")[1]
  when "1" # Select columns
    @edit[:models] ||= reportable_models
    # Add the blank choice if no table chosen yet
    # @edit[:models].insert(0,["<Choose>", "<Choose>"]) if @edit[:new][:model] == nil && @edit[:models][0][0] != "<Choose>"
    if @edit[:new][:model].nil?
      if @edit[:models][0][0] != "<Choose>"
        @edit[:models].insert(0, ["<Choose>", "<Choose>"])
      end
    else
      if @edit[:models][0][0] == "<Choose>"
        @edit[:models].delete_at(0)
      end
    end
  when "8" # Consolidate
    # Build group chooser arrays
    @edit[:new][:pivot].options = @edit[:new][:fields].dup
    @pivot = @edit[:new][:pivot]
  when "2" # Formatting
    # @edit[:calc_xml] = build_calc_combo_xml # Get the combobox XML for any numeric fields
  when "3" # Filter
    # Build record filter expression
    if @edit[:miq_exp] || # Is this stored as an MiqExp object
       ["new", "copy", "create"].include?(request.parameters["action"]) # or it's a new condition
      @edit[:record_filter][:exp_idx] ||= 0 # Start at first exp
      new_record_filter = @edit[:new][:record_filter]
      @edit[:record_filter][:expression] = copy_hash(new_record_filter) unless new_record_filter.blank?
      @expkey = :record_filter
      # Initialize the exp array
      exp_array(:init, @edit[:record_filter][:expression]) if @edit[:record_filter][:exp_array].nil?
      @edit[:record_filter][:exp_table] = exp_build_table(@edit[:record_filter][:expression])
      exp_get_prefill_types # Build prefill lists
      @edit[:record_filter][:exp_model] = @edit[:new][:model] # Set the model for the expression editor
    end
    # Build display filter expression
    @edit[:display_filter][:exp_idx] ||= 0 # Start at first exp
    new_display_filter = @edit[:new][:display_filter]
    @edit[:display_filter][:expression] = copy_hash(new_display_filter) unless new_display_filter.blank?
    @expkey = :display_filter
    # Initialize the exp array
    exp_array(:init, @edit[:display_filter][:expression]) if @edit[:display_filter][:exp_array].nil?
    @edit[:display_filter][:exp_table] = exp_build_table(@edit[:display_filter][:expression])
    cols = @edit[:new][:field_order]
    @edit[:display_filter][:exp_available_fields] = MiqReport.display_filter_details(cols, :field)
    cols = @edit[:new][:fields]
    @edit[:display_filter][:exp_available_tags] = MiqReport.display_filter_details(cols, :tag)
    @edit[:display_filter][:exp_model] = "_display_filter_" # Set model for display filter
    @expkey = :record_filter # Start with Record Filter showing
    if @edit[:new][:perf_interval] && !@edit[:new][:time_profile]
      set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
    end
  when "4" # Summarize
    # Build sort chooser arrays(@edit[:new][:fields], :field)
    @sortby1 = @edit[:new][:sortby1]
    @sortby2 = @edit[:new][:sortby2]
    @sort1 = @edit[:new][:field_order].dup
    @sort2 = @sort1.dup.delete_if { |s| s[1] == @sortby1.split("__").first }
  when "5" # Charts
    options = chart_fields_options
    if options.empty?
      @edit[:new][:chart_column] = nil
    else
      # NOTE(review): the result of options[0][1] is discarded, so this
      # line has no effect -- possibly a missing assignment to
      # @edit[:new][:chart_column]. Confirm intent before changing.
      options[0][1] unless options.detect { |_, v| v == @edit[:new][:chart_column] }
    end
  when "6" # Timeline
    @tl_fields = []
    @edit[:new][:fields].each do |field|
      if MiqReport.get_col_type(field[1]) == :datetime
        @tl_fields.push(field)
      end
    end
    @tl_field = @edit[:new][:tl_field]
    if @edit[:new][:tl_position] == "Last"
      @position_time = format_timezone(Time.now, "UTC", nil)
    else
      @position_time = format_timezone(Time.now - 1.year, "UTC", nil)
    end
    @timeline = true if @tl_field != NOTHING_STRING
    @tl_json = sample_timeline
  when "7" # Preview
    # generate preview report when
  end
  @in_a_form = true
  if ["new", "copy", "create"].include?(request.parameters["action"])
    # drop_breadcrumb( {:name=>"Add Report", :url=>"/report/new"} )
    @gtl_url = "/new"
  else
    # drop_breadcrumb( {:name=>"Edit Report", :url=>"/report/edit"} )
    @gtl_url = "/edit"
  end
end
# Returns [[pluralized human-readable model name, model class name], ...]
# for every model a report can be based on.
def reportable_models
  # map over the non-idiomatic collect alias
  MiqReport.reportable_models.map do |m|
    [Dictionary.gettext(m, :type => :model, :notfound => :titleize, :plural => true), m]
  end
end
def ensure_perf_interval_defaults
  # Seed default perf window values for hourly/daily intervals; values
  # the user already set are left untouched, other intervals are a no-op.
  interval = @edit[:new][:perf_interval]
  return unless %w(hourly daily).include?(interval)
  @edit[:new][:perf_end] ||= "0"
  @edit[:new][:perf_start] ||= (interval == "hourly" ? 1.day : 2.days).to_s
end
# Reset report column fields if model or interval was changed
def reset_report_col_fields
  # Wipe every column-derived setting in one shot; the user must rebuild
  # the report columns after a model/interval change.
  @edit[:new].update(
    :fields          => [],              # selected column list
    :headers         => {},              # column header text
    :pivot           => ReportController::PivotOptions.new,
    :sortby1         => NOTHING_STRING,  # primary sort
    :sortby2         => NOTHING_STRING,  # secondary sort
    :filter_operator => nil,
    :filter_string   => nil,
    :categories      => [],
    :graph_type      => nil,             # chart settings
    :chart_mode      => nil,
    :chart_column    => nil,
    :perf_trend_col  => nil,             # trend/performance settings
    :perf_trend_db   => nil,
    :perf_trend_pct1 => nil,
    :perf_trend_pct2 => nil,
    :perf_trend_pct3 => nil,
    :perf_limit_col  => nil,
    :perf_limit_val  => nil,
    :record_filter   => nil,             # expression filters
    :display_filter  => nil
  )
  @edit[:miq_exp] = true
end
# Assemble the @tabs list for the editor based on report type: trend and
# chargeback reports expose only a subset of the tabs. The literal
# _("...") labels are kept inline for gettext string extraction.
def build_tabs
  req = "edit"
  if @edit[:new][:model] == TREND_MODEL
    @tabs = [
      ["#{req}_1", _("Columns")],
      ["#{req}_3", _("Filter")],
      ["#{req}_7", _("Preview")]
    ]
  elsif Chargeback.db_is_chargeback?(@edit[:new][:model].to_s)
    @tabs = [
      ["#{req}_1", _("Columns")],
      ["#{req}_2", _("Formatting")],
      ["#{req}_3", _("Filter")],
      ["#{req}_7", _("Preview")]
    ]
  else
    @tabs = [
      ["#{req}_1", _("Columns")],
      ["#{req}_8", _("Consolidation")],
      ["#{req}_2", _("Formatting")],
      ["#{req}_9", _("Styling")],
      ["#{req}_3", _("Filter")],
      ["#{req}_4", _("Summary")],
      ["#{req}_5", _("Charts")],
      ["#{req}_6", _("Timeline")],
      ["#{req}_7", _("Preview")]
    ]
  end
  tab = @sb[:miq_tab].split("_")[1] # Get the tab number of the active tab
  @active_tab = "#{req}_#{tab}"
end
# Get variables from edit form
# Pull all submitted form values from params into @edit[:new]: each
# functional area is delegated to its gfv_* helper, then the remaining
# params are scanned for prefixed keys (hdr_*, fmt_*, calc_*,
# pivotcalc_*, style*).
def get_form_vars
  @assigned_filters = []
  gfv_report_fields # Global report fields
  gfv_move_cols_buttons # Move cols buttons
  gfv_model # Model changes
  gfv_trend # Trend fields
  gfv_performance # Performance fields
  gfv_chargeback # Chargeback fields
  gfv_charts # Charting fields
  gfv_pivots # Consolidation fields
  gfv_sort # Summary fields
  gfv_timeline # Timeline fields
  # Check for key prefixes (params starting with certain keys)
  params.each do |key, value|
    # See if any headers were sent in
    @edit[:new][:headers][key.split("_")[1..-1].join("_")] = value if key.split("_").first == "hdr"
    # See if any formats were sent in
    if key.split("_").first == "fmt"
      key2 = key.gsub("___", ".") # Put period sub table separator back into the key
      @edit[:new][:col_formats][key2.split("_")[1..-1].join("_")] = value.blank? ? nil : value.to_sym
      @formatting_changed = value.blank?
    end
    # See if any group calculation checkboxes were sent in
    gfv_key_group_calculations(key, value) if key.split("_").first == "calc"
    # See if any pivot calculation checkboxes were sent in
    gfv_key_pivot_calculations(key, value) if key.split("_").first == "pivotcalc"
    # Check for style fields
    prefix = key.split("_").first
    gfv_key_style(key, value) if prefix && prefix.starts_with?("style")
  end
end
# Handle params starting with "calc"
def gfv_key_group_calculations(key, value)
  # Key format is "calc_<field idx>"; value is a CSV of function names.
  idx = key.split("_").last.to_i
  field = @edit[:new][:field_order][idx].last
  groupings = value.split(",").sort.map(&:to_sym)
  groupings.delete(:null) # placeholder entry, not a real calculation
  @edit[:new][:col_options][field_to_col(field)] = {:grouping => groupings}
end
# Handle params starting with "pivotcalc"
def gfv_key_pivot_calculations(key, value)
  # Key format is "pivotcalc_<field idx>"; value is a CSV of aggregates.
  field = @edit[:new][:fields][key.split("_").last.to_i].last
  aggregates = value.split(',').sort.map(&:to_sym)
  @edit[:pivot_cols][field] = aggregates
  aggregates.each do |agg|
    # Derive the pivot header from the base column header + function name.
    @edit[:new][:headers]["#{field}__#{agg}"] = @edit[:new][:headers][field] + " (#{agg.to_s.titleize})"
  end
  build_field_order
end
# Handle params starting with "style"
# Key format is "<parm>_<field idx>_<style idx>", where parm is one of
# style / styleop / styleval / stylesuffix.
def gfv_key_style(key, value)
  parm, f_idx, s_idx = key.split("_") # Get the parm type, field index, and style index
  f_idx = f_idx.to_i
  s_idx = s_idx.to_i
  f = @edit[:new][:field_order][f_idx] # Get the field element
  field_sub_type = MiqExpression.get_col_info(f.last)[:format_sub_type]
  field_data_type = MiqExpression.get_col_info(f.last)[:data_type]
  field_name = f.last.include?(".") ? f.last.split(".").last.tr("-", ".") : f.last.split("-").last
  case parm
  when "style" # New CSS class chosen
    if value.blank?
      # Blank class removes this style row and prunes now-empty parents.
      @edit[:new][:col_options][field_name][:style].delete_at(s_idx)
      @edit[:new][:col_options][field_name].delete(:style) if @edit[:new][:col_options][field_name][:style].empty?
      @edit[:new][:col_options].delete(field_name) if @edit[:new][:col_options][field_name].empty?
    else
      @edit[:new][:col_options][field_name] ||= {}
      @edit[:new][:col_options][field_name][:style] ||= []
      @edit[:new][:col_options][field_name][:style][s_idx] ||= {}
      @edit[:new][:col_options][field_name][:style][s_idx][:class] = value.to_sym
      # Pick default [operator, value, suffix] by the column's data type.
      ovs = case field_data_type
            when :boolean
              ["DEFAULT", "true"]
            when :integer, :float
              ["DEFAULT", "", MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units) ? MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units).first : nil]
            else
              ["DEFAULT", ""]
            end
      # NOTE(review): op/val/suffix are fresh locals, so these ||= always
      # assign -- plain assignment would be clearer.
      op ||= ovs[0]
      val ||= ovs[1]
      suffix ||= ovs[2]
      @edit[:new][:col_options][field_name][:style][s_idx][:operator] ||= op
      @edit[:new][:col_options][field_name][:style][s_idx][:value] ||= val
      @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] ||= suffix if suffix
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleop" # New operator chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:operator] = value
    if value == "DEFAULT"
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
      # Remove all style array elements after this one
      # NOTE(review): each_with_index yields i_idx starting at 0, so this
      # deletes from the FRONT of the style array rather than the entries
      # after s_idx -- looks wrong, verify intent before relying on it.
      ((s_idx + 1)...@edit[:new][:col_options][field_name][:style].length).each_with_index do |_i, i_idx|
        @edit[:new][:col_options][field_name][:style].delete_at(i_idx)
      end
    elsif value.include?("NIL") || value.include?("EMPTY")
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
    elsif [:datetime, :date].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = EXP_TODAY # Set default date value
    elsif [:boolean].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = true # Set default boolean value
    else
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = "" # Set default value
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleval" # New value chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value] = value
  when "stylesuffix" # New suffix chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] = value.to_sym
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  end
end
# Copy the simple top-level report attributes from the request params
# into @edit[:new]; each is only touched when that param was submitted.
def gfv_report_fields
  new_settings = @edit[:new]
  new_settings[:pdf_page_size] = params[:pdf_page_size] if params[:pdf_page_size]
  if params[:chosen_queue_timeout]
    timeout = params[:chosen_queue_timeout]
    # A blank timeout means "use the default" and is stored as nil.
    new_settings[:queue_timeout] = timeout.blank? ? nil : timeout.to_i
  end
  if params[:row_limit]
    new_settings[:row_limit] = params[:row_limit].blank? ? "" : params[:row_limit]
  end
  new_settings[:name] = params[:name] if params[:name]
  new_settings[:title] = params[:title] if params[:title]
end
# Dispatch the column-move buttons; the field order is rebuilt only when
# a move method actually ran and returned a truthy result.
def gfv_move_cols_buttons
  moved = case params[:button]
          when 'right'  then move_cols_right
          when 'left'   then move_cols_left
          when 'up'     then move_cols_up
          when 'down'   then move_cols_down
          when 'top'    then move_cols_top
          when 'bottom' then move_cols_bottom
          end
  moved && build_field_order
end
# Handle a change of the report's base model (chosen_model param):
# resets interval/timezone settings, seeds performance or chargeback
# defaults for the new model, then clears all column-derived state.
def gfv_model
  if params[:chosen_model] && # Check for db table changed
     params[:chosen_model] != @edit[:new][:model]
    @edit[:new][:model] = params[:chosen_model]
    @edit[:new][:perf_interval] = nil # Clear performance interval setting
    @edit[:new][:tz] = nil
    if [:performance, :trend].include?(model_report_type(@edit[:new][:model]))
      @edit[:new][:perf_interval] ||= "daily" # Default to Daily
      @edit[:new][:perf_avgs] ||= "time_interval"
      @edit[:new][:tz] = session[:user_tz]
      ensure_perf_interval_defaults
    end
    if Chargeback.db_is_chargeback?(@edit[:new][:model])
      @edit[:new][:cb_model] = Chargeback.report_cb_model(@edit[:new][:model])
      @edit[:new][:cb_interval] ||= "daily" # Default to Daily
      @edit[:new][:cb_interval_size] ||= 1
      @edit[:new][:cb_end_interval_offset] ||= 1
      @edit[:new][:cb_groupby] ||= "date" # Default to Date grouping
      @edit[:new][:tz] = session[:user_tz]
    end
    reset_report_col_fields
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  end
end
# Handle trend-tab changes: the trending column, the limit column/value,
# and the three target-percent selections. Note the elsif chain means at
# most one param is processed per call (form observe sends one at a time).
def gfv_trend
  if params[:chosen_trend_col]
    @edit[:new][:perf_interval] ||= "daily" # Default to Daily
    @edit[:new][:perf_target_pct1] ||= 100 # Default to 100%
    if params[:chosen_trend_col] == "<Choose>"
      @edit[:new][:perf_trend_db] = nil
      @edit[:new][:perf_trend_col] = nil
    else
      # Trend col param is encoded as "<db>-<col>"
      @edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col] = params[:chosen_trend_col].split("-")
      # A percent-typed trend column fixes the limit at 100%
      # NOTE(review): .find{}.first will raise if the chosen col is not in
      # the available fields — presumably guaranteed by the form; confirm.
      if MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |af| af.last == params[:chosen_trend_col] }.first.include?("(%)")
        @edit[:new][:perf_limit_val] = 100
        @edit[:new][:perf_limit_col] = nil
        @edit[:percent_col] = true
      else
        @edit[:percent_col] = false
        @edit[:new][:perf_limit_val] = nil
      end
      ensure_perf_interval_defaults
      @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
    # @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
  elsif params[:chosen_limit_col]
    if params[:chosen_limit_col] == "<None>"
      @edit[:new][:perf_limit_col] = nil
    else
      # Choosing a limit column clears any manual limit value
      @edit[:new][:perf_limit_col] = params[:chosen_limit_col]
      @edit[:new][:perf_limit_val] = nil
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
  elsif params[:chosen_limit_val]
    @edit[:new][:perf_limit_val] = params[:chosen_limit_val]
  elsif params[:percent1]
    @edit[:new][:perf_target_pct1] = params[:percent1].to_i
  elsif params[:percent2]
    @edit[:new][:perf_target_pct2] = params[:percent2] == "<None>" ? nil : params[:percent2].to_i
  elsif params[:percent3]
    @edit[:new][:perf_target_pct3] = params[:percent3] == "<None>" ? nil : params[:percent3].to_i
  end
end
# Handle performance-tab changes: capture interval, averaging method,
# start/end offsets, timezone and time profile. The elsif chain means
# only one param is processed per call.
def gfv_performance
  if params[:chosen_interval]
    @edit[:new][:perf_interval] = params[:chosen_interval]
    @edit[:new][:perf_start] = nil # Clear start/end offsets
    @edit[:new][:perf_end] = nil
    ensure_perf_interval_defaults
    reset_report_col_fields
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params[:perf_avgs]
    @edit[:new][:perf_avgs] = params[:perf_avgs]
  elsif params[:chosen_start]
    @edit[:new][:perf_start] = params[:chosen_start]
  elsif params[:chosen_end]
    @edit[:new][:perf_end] = params[:chosen_end]
  elsif params[:chosen_tz]
    @edit[:new][:tz] = params[:chosen_tz]
  elsif params.key?(:chosen_time_profile)
    # key? check: an empty selection must still clear the time profile
    @edit[:new][:time_profile] = params[:chosen_time_profile].blank? ? nil : params[:chosen_time_profile].to_i
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  end
end
# Handle chargeback-tab changes: show-type (owner/tenant/tag/entity),
# tag category/value, provider/entity, grouping and interval options.
# params.key? is used where a blank value must clear the setting.
def gfv_chargeback
  # Chargeback options
  if params.key?(:cb_show_typ)
    @edit[:new][:cb_show_typ] = params[:cb_show_typ].blank? ? nil : params[:cb_show_typ]
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params.key?(:cb_tag_cat)
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
    if params[:cb_tag_cat].blank?
      @edit[:new][:cb_tag_cat] = nil
      @edit[:new][:cb_tag_value] = nil
    else
      # New category chosen: rebuild the tag value pulldown from its entries
      @edit[:new][:cb_tag_cat] = params[:cb_tag_cat]
      @edit[:cb_tags] = {}
      Classification.find_by_name(params[:cb_tag_cat]).entries.each { |e| @edit[:cb_tags][e.name] = e.description }
    end
  elsif params.key?(:cb_owner_id)
    @edit[:new][:cb_owner_id] = params[:cb_owner_id].blank? ? nil : params[:cb_owner_id]
  elsif params.key?(:cb_tenant_id)
    @edit[:new][:cb_tenant_id] = params[:cb_tenant_id].blank? ? nil : params[:cb_tenant_id].to_i
  elsif params.key?(:cb_tag_value)
    @edit[:new][:cb_tag_value] = params[:cb_tag_value].blank? ? nil : params[:cb_tag_value]
  elsif params.key?(:cb_entity_id)
    @edit[:new][:cb_entity_id] = params[:cb_entity_id].blank? ? nil : params[:cb_entity_id]
  elsif params.key?(:cb_provider_id)
    # Provider change resets the entity selection to "all" and redraws the form
    @edit[:new][:cb_provider_id] = params[:cb_provider_id].blank? ? nil : params[:cb_provider_id]
    @edit[:new][:cb_entity_id] = "all"
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params.key?(:cb_groupby)
    @edit[:new][:cb_groupby] = params[:cb_groupby]
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params.key?(:cb_groupby_tag)
    @edit[:new][:cb_groupby_tag] = params[:cb_groupby_tag]
  elsif params[:cb_interval]
    # Interval change resets the size/offset to their defaults
    @edit[:new][:cb_interval] = params[:cb_interval]
    @edit[:new][:cb_interval_size] = 1
    @edit[:new][:cb_end_interval_offset] = 1
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params[:cb_interval_size]
    @edit[:new][:cb_interval_size] = params[:cb_interval_size].to_i
  elsif params[:cb_end_interval_offset]
    @edit[:new][:cb_end_interval_offset] = params[:cb_end_interval_offset].to_i
  end
end
# Handle chart-tab changes: chart type, mode (counts/values), the value
# column, top-N count, and the "other" rollup checkbox. Each change is
# only applied when the param differs from the current edit value, and
# each sets the narrowest refresh div that needs redrawing.
def gfv_charts
  if params[:chosen_graph] && params[:chosen_graph] != @edit[:new][:graph_type]
    if params[:chosen_graph] == "<No chart>"
      @edit[:new][:graph_type] = nil
      # Reset other setting to initial settings if choosing <No chart>
      @edit[:new][:graph_count] = @edit[:current][:graph_count]
      @edit[:new][:graph_other] = @edit[:current][:graph_other]
      @edit[:new][:chart_mode] = @edit[:current][:chart_mode]
      @edit[:new][:chart_column] = @edit[:current][:chart_column]
    else
      @edit[:new][:graph_other] = true if @edit[:new][:graph_type].nil? # Reset other setting if choosing first chart
      @edit[:new][:graph_type] = params[:chosen_graph] # Save graph type
      @edit[:new][:graph_count] ||= GRAPH_MAX_COUNT # Reset graph count, if not set
      @edit[:new][:chart_mode] ||= 'counts'
      @edit[:new][:chart_column] ||= ''
    end
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_mode] && params[:chart_mode] != @edit[:new][:chart_mode]
    @edit[:new][:chart_mode] = params[:chart_mode]
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_column] && params[:chart_column] != @edit[:new][:chart_column]
    @edit[:new][:chart_column] = params[:chart_column]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_count] && params[:chosen_count] != @edit[:new][:graph_count]
    @edit[:new][:graph_count] = params[:chosen_count]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_other] # If a chart is showing, set the other setting based on check box present
    chosen = (params[:chosen_other].to_s == "1")
    if @edit[:new][:graph_other] != chosen
      @edit[:new][:graph_other] = chosen
      @refresh_div = "chart_sample_div"
      @refresh_partial = "form_chart_sample"
    end
  end
end
# Handle pivot-tab changes: update the pivot selections from params and,
# when one of the three pivot pulldowns changed, drop the now-grouping
# fields from the pivot calc columns and rebuild the field order.
def gfv_pivots
  pivot = (@edit[:new][:pivot] ||= ReportController::PivotOptions.new)
  pivot.update(params)
  return unless params[:chosen_pivot1] || params[:chosen_pivot2] || params[:chosen_pivot3]
  if pivot.by1 == NOTHING_STRING
    # No pivot grouping fields selected at all - clear the calc columns
    @edit[:pivot_cols] = {}
  else
    # Grouping fields cannot also be calc columns - remove each of them
    [pivot.by1, pivot.by2, pivot.by3].each { |by| @edit[:pivot_cols].delete(by) }
  end
  build_field_order
  @refresh_div = "consolidate_div"
  @refresh_partial = "form_consolidate"
end
# Handle sort-tab changes: sort order and grouping, the two sort columns
# with their date/time break suffixes, the group break format, and the
# default break label maintenance at the end.
def gfv_sort
  @edit[:new][:order] = params[:sort_order] if params[:sort_order]
  if params[:sort_group] # If grouping changed,
    @edit[:new][:group] = params[:sort_group]
    @refresh_div = "sort_div" # Resend the sort tab
    @refresh_partial = "form_sort"
    # BUGFIX: was `=` (assignment), which clobbered chart_mode with a
    # boolean before the next line could run; a comparison was intended.
    if @edit[:new][:chart_mode] == 'values' && !chart_mode_values_allowed?
      @edit[:new][:chart_mode] = 'counts'
    end
  end
  @edit[:new][:hide_details] = (params[:hide_details].to_s == "1") if params[:hide_details]
  if params[:chosen_sort1] && params[:chosen_sort1] != @edit[:new][:sortby1].split("__").first
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = params[:chosen_sort1]
    # Clear sort 2 when sort 1 is cleared or now duplicates it
    @edit[:new][:sortby2] = NOTHING_STRING if params[:chosen_sort1] == NOTHING_STRING || params[:chosen_sort1] == @edit[:new][:sortby2].split("__").first
    @refresh_div = "sort_div"
    @refresh_partial = "form_sort"
  elsif params[:chosen_sort2] && params[:chosen_sort2] != @edit[:new][:sortby2].split("__").first
    @edit[:new][:sortby2] = params[:chosen_sort2]
  # Look at the 1st sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort1_suffix] && params[:sort1_suffix].to_s != @edit[:new][:sortby1].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = @edit[:new][:sortby1].split("__").first +
                            (params[:sort1_suffix].blank? ? "" : "__#{params[:sort1_suffix]}")
  # Look at the 2nd sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort2_suffix] && params[:sort2_suffix].to_s != @edit[:new][:sortby2].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby2].split("-").last) if @edit[:new][:sortby2].split("__")[1]
    # (a redundant assignment that was immediately overwritten here has been removed)
    @edit[:new][:sortby2] = @edit[:new][:sortby2].split("__").first +
                            (params[:sort2_suffix].blank? ? "" : "__#{params[:sort2_suffix]}")
  # Look at the break format
  else
    co_key1 = @edit[:new][:sortby1].split("-").last
    if params[:break_format] &&
       params[:break_format].to_s != @edit[:new].fetch_path(:col_options, co_key1)
      if params[:break_format].blank? || # Remove format and col key (if empty)
         params[:break_format].to_sym == MiqReport.get_col_info(@edit[:new][:sortby1])[:default_format]
        if @edit[:new][:col_options][co_key1]
          @edit[:new][:col_options][co_key1].delete(:break_format)
          @edit[:new][:col_options].delete(co_key1) if @edit[:new][:col_options][co_key1].empty?
        end
      else # Add col and format to col_options
        @edit[:new][:col_options][co_key1] ||= {}
        @edit[:new][:col_options][co_key1][:break_format] = params[:break_format].to_sym
      end
    end
  end
  # Clear/set up the default break label
  sort1 = @edit[:new][:sortby1].split("-").last unless @edit[:new][:sortby1].blank?
  if @edit[:new][:group] == "No" # Clear any existing break label
    if @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1].delete(:break_label)
      @edit[:new][:col_options].delete(sort1) if @edit[:new][:col_options][sort1].empty?
    end
  else # Create a break label, if none there already
    unless @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1] ||= {}
      sort, suffix = @edit[:new][:sortby1].split("__")
      @edit[:new][:col_options][sort1][:break_label] =
        @edit[:new][:field_order].collect { |f| f.first if f.last == sort }.compact.join.strip +
        (suffix ? " (#{MiqReport.date_time_break_suffixes.collect { |s| s.first if s.last == suffix }.compact.join})" : "") +
        ": "
    end
  end
  # TODO: Not allowing user to change break label until editor is changed to not use form observe
  # if params[:break_label]
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last] ||= Hash.new
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last][:break_label] == params[:break_label]
  # end
end
# Handle timeline-tab changes: the timeline event field and band position.
def gfv_timeline
  chosen_field = params[:chosen_tl]
  chosen_position = params[:chosen_position]
  if chosen_field && chosen_field != @edit[:new][:tl_field]
    if @edit[:new][:tl_field] == NOTHING_STRING || chosen_field == NOTHING_STRING
      # Field toggled between none and something - redraw the settings area
      @refresh_div = "tl_settings_div"
      @refresh_partial = "form_tl_settings"
      @tl_changed = true
    else
      # Field swapped for another - only the timeline itself needs repainting
      @tl_repaint = true
    end
    @edit[:new][:tl_field] = chosen_field
  elsif chosen_position && chosen_position != @edit[:new][:tl_position]
    @tl_changed = true
    @edit[:new][:tl_position] = chosen_position
  end
end
# Move the selected available fields into the report's chosen fields.
# Validates that something was selected and that the column cap is not
# exceeded, then appends each chosen field (in available-field order,
# skipping duplicates) and derives its default column header.
def move_cols_right
  if !params[:available_fields] || params[:available_fields].length == 0 || params[:available_fields][0] == ""
    add_flash(_("No fields were selected to move down"), :error)
  elsif params[:available_fields].length + @edit[:new][:fields].length > MAX_REPORT_COLUMNS
    add_flash(_("Fields not added: Adding the selected %{count} fields will exceed the maximum of %{max} fields") % {:count => params[:available_fields].length + @edit[:new][:fields].length, :max => MAX_REPORT_COLUMNS},
              :error)
  else
    MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).each do |af| # Go thru all available columns
      if params[:available_fields].include?(af[1]) # See if this column was selected to move
        unless @edit[:new][:fields].include?(af) # Only move if it's not there already
          @edit[:new][:fields].push(af) # Add it to the new fields list
          if af[0].include?(":") # Not a base column
            table = af[0].split(" : ")[0].split(".")[-1] # Get the table name
            table = table.singularize unless table == "OS" # Singularize, except "OS"
            temp = af[0].split(" : ")[1]
            # Avoid doubling the table name when the field title already starts with it
            temp_header = table == temp.split(" ")[0] ? af[0].split(" : ")[1] : table + " " + af[0].split(" : ")[1]
          else
            temp_header = af[0].strip # Base column, just use it without leading space
          end
          @edit[:new][:headers][af[1]] = temp_header # Add the column title to the headers hash
        end
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  end
end
# Remove the selected chosen fields from the report. Refuses when nothing
# is selected or a field is referenced by the display filter; otherwise
# cascades the removal through headers, column formats, pivot selections
# and the sort settings before dropping the fields themselves.
def move_cols_left
  if !params[:selected_fields] || params[:selected_fields].length == 0 || params[:selected_fields][0] == ""
    add_flash(_("No fields were selected to move up"), :error)
  elsif display_filter_contains?(params[:selected_fields])
    add_flash(_("No fields were moved up"), :error)
  else
    @edit[:new][:fields].each do |nf| # Go thru all new fields
      if params[:selected_fields].include?(nf.last) # See if this col was selected to move
        # Clear out headers and formatting
        @edit[:new][:headers].delete(nf.last) # Delete the column name from the headers hash
        @edit[:new][:headers].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        @edit[:new][:col_formats].delete(nf.last) # Delete the column name from the col_formats hash
        @edit[:new][:col_formats].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        # Clear out pivot field options
        @edit[:new][:pivot].drop_from_selection(nf.last)
        @edit[:pivot_cols].delete(nf.last) # Delete the column name from the pivot_cols hash
        # Clear out sort options
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby1].split("__").first # If deleting the first sort field
          if MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) # If sort has a break suffix
            @edit[:new][:col_options].delete(field_to_col(@edit[:new][:sortby1])) # Remove the <col>__<suffix> from col_options
          end
          unless @edit[:new][:group] == "No" # If we were grouping, remove all col_options :group keys
            @edit[:new][:col_options].each do |co_key, co_val|
              co_val.delete(:grouping) # Remove :group key
              @edit[:new][:col_options].delete(co_key) if co_val.empty? # Remove the col, if empty
            end
          end
          # Removing the first sort field clears both sort selections
          @edit[:new][:sortby1] = NOTHING_STRING
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby2].split("__").first # If deleting the second sort field
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        @edit[:new][:col_options].delete(field_to_col(nf.last)) # Remove this column from the col_options hash
      end
    end
    @edit[:new][:fields].delete_if { |nf| params[:selected_fields].include?(nf.last) } # Remove selected fields
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  end
end
# Check whether any of the given encoded field names appear in the
# display filter expression. Adds a flash error per offending field and
# returns true when at least one flash message exists.
def display_filter_contains?(fields)
  filter = @edit[:new][:display_filter]
  return false if filter.nil? # No display filter defined
  filter_text = filter.inspect
  @edit[:new][:fields].each do |field|
    next unless fields.include?(field.last) # Only care about fields being removed
    if filter_text.include?(field.last)
      add_flash(_("%{name} is currently being used in the Display Filter") %
        {:name => field.first}, :error)
    end
  end
  !@flash_array.nil?
end
# Determine whether the selected fields form one consecutive run within
# the chosen-fields list. Returns [consecutive?, first_index, last_index].
def selected_consecutive?
  chosen = params[:selected_fields]
  start_pos = end_pos = 0
  @edit[:new][:fields].each_with_index do |field, position|
    start_pos = position if field[1] == chosen.first
    next unless field[1] == chosen.last
    end_pos = position
    break
  end
  # A run is consecutive when its span is no wider than the selection count
  consecutive = (end_pos - start_pos + 1) <= chosen.length
  [consecutive, start_pos, end_pos]
end
# Set record variables to new values.
# Copies the screen edit settings (@edit[:new]) into the MiqReport +rpt+:
# simple attributes, chart options, filter expressions, the db_options
# for performance/trend/chargeback models, timeline and grouping options,
# and finally the column-driven arrays (cols, col_order, headers, sortby)
# built per-field via add_field_to_col_order. Mutates +rpt+ in place;
# callers are expected to save it afterwards.
def set_record_vars(rpt)
  # Set the simple string/number fields
  rpt.template_type = "report"
  rpt.name = @edit[:new][:name].to_s.strip
  rpt.title = @edit[:new][:title].to_s.strip
  rpt.db = @edit[:new][:model]
  rpt.rpt_group = @edit[:new][:rpt_group]
  rpt.rpt_type = @edit[:new][:rpt_type]
  rpt.priority = @edit[:new][:priority]
  rpt.categories = @edit[:new][:categories]
  rpt.col_options = @edit[:new][:col_options]
  rpt.order = @edit[:new][:sortby1].nil? ? nil : @edit[:new][:order]
  # Set the graph fields
  if @edit[:new][:sortby1] == NOTHING_STRING || @edit[:new][:graph_type].nil?
    # Charts require a sort field; clear chart settings otherwise
    rpt.dims = nil
    rpt.graph = nil
  else
    if @edit[:new][:graph_type] =~ /^(Pie|Donut)/ # Pie and Donut charts must be set to 1 dimension
      rpt.dims = 1
    else
      rpt.dims = @edit[:new][:sortby2] == NOTHING_STRING ? 1 : 2 # Set dims to 1 or 2 based on presence of sortby2
    end
    # Values mode needs a data column; default to the first available one
    if @edit[:new][:chart_mode] == 'values' && @edit[:new][:chart_column].blank?
      options = chart_fields_options
      @edit[:new][:chart_column] = options[0][1] unless options.empty?
    end
    rpt.graph = {
      :type   => @edit[:new][:graph_type],
      :mode   => @edit[:new][:chart_mode],
      :column => @edit[:new][:chart_column],
      :count  => @edit[:new][:graph_count],
      :other  => @edit[:new][:graph_other],
    }
  end
  # Set the conditions field (expression); the {"???" => "???"} marker means
  # a new/incomplete expression, which must not be saved
  if !@edit[:new][:record_filter].nil? && @edit[:new][:record_filter]["???"].nil?
    rpt.conditions = MiqExpression.new(@edit[:new][:record_filter])
  else
    rpt.conditions = nil
  end
  # Set the display_filter field (expression)
  if !@edit[:new][:display_filter].nil? && @edit[:new][:display_filter]["???"].nil?
    rpt.display_filter = MiqExpression.new(@edit[:new][:display_filter])
  else
    rpt.display_filter = nil
  end
  # Set the performance options
  rpt.db_options = Hash.new
  if model_report_type(rpt.db) == :performance
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:calc_avgs_by] = @edit[:new][:perf_avgs]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    # perf_start is stored relative to the end offset
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
  elsif model_report_type(rpt.db) == :trend
    rpt.db_options[:rpt_type] = "trend"
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
    rpt.db_options[:trend_db] = @edit[:new][:perf_trend_db]
    rpt.db_options[:trend_col] = @edit[:new][:perf_trend_col]
    rpt.db_options[:limit_col] = @edit[:new][:perf_limit_col] if @edit[:new][:perf_limit_col]
    rpt.db_options[:limit_val] = @edit[:new][:perf_limit_val] if @edit[:new][:perf_limit_val]
    rpt.db_options[:target_pcts] = []
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct1])
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct2]) if @edit[:new][:perf_target_pct2]
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct3]) if @edit[:new][:perf_target_pct3]
  elsif Chargeback.db_is_chargeback?(rpt.db)
    rpt.db_options[:rpt_type] = @edit[:new][:model]
    options = {} # CB options go in db_options[:options] key
    options[:interval] = @edit[:new][:cb_interval]
    options[:interval_size] = @edit[:new][:cb_interval_size]
    options[:end_interval_offset] = @edit[:new][:cb_end_interval_offset]
    if @edit[:new][:cb_show_typ] == "owner"
      options[:owner] = @edit[:new][:cb_owner_id]
    elsif @edit[:new][:cb_show_typ] == "tenant"
      options[:tenant_id] = @edit[:new][:cb_tenant_id]
    elsif @edit[:new][:cb_show_typ] == "tag"
      if @edit[:new][:cb_tag_cat] && @edit[:new][:cb_tag_value]
        options[:tag] = "/managed/#{@edit[:new][:cb_tag_cat]}/#{@edit[:new][:cb_tag_value]}"
      end
    elsif @edit[:new][:cb_show_typ] == "entity"
      options[:provider_id] = @edit[:new][:cb_provider_id]
      options[:entity_id] = @edit[:new][:cb_entity_id]
    end
    options[:groupby] = @edit[:new][:cb_groupby]
    options[:groupby_tag] = @edit[:new][:cb_groupby] == 'tag' ? @edit[:new][:cb_groupby_tag] : nil
    rpt.db_options[:options] = options
  end
  rpt.time_profile_id = @edit[:new][:time_profile]
  if @edit[:new][:time_profile]
    time_profile = TimeProfile.find_by_id(@edit[:new][:time_profile])
    rpt.tz = time_profile.tz
  end
  # Set the timeline field
  if @edit[:new][:tl_field] == NOTHING_STRING
    rpt.timeline = nil
  else
    rpt.timeline = Hash.new
    rpt.timeline[:field] = @edit[:new][:tl_field]
    rpt.timeline[:position] = @edit[:new][:tl_position]
  end
  # Set the line break group field
  if @edit[:new][:sortby1] == NOTHING_STRING # If no sort fields
    rpt.group = nil # Clear line break group
  else # Otherwise, check the setting
    case @edit[:new][:group]
    when "Yes"
      rpt.group = "y"
    when "Counts"
      rpt.group = "c"
    else
      rpt.group = nil
    end
  end
  # Set defaults, if not present
  rpt.rpt_group ||= "Custom"
  rpt.rpt_type ||= "Custom"
  rpt.cols = []
  rpt.col_order = []
  rpt.col_formats = []
  rpt.headers = []
  rpt.include = Hash.new
  rpt.sortby = @edit[:new][:sortby1] == NOTHING_STRING ? nil : [] # Clear sortby if sortby1 not present, else set up array
  # Add in the chargeback static fields
  if Chargeback.db_is_chargeback?(rpt.db) # For chargeback, add in specific chargeback report options
    rpt = @edit[:new][:model].constantize.set_chargeback_report_options(rpt, @edit)
  end
  # Remove when we support user sorting of trend reports
  if rpt.db == TREND_MODEL
    rpt.sortby = ["resource_name"]
    rpt.order = "Ascending"
  end
  # Build column related report fields
  @pg1 = @pg2 = @pg3 = nil # Init the pivot group cols
  @edit[:new][:fields].each do |field_entry| # Go thru all of the fields
    field = field_entry[1] # Get the encoded fully qualified field name
    if @edit[:new][:pivot].by1 != NOTHING_STRING && # If we are doing pivoting and
       @edit[:pivot_cols].key?(field) # this is a pivot calc column
      @edit[:pivot_cols][field].each do |calc_typ| # Add header/format/col_order for each calc type
        rpt.headers.push(@edit[:new][:headers][field + "__#{calc_typ}"])
        rpt.col_formats.push(@edit[:new][:col_formats][field + "__#{calc_typ}"])
        add_field_to_col_order(rpt, field + "__#{calc_typ}")
      end
    else # Normal field, set header/format/col_order
      rpt.headers.push(@edit[:new][:headers][field])
      rpt.col_formats.push(@edit[:new][:col_formats][field])
      add_field_to_col_order(rpt, field)
    end
  end
  rpt.rpt_options ||= {}
  rpt.rpt_options.delete(:pivot)
  unless @pg1.nil? # Build the pivot group_cols array
    rpt.rpt_options[:pivot] = {}
    rpt.rpt_options[:pivot][:group_cols] = []
    rpt.rpt_options[:pivot][:group_cols].push(@pg1)
    rpt.rpt_options[:pivot][:group_cols].push(@pg2) unless @pg2.nil?
    rpt.rpt_options[:pivot][:group_cols].push(@pg3) unless @pg3.nil?
  end
  # Row limit only applies when not grouping
  if @edit[:new][:group] != "No" || @edit[:new][:row_limit].blank?
    rpt.rpt_options.delete(:row_limit)
  else
    rpt.rpt_options[:row_limit] = @edit[:new][:row_limit].to_i
  end
  # Add pdf page size to rpt_options
  rpt.rpt_options ||= {}
  rpt.rpt_options[:pdf] ||= {}
  rpt.rpt_options[:pdf][:page_size] = @edit[:new][:pdf_page_size] || DEFAULT_PDF_PAGE_SIZE
  rpt.rpt_options[:queue_timeout] = @edit[:new][:queue_timeout]
  # Add hide detail rows option, if grouping
  if rpt.group.nil?
    rpt.rpt_options.delete(:summary)
  else
    rpt.rpt_options[:summary] ||= {}
    rpt.rpt_options[:summary][:hide_detail_rows] = @edit[:new][:hide_details]
  end
  user = current_user
  rpt.user = user
  rpt.miq_group = user.current_group
  rpt.add_includes_for_virtual_custom_attributes
end
# Add one encoded field ("<model>[.<assoc>...]-<col>[__<calc>]") to the
# report's column structures: rpt.cols / rpt.include, rpt.col_order,
# rpt.sortby and the @pg1..@pg3 pivot group columns. Fields containing a
# "." belong to an associated table and are threaded into the nested
# rpt.include hash; plain fields go straight into rpt.cols.
def add_field_to_col_order(rpt, field)
  # Get the sort columns, removing the suffix if it exists
  sortby1 = MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) ?
    @edit[:new][:sortby1].split("__").first :
    @edit[:new][:sortby1]
  sortby2 = MiqReport.is_break_suffix?(@edit[:new][:sortby2].split("__")[1]) ?
    @edit[:new][:sortby2].split("__").first :
    @edit[:new][:sortby2]
  # Has a period, so it's an include
  if field.include?(".") && !field.include?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
    tables = field.split("-")[0].split(".")[1..-1] # Get the list of tables from before the hyphen
    inc_hash = rpt.include # Start at the main hash
    tables.each_with_index do |table, idx|
      inc_hash[table] ||= {} # Create hash for the table, if it's not there already
      if idx == tables.length - 1 # We're at the end of the field name, so add the column
        inc_hash[table]["columns"] ||= [] # Create the columns array for this table
        f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
        inc_hash[table]["columns"].push(f) unless inc_hash[table]["columns"].include?(f) # Add the field to the columns, if not there
        table_field = tables.join('.') + "." + field.split("-")[1]
        rpt.col_order.push(table_field) # Add the table.field to the col_order array
        if field == sortby1 # Is this the first sort field?
          rpt.sortby = [table_field] + rpt.sortby # Put the field first in the sortby array
        elsif field == sortby2 # Is this the second sort field?
          # BUGFIX: previously compared against the unstripped
          # @edit[:new][:sortby2], so a second sort with a break suffix was
          # never matched for included-table columns; now consistent with
          # both the sortby1 branch above and the base-column branch below.
          rpt.sortby.push(table_field) # Add the field to the sortby array
        end
        if field == @edit[:new][:pivot].by1 # Save the group fields
          @pg1 = table_field
        elsif field == @edit[:new][:pivot].by2
          @pg2 = table_field
        elsif field == @edit[:new][:pivot].by3
          @pg3 = table_field
        end
      else # Set up for the next embedded include hash
        inc_hash[table]["include"] ||= {} # Create include hash for next level
        inc_hash = inc_hash[table]["include"] # Point to the new hash
      end
    end
  else # No period, this is a main table column
    if field.include?("__") # Check for pivot calculated field
      f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
      rpt.cols.push(f) unless rpt.cols.include?(f) # Add the original field, if not already there
    else
      rpt.cols.push(field.split("-")[1]) # Grab the field name after the hyphen
    end
    rpt.col_order.push(field.split("-")[1]) # Add the field to the col_order array
    if field == sortby1 # Is this the first sort field?
      rpt.sortby = [@edit[:new][:sortby1].split("-")[1]] + rpt.sortby # Put the field first in the sortby array
    elsif field == sortby2 # Is this the second sort field?
      rpt.sortby.push(@edit[:new][:sortby2].split("-")[1]) # Add the field to the sortby array
    end
    if field == @edit[:new][:pivot].by1 # Save the group fields
      @pg1 = field.split("-")[1]
    elsif field == @edit[:new][:pivot].by2
      @pg2 = field.split("-")[1]
    elsif field == @edit[:new][:pivot].by3
      @pg3 = field.split("-")[1]
    end
  end
end
# Set form variables for edit
def set_form_vars
@edit = {}
@edit[:rpt_id] = @rpt.id # Save a record id to use it later to look a record
@edit[:rpt_title] = @rpt.title
@edit[:rpt_name] = @rpt.name
@edit[:new] = {}
@edit[:key] = "report_edit__#{@rpt.id || "new"}"
if params[:pressed] == "miq_report_copy"
@edit[:new][:rpt_group] = "Custom"
@edit[:new][:rpt_type] = "Custom"
else
@edit[:new][:rpt_group] = @rpt.rpt_group
@edit[:new][:rpt_type] = @rpt.rpt_type
end
# Get the simple string/number fields
@edit[:new][:name] = @rpt.name
@edit[:new][:title] = @rpt.title
@edit[:new][:model] = @rpt.db
@edit[:new][:priority] = @rpt.priority
@edit[:new][:order] = @rpt.order.blank? ? "Ascending" : @rpt.order
# @edit[:new][:graph] = @rpt.graph
# Replaced above line to handle new graph settings Hash
if @rpt.graph.kind_of?(Hash)
@edit[:new][:graph_type] = @rpt.graph[:type]
@edit[:new][:graph_count] = @rpt.graph[:count]
@edit[:new][:chart_mode] = @rpt.graph[:mode]
@edit[:new][:chart_column] = @rpt.graph[:column]
@edit[:new][:graph_other] = @rpt.graph[:other] ? @rpt.graph[:other] : false
else
@edit[:new][:graph_type] = @rpt.graph
@edit[:new][:graph_count] = GRAPH_MAX_COUNT
@edit[:new][:chart_mode] = 'counts'
@edit[:new][:chart_column] = ''
@edit[:new][:graph_other] = true
end
@edit[:new][:dims] = @rpt.dims
@edit[:new][:categories] = @rpt.categories
@edit[:new][:categories] ||= []
@edit[:new][:col_options] = @rpt.col_options.blank? ? {} : @rpt.col_options
# Initialize options
@edit[:new][:perf_interval] = nil
@edit[:new][:perf_start] = nil
@edit[:new][:perf_end] = nil
@edit[:new][:tz] = nil
@edit[:new][:perf_trend_db] = nil
@edit[:new][:perf_trend_col] = nil
@edit[:new][:perf_limit_col] = nil
@edit[:new][:perf_limit_val] = nil
@edit[:new][:perf_target_pct1] = nil
@edit[:new][:perf_target_pct2] = nil
@edit[:new][:perf_target_pct3] = nil
@edit[:new][:cb_interval] = nil
@edit[:new][:cb_interval_size] = nil
@edit[:new][:cb_end_interval_offset] = nil
if [:performance, :trend].include?(model_report_type(@rpt.db))
@edit[:new][:perf_interval] = @rpt.db_options[:interval]
@edit[:new][:perf_avgs] = @rpt.db_options[:calc_avgs_by]
@edit[:new][:perf_end] = @rpt.db_options[:end_offset].to_s
@edit[:new][:perf_start] = (@rpt.db_options[:start_offset] - @rpt.db_options[:end_offset]).to_s
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
if @rpt.time_profile
@edit[:new][:time_profile] = @rpt.time_profile_id
@edit[:new][:time_profile_tz] = @rpt.time_profile.tz
else
set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
end
@edit[:new][:perf_trend_db] = @rpt.db_options[:trend_db]
@edit[:new][:perf_trend_col] = @rpt.db_options[:trend_col]
@edit[:new][:perf_limit_col] = @rpt.db_options[:limit_col]
@edit[:new][:perf_limit_val] = @rpt.db_options[:limit_val]
@edit[:new][:perf_target_pct1], @edit[:new][:perf_target_pct2], @edit[:new][:perf_target_pct3] = @rpt.db_options[:target_pcts]
elsif Chargeback.db_is_chargeback?(@rpt.db)
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
options = @rpt.db_options[:options]
if options.key?(:owner) # Get the owner options
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = options[:owner]
elsif options.key?(:tenant_id) # Get the tenant options
@edit[:new][:cb_show_typ] = "tenant"
@edit[:new][:cb_tenant_id] = options[:tenant_id]
elsif options.key?(:tag) # Get the tag options
@edit[:new][:cb_show_typ] = "tag"
@edit[:new][:cb_tag_cat] = options[:tag].split("/")[-2]
@edit[:new][:cb_tag_value] = options[:tag].split("/")[-1]
@edit[:cb_tags] = {}
cat = Classification.find_by_name(@edit[:new][:cb_tag_cat])
cat.entries.each { |e| @edit[:cb_tags][e.name] = e.description } if cat # Collect the tags, if category is valid
elsif options.key?(:entity_id)
@edit[:new][:cb_show_typ] = "entity"
@edit[:new][:cb_entity_id] = options[:entity_id]
@edit[:new][:cb_provider_id] = options[:provider_id]
end
@edit[:new][:cb_groupby_tag] = options[:groupby_tag] if options.key?(:groupby_tag)
@edit[:new][:cb_model] = Chargeback.report_cb_model(@rpt.db)
@edit[:new][:cb_interval] = options[:interval]
@edit[:new][:cb_interval_size] = options[:interval_size]
@edit[:new][:cb_end_interval_offset] = options[:end_interval_offset]
@edit[:new][:cb_groupby] = options[:groupby]
end
# Only show chargeback users choice if an admin
if admin_user?
@edit[:cb_users] = User.all.each_with_object({}) { |u, h| h[u.userid] = u.name }
@edit[:cb_tenant] = Tenant.all.each_with_object({}) { |t, h| h[t.id] = t.name }
else
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = session[:userid]
@edit[:cb_owner_name] = current_user.name
end
# Get chargeback tags
cats = Classification.categories.collect { |c| c if c.show }.compact # Get categories, sort by name, remove nils
cats.delete_if { |c| c.read_only? || c.entries.length == 0 } # Remove categories that are read only or have no entries
@edit[:cb_cats] = cats.each_with_object({}) { |c, h| h[c.name] = c.description }
@edit[:cb_providers] = {}
@edit[:cb_providers][:container_project] = {}
@edit[:cb_providers][:vm] = {} # Fill this in if entity show type it ever becomes relevent for VMs
@edit[:cb_entities_by_provider_id] = {}
ManageIQ::Providers::ContainerManager.all.each do |provider|
@edit[:cb_providers][:container_project][provider.name] = provider.id
@edit[:cb_entities_by_provider_id][provider.id] = {}
provider.container_projects.all.each do |project|
@edit[:cb_entities_by_provider_id][provider.id][project.id] = project.name
end
end
# Build trend limit cols array
if model_report_type(@rpt.db) == :trend
@edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
end
if [:performance, :trend].include?(model_report_type(@rpt.db))
ensure_perf_interval_defaults
end
expkey = :record_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:record_filter] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Get the conditions MiqExpression
if @rpt.conditions.kind_of?(MiqExpression)
@edit[:new][:record_filter] = @rpt.conditions.exp
@edit[:miq_exp] = true
elsif @rpt.conditions.nil?
@edit[:new][:record_filter] = nil
@edit[:new][:record_filter] = @edit[expkey][:expression] # Copy to new exp
@edit[:miq_exp] = true
end
# Get the display_filter MiqExpression
@edit[:new][:display_filter] = @rpt.display_filter.nil? ? nil : @rpt.display_filter.exp
expkey = :display_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:expression] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0 # Start at first exp
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Build display filter expression
@edit[:new][:display_filter] = @edit[expkey][:expression] if @edit[:new][:display_filter].nil? # Copy to new exp
# Get timeline fields
@edit[:new][:tl_field] = NOTHING_STRING
@edit[:new][:tl_position] = "Last"
if @rpt.timeline.kind_of?(Hash) # Timeline has any data
@edit[:new][:tl_field] = @rpt.timeline[:field] unless @rpt.timeline[:field].blank?
@edit[:new][:tl_position] = @rpt.timeline[:position] unless @rpt.timeline[:position].blank?
end
# Get the pdf page size, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:pdf]
@edit[:new][:pdf_page_size] = @rpt.rpt_options[:pdf][:page_size] || DEFAULT_PDF_PAGE_SIZE
else
@edit[:new][:pdf_page_size] = DEFAULT_PDF_PAGE_SIZE
end
# Get the hide details setting, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:summary]
@edit[:new][:hide_details] = @rpt.rpt_options[:summary][:hide_detail_rows]
else
@edit[:new][:hide_details] = false
end
# Get the timeout if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:queue_timeout]
@edit[:new][:queue_timeout] = @rpt.rpt_options[:queue_timeout]
else
@edit[:new][:queue_timeout] = nil
end
case @rpt.group
when "y"
@edit[:new][:group] = "Yes"
when "c"
@edit[:new][:group] = "Counts"
else
@edit[:new][:group] = "No"
@edit[:new][:row_limit] = @rpt.rpt_options[:row_limit].to_s if @rpt.rpt_options
end
# build selected fields array from the report record
@edit[:new][:sortby1] = NOTHING_STRING # Initialize sortby fields to nothing
@edit[:new][:sortby2] = NOTHING_STRING
@edit[:new][:pivot] = ReportController::PivotOptions.new
if params[:pressed] == "miq_report_new"
@edit[:new][:fields] = []
@edit[:new][:categories] = []
@edit[:new][:headers] = {}
@edit[:new][:col_formats] = {}
@edit[:pivot_cols] = {}
else
build_selected_fields(@rpt) # Create the field related @edit arrays and hashes
end
# Rebuild the tag descriptions in the new fields array to match the ones in available fields
@edit[:new][:fields].each do |nf|
tag = nf.first.split(':')
if nf.first.include?("Managed :")
entry = MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |a| a.last == nf.last }
nf[0] = entry ? entry.first : "#{tag} (Category not found)"
end
end
@edit[:current] = ["copy", "new"].include?(params[:action]) ? {} : copy_hash(@edit[:new])
# For trend reports, check for percent field chosen
if @rpt.db && @rpt.db == TREND_MODEL &&
MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find do|af|
af.last ==
@edit[:new][:perf_trend_db] + "-" + @edit[:new][:perf_trend_col]
end.first.include?("(%)")
@edit[:percent_col] = true
end
end
# Build the :fields array and :headers hash from the rpt record cols and includes hashes
# Rebuild the editor's field-related @edit[:new] structures (:fields,
# :headers, :col_formats), the pivot/sortby selections, and @edit[:pivot_cols]
# from a saved MiqReport record's col_order/headers/col_formats.
#
# rpt - the MiqReport record being edited.
# Finishes by regenerating @edit[:new][:field_order] via build_field_order.
def build_selected_fields(rpt)
fields = []
headers = {}
col_formats = {}
pivot_cols = {}
rpt.col_formats ||= Array.new(rpt.col_order.length) # Create array of nils if col_formats not present (backward compat)
rpt.col_order.each_with_index do |col, idx|
# Derive the internal field key ("db-col" or "db.includes-col") and the
# human readable label for this column.
if col.starts_with?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
field_key = rpt.db + "-" + col
field_value = col.gsub(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX, "")
elsif !col.include?(".") # Main table field
field_key = rpt.db + "-" + col
field_value = friendly_model_name(rpt.db) +
Dictionary.gettext(rpt.db + "." + col.split("__").first, :type => :column, :notfound => :titleize)
else # Included table field
inc_string = find_includes(col.split("__").first, rpt.include) # Get the full include string
field_key = rpt.db + "." + inc_string.to_s + "-" + col.split(".").last
if inc_string.to_s.ends_with?(".managed") || inc_string.to_s == "managed"
# don't titleize tag name, need it to lookup later to get description by tag name
field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) + col.split(".").last
else
field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) +
Dictionary.gettext(col.split(".").last.split("__").first, :type => :column, :notfound => :titleize)
end
end
# A "__" suffix marks a calculated pivot column (e.g. "col__sum"); track
# the calculation symbols per base key in pivot_cols.
if field_key.include?("__") # Check for calculated pivot column
field_key1, calc_typ = field_key.split("__")
pivot_cols[field_key1] ||= []
pivot_cols[field_key1] << calc_typ.to_sym
pivot_cols[field_key1].sort! # Sort the array
fields.push([field_value, field_key1]) unless fields.include?([field_value, field_key1]) # Add original col to fields array
else
fields.push([field_value, field_key]) # Add to fields array
end
# Create the groupby keys if groupby array is present
if rpt.rpt_options &&
rpt.rpt_options[:pivot] &&
rpt.rpt_options[:pivot][:group_cols] &&
rpt.rpt_options[:pivot][:group_cols].kind_of?(Array)
if rpt.rpt_options[:pivot][:group_cols].length > 0
@edit[:new][:pivot].by1 = field_key if col == rpt.rpt_options[:pivot][:group_cols][0]
end
if rpt.rpt_options[:pivot][:group_cols].length > 1
@edit[:new][:pivot].by2 = field_key if col == rpt.rpt_options[:pivot][:group_cols][1]
end
if rpt.rpt_options[:pivot][:group_cols].length > 2
@edit[:new][:pivot].by3 = field_key if col == rpt.rpt_options[:pivot][:group_cols][2]
end
end
# Create the sortby keys if sortby array is present
if rpt.sortby.kind_of?(Array)
if rpt.sortby.length > 0
# If first sortby field as a break suffix, set up sortby1 with a suffix
if MiqReport.is_break_suffix?(rpt.sortby[0].split("__")[1])
sort1, suffix1 = rpt.sortby[0].split("__") # Get sort field and suffix, if present
@edit[:new][:sortby1] = field_key + (suffix1 ? "__#{suffix1}" : "") if col == sort1
else # Not a break suffix sort field, just copy the field name to sortby1
@edit[:new][:sortby1] = field_key if col == rpt.sortby[0]
end
end
if rpt.sortby.length > 1
if MiqReport.is_break_suffix?(rpt.sortby[1].split("__")[1])
sort2, suffix2 = rpt.sortby[1].split("__") # Get sort field and suffix, if present
@edit[:new][:sortby2] = field_key + (suffix2 ? "__#{suffix2}" : "") if col == sort2
else # Not a break suffix sort field, just copy the field name to sortby1
@edit[:new][:sortby2] = field_key if col == rpt.sortby[1]
end
end
end
headers[field_key] = rpt.headers[idx] # Add col to the headers hash
if field_key.include?("__") # if this a pivot calc field?
headers[field_key.split("__").first] = field_value # Save the original field key as well
end
col_formats[field_key] = rpt.col_formats[idx] # Add col to the headers hash
end
# Remove the non-cost and owner columns from the arrays for Chargeback
if Chargeback.db_is_chargeback?(rpt.db)
f_len = fields.length
for f_idx in 1..f_len # Go thru fields in reverse
f_key = fields[f_len - f_idx].last
next if f_key.ends_with?(*CHARGEBACK_ALLOWED_FIELD_SUFFIXES) || f_key.include?('managed') || f_key.include?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
headers.delete(f_key)
col_formats.delete(f_key)
fields.delete_at(f_len - f_idx)
end
end
@edit[:new][:fields] = fields
@edit[:new][:headers] = headers
@edit[:new][:col_formats] = col_formats
@edit[:pivot_cols] = pivot_cols
build_field_order
end
# Create the field_order hash from the fields and pivot_cols structures
# Create @edit[:new][:field_order] from the selected fields. When pivoting is
# active, a field that is a pivot calculation column expands into one entry
# per selected calculation (e.g. "Name (Sum)" / "key__sum"); otherwise the
# field is carried over as-is.
def build_field_order
  @edit[:new][:field_order] = []
  pivoting = @edit[:new][:pivot] && @edit[:new][:pivot].by1 != NOTHING_STRING
  @edit[:new][:fields].each do |field|
    label, key = field
    if pivoting && @edit[:pivot_cols].key?(key)
      # Expand the column into its chosen pivot calculations, in PIVOTS order
      MiqReport::PIVOTS.each do |pivot|
        calc = pivot.first
        next unless @edit[:pivot_cols][key].include?(calc)
        @edit[:new][:field_order].push(["#{label} (#{calc.to_s.titleize})", "#{key}__#{calc}"])
      end
    else
      @edit[:new][:field_order].push(field)
    end
  end
end
# Build the full includes string by finding the column in the includes hash
# Build the full includes string by locating +col+ (a dotted "table...field"
# path) within the nested +includes+ hash. Returns the dotted table path
# (e.g. "hardware.disks") or nil when the column cannot be found.
def find_includes(col, includes)
  parts = col.split(".")
  field = parts.last
  tables = parts[0..-2]
  table = tables.first
  entry = includes[table]
  # Direct hit: this level lists the table and its columns contain the field
  return table if entry && entry["columns"] && entry["columns"].include?(field)
  # Descend along the explicit table path when this level nests further includes
  if entry && entry["include"]
    nested = find_includes(tables.drop(1).push(field).join('.'), entry["include"])
    return "#{table}.#{nested}" if nested
  end
  # Otherwise scan every included table that itself has nested includes
  includes.each_pair do |key, inc|
    next unless inc["include"]
    nested = find_includes(col, inc["include"])
    return nil if nested.nil? # preserve original early-abort on first miss
    return "#{key}.#{nested}"
  end
  nil
end
# Point the tree node (x_node) at @rpt under the "Custom" folder of the
# current group's report menu, when the report is listed there.
# @sb[:rpt_menu] is a nested array of [group_title, [[folder, [reports]]]].
def setnode_for_customreport
  @sb[:rpt_menu].each_with_index do |level1_nodes, i|
    next unless level1_nodes[0] == @sb[:grp_title]
    level1_nodes[1].each_with_index do |level2_nodes, k|
      # Check for the existence of the Custom folder in the Reports tree and
      # check if at least one report exists underneath it
      next unless level2_nodes[0].downcase == "custom" && level2_nodes[1].count > 1
      # BUG FIX: was `each_with_index do |report|` — the index was never
      # bound or used, so plain `each` is the correct iterator here.
      level2_nodes[1].each do |report|
        self.x_node = "xx-#{i}_xx-#{i}-#{k}_rep-#{to_cid(@rpt.id)}" if report == @rpt.name
      end
    end
  end
end
# Validate the report configuration held in @edit[:new] plus the model's own
# ActiveModel validations. Flash errors are recorded and @sb[:miq_tab] is
# switched to the first offending tab. Returns true when no errors were added.
def valid_report?(rpt)
active_tab = 'edit_1'
if @edit[:new][:model] == TREND_MODEL
# Trend reports need a trend column and a (numeric) target limit
unless @edit[:new][:perf_trend_col]
add_flash(_('Trending for is required'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Trend Target Limit must be configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('At least one Field must be selected'), :error)
end
if Chargeback.db_is_chargeback?(@edit[:new][:model])
# Each "show costs by" choice requires its matching selection to be filled in
msg = case @edit[:new][:cb_show_typ]
when nil
_('Show Costs by must be selected')
when 'owner'
_('An Owner must be selected') unless @edit[:new][:cb_owner_id]
when 'tenant'
_('A Tenant Category must be selected') unless @edit[:new][:cb_tenant_id]
when 'tag'
if !@edit[:new][:cb_tag_cat]
_('A Tag Category must be selected')
elsif !@edit[:new][:cb_tag_value]
_('A Tag must be selected')
end
when 'entity'
unless @edit[:new][:cb_entity_id]
_("A specific #{ui_lookup(:model => @edit[:new][:cb_model])} or all must be selected")
end
end
if @edit[:new][:cb_groupby] == "tag" && !@edit[:new][:cb_groupby_tag].present?
msg = _('A Group by Tag must be selected')
end
if msg
add_flash(msg, :error)
active_tab = 'edit_3'
end
end
# Validate column styles
unless rpt.col_options.blank? || @edit[:new][:field_order].nil?
@edit[:new][:field_order].each do |f| # Go thru all of the cols in order
col = f.last.split('.').last.split('-').last
if val = rpt.col_options[col] # Skip if no options for this col
next unless val.key?(:style) # Skip if no style options
val[:style].each_with_index do |s, s_idx| # Go through all of the configured ifs
if s[:value]
if e = MiqExpression.atom_error(rpt.col_to_expression_col(col.split('__').first), # See if the value is in error
s[:operator],
s[:value])
# NOTE(review): `msg` is assigned here but never read afterwards —
# add_flash already records the error, so this looks like dead code.
msg = case s_idx + 1
when 1
add_flash(_("Styling for '%{item}', first value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
when 2
add_flash(_("Styling for '%{item}', second value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
when 3
add_flash(_("Styling for '%{item}', third value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
end
active_tab = 'edit_9'
end
end
end
end
end
end
unless rpt.valid? # Check the model for errors
rpt.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
end
@sb[:miq_tab] = active_tab if flash_errors?
@flash_array.nil?
end
# Check for valid report configuration in @edit[:new]
# Check if chargeback field is valid
# Returns a truthy value when the chargeback filter selections in @edit[:new]
# are complete for the chosen "show costs by" type; false when the type is
# not a recognized one, nil/false when a required selection is missing.
def valid_chargeback_fields
  opts = @edit[:new]
  # There are valid show typ fields
  return false unless %w(owner tenant tag entity).include?(opts[:cb_show_typ])
  case opts[:cb_show_typ]
  when 'owner'  then opts[:cb_owner_id]
  when 'tenant' then opts[:cb_tenant_id]
  when 'tag'    then opts[:cb_tag_cat] && opts[:cb_tag_value]
  when 'entity' then opts[:cb_entity_id] && opts[:cb_provider_id]
  end
end
# Check for tab switch error conditions
# Validate that the tab requested in params[:tab] may be shown given the
# current @edit[:new] state. Records flash errors and, on error, resets
# @sb[:miq_tab] to a safe fallback tab.
def check_tabs
@sb[:miq_tab] = params[:tab]
active_tab = 'edit_1'
# Tab ids are "edit_N"; dispatch on the numeric part
case @sb[:miq_tab].split('_')[1]
when '8'
if @edit[:new][:fields].empty?
add_flash(_('Consolidation tab is not available until at least 1 field has been selected'), :error)
end
when '2'
if @edit[:new][:fields].empty?
add_flash(_('Formatting tab is not available until at least 1 field has been selected'), :error)
end
when '3'
# Filter tab: trend reports need trend settings, others need fields
if @edit[:new][:model] == TREND_MODEL
unless @edit[:new][:perf_trend_col]
add_flash(_('Filter tab is not available until Trending for field has been selected'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Filter tab is not available until Trending Target Limit has been configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('Filter tab is not available until at least 1 field has been selected'), :error)
end
when '4'
if @edit[:new][:fields].empty?
add_flash(_('Summary tab is not available until at least 1 field has been selected'), :error)
end
when '5'
# Charts also require a sort field; send the user back to Summary if missing
if @edit[:new][:fields].empty?
add_flash(_('Charts tab is not available until at least 1 field has been selected'), :error)
elsif @edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING
add_flash(_('Charts tab is not available unless a sort field has been selected'), :error)
active_tab = 'edit_4'
end
when '6'
# Timeline needs at least one datetime field among the selected fields
if @edit[:new][:fields].empty?
add_flash(_('Timeline tab is not available until at least 1 field has been selected'), :error)
else
found = false
@edit[:new][:fields].each do |field|
if MiqReport.get_col_type(field[1]) == :datetime
found = true
break
end
end
unless found
add_flash(_('Timeline tab is not available unless at least 1 time field has been selected'), :error)
end
end
when '7'
# Preview mirrors the Filter tab checks plus chargeback completeness
if @edit[:new][:model] == TREND_MODEL
unless @edit[:new][:perf_trend_col]
add_flash(_('Preview tab is not available until Trending for field has been selected'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Preview tab is not available until Trend Target Limit has been configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit: Value must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('Preview tab is not available until at least 1 field has been selected'), :error)
elsif Chargeback.db_is_chargeback?(@edit[:new][:model]) && !valid_chargeback_fields
add_flash(_('Preview tab is not available until Chargeback Filters has been configured'), :error)
active_tab = 'edit_3'
end
when '9'
if @edit[:new][:fields].empty?
add_flash(_('Styling tab is not available until at least 1 field has been selected'), :error)
end
end
# On any validation error stay on (or move to) the fallback tab
@sb[:miq_tab] = active_tab if flash_errors?
end
end
|
require 'mechanize'
require 'logger'
# Utilize mechanize to do stuff on GLS webpage.
# Utilize mechanize to do stuff on GLS webpage.
class GLSMech
# Mechanize agent plus the login credentials, set by the caller.
attr_accessor :mech
attr_accessor :user
attr_accessor :pass
# Setup the mech.
def initialize
@mech = Mechanize.new
# Pretend to be a desktop browser so the portal serves its normal pages
@mech.user_agent_alias = 'Windows Mozilla'
end
# Enable logging to given file.
def log_to filename
@mech.log = Logger.new filename
end
# Saves parcel label as pdf, does not overwrite file if exists,
# returns filename that label was saved to.
# returns nil if login, creation or redirect failed.
def save_parcel_label parcel_job, filename
return nil if !login! @user, @pass
# login! leaves @mech on the parcel creation page; fill its first form.
form = @mech.page.forms.first
form.field_with(:name => 'txtName1').value = parcel_job.name
form.field_with(:name => 'txtStreet').value = parcel_job.street
form.field_with(:name => 'txtBlockNo').value = parcel_job.streetno
form.field_with(:name => 'txtZipCodeDisplay').value = parcel_job.zip
form.field_with(:name => 'txtCity').value = parcel_job.city
form.field_with(:name => 'txtWeight').value = parcel_job.weight
form.field_with(:name => 'txtDate').value = parcel_job.date
@mech.submit(form, form.buttons.first)
# On success the label pdf is embedded in an iframe on the result page
pdf_iframe = @mech.page.iframes.first
if pdf_iframe
return @mech.page.iframes.first.content.save_as filename
elsif @mech.log
# Keep the failing page around for debugging when logging is enabled
@mech.page.save_as "gls_agent_debug_save-parcel-fail.html"
end
return nil
end
private
# Login to GLS parcel creation web page using provided credentials.
# returns true if login and navigation afterwards succeeded.
def login! username, password
target_url = 'http://www.your-gls.eu/276-I-PORTAL-WEB/content/GLS/DE03/DE/15005.htm'
page = @mech.get target_url
form = page.forms.first
# NOTE(review): fields[5]/[6] are position-dependent on the portal's login
# form markup — assumed to be username/password; confirm against the site.
form.fields[5].value = username
form.fields[6].value = password
form.submit
# Move on to target page.
page = @mech.get target_url
page.uri.to_s == target_url
end
end
Stub for handling multiple labels; raise on errors.
require 'mechanize'
require 'logger'
require 'date'
require 'gls_agent'
# Utilize mechanize to do stuff on GLS webpage.
# Utilize mechanize to do stuff on GLS webpage.
class GLSMech
  # Mechanize agent plus the login credentials, set by the caller.
  attr_accessor :mech
  attr_accessor :user
  attr_accessor :pass

  # Setup the mech.
  def initialize
    @mech = Mechanize.new
    # Pretend to be a desktop browser so the portal serves its normal pages
    @mech.user_agent_alias = 'Windows Mozilla'
  end

  # Enable logging to given file.
  def log_to filename
    @mech.log = Logger.new filename
  end

  # Saves parcel labels as pdf, does not overwrite file if exists.
  # Yields each saved filename to the given block, or nil for a parcel
  # whose creation or redirect failed.
  # returns nil if login failed.
  def save_parcel_labels parcel_jobs, filenames
    return nil if !login! @user, @pass
    # BUG FIX: the block parameter was named `parcel` while the body used
    # `parcel_job`, raising NameError on the first iteration.
    parcel_jobs.zip(filenames).each do |parcel_job, filename|
      target_url = 'http://www.your-gls.eu/276-I-PORTAL-WEB/content/GLS/DE03/DE/15005.htm'
      page = @mech.get target_url
      # NOTE(review): fetched twice, mirroring login! — presumably the portal
      # redirects on the first hit; confirm before removing the duplicate get.
      page = @mech.get target_url
      fail "not logged in" if page.uri.to_s != target_url
      form = @mech.page.forms.first
      form.field_with(:name => 'txtName1').value = parcel_job.name
      form.field_with(:name => 'txtName2').value = parcel_job.company
      form.field_with(:name => 'txtStreet').value = parcel_job.street
      form.field_with(:name => 'txtBlockNo').value = parcel_job.streetno
      form.field_with(:name => 'txtZipCodeDisplay').value = parcel_job.zip
      form.field_with(:name => 'txtCity').value = parcel_job.city
      form.field_with(:name => 'txtWeight').value = parcel_job.weight
      form.field_with(:name => 'txtDate').value = parcel_job.date
      @mech.submit(form, form.buttons.first)
      # On success the label pdf is embedded in an iframe on the result page
      pdf_iframe = @mech.page.iframes.first
      @mech.page.save_as "save_label#{DateTime.now.strftime('%s')}.html"
      if pdf_iframe
        yield pdf_iframe.content.save_as filename
      else
        # BUG FIX: `yield nil` previously ran unconditionally, so a successful
        # label yielded twice (the filename, then nil). Only yield nil when
        # the label could not be saved.
        @mech.page.save_as "gls_agent_debug_save-parcel-fail.html" if @mech.log
        yield nil
      end
    end
  end

  # Saves parcel label as pdf, does not overwrite file if exists,
  # returns filename that label was saved to,
  # or nil if login, creation or redirect failed.
  # Raises GLSAgent::GLSEndpointError when the portal reports an error.
  def save_parcel_label parcel_job, filename
    return nil if !login! @user, @pass
    # login! leaves @mech on the parcel creation page; fill its first form.
    form = @mech.page.forms.first
    form.field_with(:name => 'txtName1').value = parcel_job.name
    form.field_with(:name => 'txtName2').value = parcel_job.company
    form.field_with(:name => 'txtStreet').value = parcel_job.street
    form.field_with(:name => 'txtBlockNo').value = parcel_job.streetno
    form.field_with(:name => 'txtZipCodeDisplay').value = parcel_job.zip
    form.field_with(:name => 'txtCity').value = parcel_job.city
    form.field_with(:name => 'txtWeight').value = parcel_job.weight
    form.field_with(:name => 'txtDate').value = parcel_job.date
    @mech.submit(form, form.buttons.first)
    pdf_iframe = @mech.page.iframes.first
    @mech.page.save_as "save_label#{DateTime.now.strftime('%s')}.html"
    # The portal reports validation errors in elements with class "prefix"
    if !@mech.page.search(".prefix").empty?
      error_text = @mech.page.search(".prefix")[0].text
      raise GLSAgent::GLSEndpointError.new error_text
    end
    if pdf_iframe
      return pdf_iframe.content.save_as filename
    elsif @mech.log
      @mech.page.save_as "gls_agent_debug_save-parcel-fail.html"
    end
    return nil
  end

  private

  # Login to GLS parcel creation web page using provided credentials.
  # returns true if login and navigation afterwards succeeded.
  def login! username, password
    target_url = 'http://www.your-gls.eu/276-I-PORTAL-WEB/content/GLS/DE03/DE/15005.htm'
    page = @mech.get target_url
    form = page.forms.first
    # NOTE(review): fields[5]/[6] are position-dependent on the portal's login
    # form markup — assumed to be username/password; confirm against the site.
    form.fields[5].value = username
    form.fields[6].value = password
    form.submit
    # Move on to target page.
    page = @mech.get target_url
    page.uri.to_s == target_url
  end
end
|
module ReportController::Reports::Editor
extend ActiveSupport::Concern
CHARGEBACK_ALLOWED_FIELD_SUFFIXES = %w(_cost -owner_name _metric -provider_name -provider_uid -project_uid -archived).freeze
# Start a brand-new report: verify privileges, clear any report id carried
# over from the report show screen's Add button, and reuse the edit workflow.
def miq_report_new
  assert_privileges("miq_report_new")
  @_params.delete(:id) # incase add button was pressed from report show screen.
  miq_report_edit
end
# Copy an existing report: load it, drop its id so a later save creates a
# new record, and open the edit screen (or just switch tabs on a tab change).
def miq_report_copy
  assert_privileges("miq_report_copy")
  @report = nil # Clear any saved report object
  if params[:tab] # Came in to change the tab
    check_tabs
    build_edit_screen
  else
    @sb[:miq_tab] = "edit_1"
    @rpt = MiqReport.find(params[:id])
    @rpt.id = nil # Treat as a new report
    set_form_vars
    build_edit_screen
  end
  # BUG FIX: was `@ina_form` (typo, never read anywhere); the flag the views
  # check is @in_a_form, as used in the other actions of this module.
  @in_a_form = @lock_tree = true
  replace_right_cell
end
# Add/save/edit a report. Handles the Cancel, Add/Save and Reset/tab-change
# button flows from the report editor form.
def miq_report_edit
  assert_privileges("miq_report_edit")
  case params[:button]
  when "cancel"
    # The message differs for editing an existing report vs. adding a new one
    if @edit[:rpt_id]
      add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport"), :name => @edit[:rpt_title]})
    else
      add_flash(_("Add of new %{model} was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport")})
    end
    @edit = session[:edit] = nil # clean out the saved info
    replace_right_cell
  when "add", "save"
    id = params[:id] ? params[:id] : "new"
    return unless load_edit("report_edit__#{id}", "replace_cell__explorer")
    get_form_vars
    @changed = (@edit[:new] != @edit[:current])
    @rpt = @edit[:rpt_id] ? find_by_id_filtered(MiqReport, params[:id]) : MiqReport.new
    set_record_vars(@rpt)
    unless valid_report?(@rpt)
      build_edit_screen
      replace_right_cell
      return
    end
    # A chart cannot be rendered without a sort field, so block the save
    if @edit[:new][:graph_type] && (@edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING)
      add_flash(_("Report can not be saved unless sort field has been configured for Charts"), :error)
      @sb[:miq_tab] = "edit_4"
      build_edit_screen
      replace_right_cell
      return
    end
    if @rpt.save
      # update report name in menu if name is edited
      menu_repname_update(@edit[:current][:name], @edit[:new][:name]) if @edit[:current][:name] != @edit[:new][:name]
      AuditEvent.success(build_saved_audit(@rpt, @edit))
      if @edit[:rpt_id]
        add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name})
      else
        add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name})
      end
      # only do this for new reports
      unless @edit[:rpt_id]
        self.x_node = "xx-#{@sb[:rpt_menu].length}_xx-#{@sb[:rpt_menu].length}-0"
        setnode_for_customreport
      end
      @edit = session[:edit] = nil # clean out the saved info
      if role_allows(:feature => "miq_report_widget_editor")
        # all widgets for this report
        get_all_widgets("report", from_cid(x_node.split('_').last))
      end
      replace_right_cell(:replace_trees => [:reports])
    else
      # BUG FIX: was `rpt.errors` — no local `rpt` exists in this method, so a
      # failed save raised NameError instead of showing the record's errors;
      # the record being saved is @rpt.
      @rpt.errors.each do |field, msg|
        add_flash("#{field.to_s.capitalize} #{msg}", :error)
      end
      @in_a_form = true
      session[:changed] = @changed ? true : false
      @changed = true
      replace_right_cell
    end
  else
    add_flash(_("All changes have been reset"), :warning) if params[:button] == "reset"
    @in_a_form = true
    @report = nil # Clear any saved report object
    if params[:tab] # Came in to change the tab
      @rpt = @edit[:rpt_id] ? MiqReport.find(@edit[:rpt_id]) : MiqReport.new
      check_tabs
      build_edit_screen
    else
      @sb[:miq_tab] = "edit_1"
      @rpt = params[:id] && params[:id] != "new" ? MiqReport.find(params[:id]) : MiqReport.new
      if @rpt.rpt_type == "Default"
        flash = "Default reports can not be edited"
        redirect_to :action => "show", :id => @rpt.id, :flash_msg => flash, :flash_error => true
        return
      end
      set_form_vars
      build_edit_screen
    end
    @changed = (@edit[:new] != @edit[:current])
    session[:changed] = @changed
    @lock_tree = true
    replace_right_cell
  end
end
# AJAX driven routine to check for changes in ANY field on the form
# AJAX handler fired on ANY form field change: re-reads the form values,
# rebuilds the screen state, and streams back only the page divs affected.
def form_field_changed
return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
get_form_vars
build_edit_screen
@changed = (@edit[:new] != @edit[:current])
render :update do |page|
page << javascript_prologue
# The flash area is skipped while only the column lists are refreshing
page.replace("flash_msg_div", :partial => "layouts/flash_msg") unless @refresh_div && @refresh_div != "column_lists"
page.replace(@refresh_div, :partial => @refresh_partial) if @refresh_div
page.replace("chart_sample_div", :partial => "form_chart_sample") if @refresh_div == "chart_div"
page.replace("tl_sample_div", :partial => "form_tl_sample") if @refresh_div == "tl_settings_div"
page.replace_html("calc_#{@calc_div}_div", :text => @calc_val) if @calc_div
page << "miqSparkle(false);"
page << javascript_for_miq_button_visibility_changed(@changed)
if @tl_changed # Reload the screen if the timeline data was changed
page.replace_html("tl_sample_div", :partial => "form_tl_sample") if @tl_field != NOTHING_STRING
elsif @formatting_changed # Reload the screen if the formatting pulldowns need to be reset
page.replace_html("formatting_div", :partial => "form_formatting")
elsif @tl_repaint
# page << "tl.paint();"
page << javascript_hide("notification")
end
end
end
# AJAX handler for switching between the record and display filter panes;
# sets @expkey from the pressed button name and re-renders the filter div.
def filter_change
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  button_match = /^(record|display)_filter$/.match(params[:button].to_s)
  @expkey = button_match[0].to_sym if button_match
  render :update do |page|
    page << javascript_prologue
    page.replace("filter_div", :partial => "form_filter")
    page << "miqSparkle(false);"
  end
end
private
# Prepare all the instance variables the edit screen partials need for the
# currently selected tab (@sb[:miq_tab]).
def build_edit_screen
build_tabs
get_time_profiles # Get time profiles list (global and user specific)
# Tab ids are "edit_N"; build only what the active tab needs
case @sb[:miq_tab].split("_")[1]
when "1" # Select columns
@edit[:models] ||= reportable_models
# Add the blank choice if no table chosen yet
# @edit[:models].insert(0,["<Choose>", "<Choose>"]) if @edit[:new][:model] == nil && @edit[:models][0][0] != "<Choose>"
if @edit[:new][:model].nil?
if @edit[:models][0][0] != "<Choose>"
@edit[:models].insert(0, ["<Choose>", "<Choose>"])
end
else
if @edit[:models][0][0] == "<Choose>"
@edit[:models].delete_at(0)
end
end
when "8" # Consolidate
# Build group chooser arrays
# Each pulldown excludes the columns already picked by the previous ones
@pivots1 = @edit[:new][:fields].dup
@pivots2 = @pivots1.dup.delete_if { |g| g[1] == @edit[:new][:pivotby1] }
@pivots3 = @pivots2.dup.delete_if { |g| g[1] == @edit[:new][:pivotby2] }
@pivotby1 = @edit[:new][:pivotby1]
@pivotby2 = @edit[:new][:pivotby2]
@pivotby3 = @edit[:new][:pivotby3]
when "2" # Formatting
# @edit[:calc_xml] = build_calc_combo_xml # Get the combobox XML for any numeric fields
when "3" # Filter
# Build record filter expression
if @edit[:miq_exp] || # Is this stored as an MiqExp object
["new", "copy", "create"].include?(request.parameters["action"]) # or it's a new condition
@edit[:record_filter][:exp_idx] ||= 0 # Start at first exp
new_record_filter = @edit[:new][:record_filter]
@edit[:record_filter][:expression] = copy_hash(new_record_filter) unless new_record_filter.blank?
@expkey = :record_filter
# Initialize the exp array
exp_array(:init, @edit[:record_filter][:expression]) if @edit[:record_filter][:exp_array].nil?
@edit[:record_filter][:exp_table] = exp_build_table(@edit[:record_filter][:expression])
exp_get_prefill_types # Build prefill lists
@edit[:record_filter][:exp_model] = @edit[:new][:model] # Set the model for the expression editor
end
# Build display filter expression
@edit[:display_filter][:exp_idx] ||= 0 # Start at first exp
new_display_filter = @edit[:new][:display_filter]
@edit[:display_filter][:expression] = copy_hash(new_display_filter) unless new_display_filter.blank?
@expkey = :display_filter
# Initialize the exp array
exp_array(:init, @edit[:display_filter][:expression]) if @edit[:display_filter][:exp_array].nil?
@edit[:display_filter][:exp_table] = exp_build_table(@edit[:display_filter][:expression])
cols = @edit[:new][:field_order]
@edit[:display_filter][:exp_available_fields] = MiqReport.display_filter_details(cols, :field)
cols = @edit[:new][:fields]
@edit[:display_filter][:exp_available_tags] = MiqReport.display_filter_details(cols, :tag)
@edit[:display_filter][:exp_model] = "_display_filter_" # Set model for display filter
@expkey = :record_filter # Start with Record Filter showing
if @edit[:new][:perf_interval] && !@edit[:new][:time_profile]
set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
end
when "4" # Summarize
# Build sort chooser arrays(@edit[:new][:fields], :field)
@sortby1 = @edit[:new][:sortby1]
@sortby2 = @edit[:new][:sortby2]
@sort1 = @edit[:new][:field_order].dup
@sort2 = @sort1.dup.delete_if { |s| s[1] == @sortby1.split("__").first }
when "5" # Charts
options = chart_fields_options
if options.empty?
@edit[:new][:chart_column] = nil
else
# NOTE(review): this expression computes options[0][1] but discards it —
# presumably the intent was to assign @edit[:new][:chart_column]; confirm.
options[0][1] unless options.detect { |_, v| v == @edit[:new][:chart_column] }
end
when "6" # Timeline
# Collect the datetime fields usable as the timeline axis
@tl_fields = []
@edit[:new][:fields].each do |field|
if MiqReport.get_col_type(field[1]) == :datetime
@tl_fields.push(field)
end
end
@tl_field = @edit[:new][:tl_field]
if @edit[:new][:tl_position] == "Last"
@position_time = format_timezone(Time.now, "UTC", nil)
else
@position_time = format_timezone(Time.now - 1.year, "UTC", nil)
end
@timeline = true if @tl_field != NOTHING_STRING
build_timeline_units
# Choices for "last N units" depend on the selected unit
@tl_last_time_choices = case @edit[:new][:tl_last_unit]
when "Minutes" then Array.new(12) { |t| (t * 5 + 5).to_s }
when "Hours" then Array.new(24) { |t| (t + 1).to_s }
when "Days" then Array.new(31) { |t| (t + 1).to_s }
when "Weeks" then Array.new(4) { |t| (t + 1).to_s }
when "Months" then Array.new(12) { |t| (t + 1).to_s }
when "Years" then Array.new(10) { |t| (t + 1).to_s }
end
if @edit[:new][:tl_last_time].nil? && @edit[:new][:tl_last_unit] != SHOWALL_STRING
@edit[:new][:tl_last_time] = @tl_last_time_choices.first
end
when "7" # Preview
# generate preview report when
end
@in_a_form = true
if ["new", "copy", "create"].include?(request.parameters["action"])
# drop_breadcrumb( {:name=>"Add Report", :url=>"/report/new"} )
@gtl_url = "/new"
else
# drop_breadcrumb( {:name=>"Edit Report", :url=>"/report/edit"} )
@gtl_url = "/edit"
end
end
# Build [label, model] pairs for the report-model pulldown; labels are the
# pluralized, human-readable names looked up through the Dictionary.
def reportable_models
  MiqReport.reportable_models.map do |model|
    label = Dictionary.gettext(model, :type => :model, :notfound => :titleize, :plural => true)
    [label, model]
  end
end
# Create the arrays for the start/end interval pulldowns
# Build the [label, offset-in-seconds-as-string] choice arrays for the
# performance start/end interval pulldowns and stash them in
# @edit[:start_array] / @edit[:end_array]. Also seeds default
# @edit[:new][:perf_start] / [:perf_end] offsets when unset.
def build_perf_interval_arrays(interval)
case interval
when "hourly"
# End offsets are measured back from today ("0").
end_array = [
["Today", "0"],
["Yesterday", 1.day.to_s]
]
5.times { |i| end_array.push(["#{i + 2} days ago", (i + 2).days.to_s]) }
4.times { |i| end_array.push(["#{pluralize(i + 1, "week")} ago", (i + 1).weeks.to_s]) }
# NOTE(review): label reads "#{i + 2} months ago" but the offset is
# (i + 1).months — apparent off-by-one vs the day/week entries; confirm.
5.times { |i| end_array.push(["#{pluralize(i + 2, "month")} ago", (i + 1).months.to_s]) }
start_array = []
6.times { |i| start_array.push(["#{pluralize(i + 1, "day")}", (i + 1).days.to_s]) }
4.times { |i| start_array.push(["#{pluralize(i + 1, "week")}", (i + 1).weeks.to_s]) }
# NOTE(review): same label/offset mismatch as the hourly end_array — verify.
5.times { |i| start_array.push(["#{pluralize(i + 2, "month")}", (i + 1).months.to_s]) }
@edit[:new][:perf_end] ||= "0"
@edit[:new][:perf_start] ||= 1.day.to_s
when "daily"
# Daily end offsets are measured back from yesterday ("0"), hence the
# (i + 1) offsets for "i + 2 days ago" and the "- 1.day" adjustments below.
end_array = [
["Yesterday", "0"] # Start with yesterday, since we only allow full 24 hour days in daily trending
]
5.times { |i| end_array.push(["#{i + 2} days ago", (i + 1).days.to_s]) }
3.times { |i| end_array.push(["#{pluralize((i + 1), "week")} ago", ((i + 1).weeks - 1.day).to_s]) }
6.times { |i| end_array.push(["#{pluralize((i + 1), "month")} ago", ((i + 1).months - 1.day).to_s]) }
start_array = []
5.times { |i| start_array.push(["#{pluralize(i + 2, "day")}", (i + 2).days.to_s]) }
3.times { |i| start_array.push(["#{pluralize((i + 1), "week")}", (i + 1).weeks.to_s]) }
11.times { |i| start_array.push(["#{pluralize((i + 1), "month")}", (i + 1).months.to_s]) }
start_array.push(["1 year", 1.year.to_i.to_s]) # For some reason, 1.year is a float, so use to_i to get rid of decimals
@edit[:new][:perf_end] ||= "0"
@edit[:new][:perf_start] ||= 2.days.to_s
end
@edit[:start_array] = start_array
@edit[:end_array] = end_array
end
# This method figures out what to put in each band unit pulldown array
# Derive the unit choices (@units1/2/3) offered for each of the three
# timeline bands. The choices come from slicing the space-joined BAND_UNITS
# string around the units currently selected in the neighboring bands, so a
# band can only pick units on the correct side of its neighbors.
# No-op when no bands are defined yet.
def build_timeline_units
unless @edit[:new][:tl_bands].blank?
split1 = BAND_UNITS.join(" ").split(@edit[:unit2]).first # Split on the second band unit
@units1 = split1.split(" ") # Grab the units before the second band
split2 = BAND_UNITS.join(" ").split(@edit[:unit1]).last # Split on the first band unit
split3 = split2.split(@edit[:unit3]) # Split the rest on the 3rd unit
@units2 = split3.first.split(" ") # Grab the first part for the 2nd unit
split4 = BAND_UNITS.join(" ").split(@edit[:unit2]) # Split on the second band unit
@units3 = split4.last.split(" ") # Grab the last part for the 3rd unit
end
end
# Reset report column fields if model or interval was changed
# Reset every column-derived report setting in @edit[:new]; called when the
# base model or performance interval changes and the previously chosen
# columns no longer apply.
def reset_report_col_fields
  new_settings = @edit[:new]
  new_settings[:fields] = []   # chosen columns
  new_settings[:headers] = {}  # per-column header text
  # Consolidation groupings and sort choices back to "<Nothing>"
  %i[pivotby1 pivotby2 pivotby3 sortby1 sortby2].each do |key|
    new_settings[key] = NOTHING_STRING
  end
  new_settings[:categories] = []
  # Clear filter, chart, trend and limit settings
  %i[filter_operator filter_string graph_type chart_mode chart_column
     perf_trend_col perf_trend_db perf_trend_pct1 perf_trend_pct2
     perf_trend_pct3 perf_limit_col perf_limit_val
     record_filter display_filter].each do |key|
    new_settings[key] = nil
  end
  @edit[:miq_exp] = true
end
# Assemble @tabs for the report editor. Trend and chargeback reports get a
# reduced tab set; everything else gets the full set. The currently active
# tab (from @sb[:miq_tab]) is prepended as the first element.
def build_tabs
  prefix = "edit"
  model = @edit[:new][:model]
  @tabs =
    if model == TREND_MODEL
      [["#{prefix}_1", _("Columns")],
       ["#{prefix}_3", _("Filter")],
       ["#{prefix}_7", _("Preview")]]
    elsif Chargeback.db_is_chargeback?(model.to_s)
      [["#{prefix}_1", _("Columns")],
       ["#{prefix}_2", _("Formatting")],
       ["#{prefix}_3", _("Filter")],
       ["#{prefix}_7", _("Preview")]]
    else
      [["#{prefix}_1", _("Columns")],
       ["#{prefix}_8", _("Consolidation")],
       ["#{prefix}_2", _("Formatting")],
       ["#{prefix}_9", _("Styling")],
       ["#{prefix}_3", _("Filter")],
       ["#{prefix}_4", _("Summary")],
       ["#{prefix}_5", _("Charts")],
       ["#{prefix}_6", _("Timeline")],
       ["#{prefix}_7", _("Preview")]]
    end
  active = @sb[:miq_tab].split("_")[1] # number of the active tab
  @tabs.unshift(["#{prefix}_#{active}", ""]) # active-tab marker goes first
end
# Get variables from edit form
# Pull all report-editor form fields from params into @edit, delegating each
# functional area to its gfv_* helper, then sweep the remaining params for
# prefix-keyed values (column headers, formats, calculations, styles).
def get_form_vars
  @assigned_filters = []
  gfv_report_fields       # Global report fields
  gfv_move_cols_buttons   # Move cols buttons
  gfv_model               # Model changes
  gfv_trend               # Trend fields
  gfv_performance         # Performance fields
  gfv_chargeback          # Chargeback fields
  gfv_charts              # Charting fields
  gfv_pivots              # Consolidation fields
  gfv_sort                # Summary fields
  gfv_timeline            # Timeline fields
  params.each do |key, value|
    prefix = key.split("_").first
    case prefix
    when "hdr" # column header text: "hdr_<field>"
      @edit[:new][:headers][key.split("_")[1..-1].join("_")] = value
    when "fmt" # column format: "fmt_<field>"
      fmt_key = key.gsub("___", ".") # Put period sub table separator back into the key
      @edit[:new][:col_formats][fmt_key.split("_")[1..-1].join("_")] = value.blank? ? nil : value.to_sym
      @formatting_changed = value.blank?
    when "calc" # group calculation checkboxes
      gfv_key_group_calculations(key, value)
    when "pivotcalc" # pivot calculation checkboxes
      gfv_key_pivot_calculations(key, value)
    else
      # Style fields use several prefixes (style/styleop/styleval/...)
      gfv_key_style(key, value) if prefix && prefix.starts_with?("style")
    end
  end
end
# Handle params starting with "calc"
# Record the group-calculation aggregates chosen for a field.  The param key
# is "calc_<field_order index>"; the value is a comma-separated aggregate
# list.  ":null" placeholders (sent when nothing is checked) are discarded.
def gfv_key_group_calculations(key, value)
  idx = key.split("_").last.to_i
  field = @edit[:new][:field_order][idx].last # field name at that index
  aggregates = value.split(",").sort.map(&:to_sym).reject { |a| a == :null }
  @edit[:new][:col_options][field_to_col(field)] = {:grouping => aggregates}
end
# Handle params starting with "pivotcalc"
# Record the pivot-calculation aggregates chosen for a field (param key
# "pivotcalc_<fields index>").  Each aggregate gets a "<field>__<agg>" header
# built from the field's original header, then the field order is rebuilt.
def gfv_key_pivot_calculations(key, value)
  field = @edit[:new][:fields][key.split("_").last.to_i].last
  @edit[:pivot_cols][field] = []
  value.split(',').sort.map(&:to_sym).each do |aggregate|
    @edit[:pivot_cols][field] << aggregate
    # New header = original header + aggregate function name
    @edit[:new][:headers]["#{field}__#{aggregate}"] =
      @edit[:new][:headers][field] + " (#{aggregate.to_s.titleize})"
  end
  build_field_order
end
# Handle params starting with "style"
# Handle the per-column styling params.  The key encodes
# "<parm>_<field index>_<style index>" where parm is one of:
#   style       - CSS class chosen (blank clears that style entry)
#   styleop     - comparison operator chosen
#   styleval    - comparison value chosen
#   stylesuffix - value-unit suffix chosen
# Styles live in @edit[:new][:col_options][<col name>][:style], an array of
# {:class, :operator, :value, :value_suffix} hashes indexed by s_idx.
def gfv_key_style(key, value)
parm, f_idx, s_idx = key.split("_") # Get the parm type, field index, and style index
f_idx = f_idx.to_i
s_idx = s_idx.to_i
f = @edit[:new][:field_order][f_idx] # Get the field element
field_sub_type = MiqExpression.get_col_info(f.last)[:format_sub_type]
field_data_type = MiqExpression.get_col_info(f.last)[:data_type]
# Column name: sub-table fields keep the table path with "-" turned to "."
field_name = f.last.include?(".") ? f.last.split(".").last.tr("-", ".") : f.last.split("-").last
case parm
when "style" # New CSS class chosen
if value.blank?
# Blank class removes this style entry, then prunes empty containers
@edit[:new][:col_options][field_name][:style].delete_at(s_idx)
@edit[:new][:col_options][field_name].delete(:style) if @edit[:new][:col_options][field_name][:style].empty?
@edit[:new][:col_options].delete(field_name) if @edit[:new][:col_options][field_name].empty?
else
@edit[:new][:col_options][field_name] ||= {}
@edit[:new][:col_options][field_name][:style] ||= []
@edit[:new][:col_options][field_name][:style][s_idx] ||= {}
@edit[:new][:col_options][field_name][:style][s_idx][:class] = value.to_sym
# Default [operator, value, suffix] seeds by column data type
ovs = case field_data_type
      when :boolean
        ["DEFAULT", "true"]
      when :integer, :float
        ["DEFAULT", "", MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units) ? MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units).first : nil]
      else
        ["DEFAULT", ""]
      end
# NOTE(review): op/val/suffix are always nil at this point, so these
# "||=" are plain assignments — looks like a holdover; confirm.
op ||= ovs[0]
val ||= ovs[1]
suffix ||= ovs[2]
@edit[:new][:col_options][field_name][:style][s_idx][:operator] ||= op
@edit[:new][:col_options][field_name][:style][s_idx][:value] ||= val
@edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] ||= suffix if suffix
end
@refresh_div = "styling_div"
@refresh_partial = "form_styling"
when "styleop" # New operator chosen
@edit[:new][:col_options][field_name][:style][s_idx][:operator] = value
if value == "DEFAULT"
@edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
# Remove all style array elements after this one
((s_idx + 1)...@edit[:new][:col_options][field_name][:style].length).each_with_index do |_i, i_idx|
@edit[:new][:col_options][field_name][:style].delete_at(i_idx)
end
elsif value.include?("NIL") || value.include?("EMPTY")
@edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
elsif [:datetime, :date].include?(field_data_type)
@edit[:new][:col_options][field_name][:style][s_idx][:value] = EXP_TODAY # Set default date value
elsif [:boolean].include?(field_data_type)
@edit[:new][:col_options][field_name][:style][s_idx][:value] = true # Set default boolean value
else
@edit[:new][:col_options][field_name][:style][s_idx][:value] = "" # Set default value
end
@refresh_div = "styling_div"
@refresh_partial = "form_styling"
when "styleval" # New value chosen
@edit[:new][:col_options][field_name][:style][s_idx][:value] = value
when "stylesuffix" # New suffix chosen
@edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] = value.to_sym
@refresh_div = "styling_div"
@refresh_partial = "form_styling"
end
end
# Copy the simple global report form fields out of params when present:
# PDF page size, queue timeout, row limit, name and title.
def gfv_report_fields
  new_settings = @edit[:new]
  new_settings[:pdf_page_size] = params[:pdf_page_size] if params[:pdf_page_size]
  if (timeout = params[:chosen_queue_timeout])
    new_settings[:queue_timeout] = timeout.blank? ? nil : timeout.to_i
  end
  if (limit = params[:row_limit])
    new_settings[:row_limit] = limit.blank? ? "" : limit
  end
  new_settings[:name] = params[:name] if params[:name]
  new_settings[:title] = params[:title] if params[:title]
end
# Dispatch the column-move toolbar buttons to the matching handler.
def gfv_move_cols_buttons
  case params[:button]
  when "right"  then move_cols_right
  when "left"   then move_cols_left
  when "up"     then move_cols_up
  when "down"   then move_cols_down
  when "top"    then move_cols_top
  when "bottom" then move_cols_bottom
  end
end
# Handle a change of the report's base model (db table).  Clears the interval
# and timezone, seeds performance/trend or chargeback defaults as appropriate
# for the new model, then resets the column fields and rebuilds the screen.
def gfv_model
if params[:chosen_model] && # Check for db table changed
params[:chosen_model] != @edit[:new][:model]
@edit[:new][:model] = params[:chosen_model]
@edit[:new][:perf_interval] = nil # Clear performance interval setting
@edit[:new][:tz] = nil
if [:performance, :trend].include?(model_report_type(@edit[:new][:model]))
# perf_interval was just cleared above, so "||=" always sets the default
@edit[:new][:perf_interval] ||= "daily" # Default to Daily
@edit[:new][:perf_avgs] ||= "time_interval"
@edit[:new][:tz] = session[:user_tz]
build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
end
if Chargeback.db_is_chargeback?(@edit[:new][:model])
@edit[:new][:cb_model] = Chargeback.report_cb_model(@edit[:new][:model])
@edit[:new][:cb_interval] ||= "daily" # Default to Daily
@edit[:new][:cb_interval_size] ||= 1
@edit[:new][:cb_end_interval_offset] ||= 1
@edit[:new][:cb_groupby] ||= "date" # Default to Date grouping
@edit[:new][:tz] = session[:user_tz]
end
reset_report_col_fields
build_edit_screen
@refresh_div = "form_div"
@refresh_partial = "form"
end
end
# Handle the Trend-report form fields: trend column choice, limit column /
# limit value, and the three target percent fields.  Only one param is
# expected per request (hence the if/elsif chain).
def gfv_trend
if params[:chosen_trend_col]
@edit[:new][:perf_interval] ||= "daily" # Default to Daily
@edit[:new][:perf_target_pct1] ||= 100 # Default to 100%
if params[:chosen_trend_col] == "<Choose>"
@edit[:new][:perf_trend_db] = nil
@edit[:new][:perf_trend_col] = nil
else
# Trend column value is "<db>-<col>"
@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col] = params[:chosen_trend_col].split("-")
# Percent-typed columns get a fixed 100 limit instead of a limit column
if MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |af| af.last == params[:chosen_trend_col] }.first.include?("(%)")
@edit[:new][:perf_limit_val] = 100
@edit[:new][:perf_limit_col] = nil
@edit[:percent_col] = true
else
@edit[:percent_col] = false
@edit[:new][:perf_limit_val] = nil
end
build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
@edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
end
@refresh_div = "columns_div"
@refresh_partial = "form_columns"
# build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
# @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
elsif params[:chosen_limit_col]
if params[:chosen_limit_col] == "<None>"
@edit[:new][:perf_limit_col] = nil
else
@edit[:new][:perf_limit_col] = params[:chosen_limit_col]
@edit[:new][:perf_limit_val] = nil
end
@refresh_div = "columns_div"
@refresh_partial = "form_columns"
elsif params[:chosen_limit_val]
@edit[:new][:perf_limit_val] = params[:chosen_limit_val]
elsif params[:percent1]
@edit[:new][:perf_target_pct1] = params[:percent1].to_i
elsif params[:percent2]
@edit[:new][:perf_target_pct2] = params[:percent2] == "<None>" ? nil : params[:percent2].to_i
elsif params[:percent3]
@edit[:new][:perf_target_pct3] = params[:percent3] == "<None>" ? nil : params[:percent3].to_i
end
end
# Handle the Performance-report form fields: interval (which resets the
# column choices), averaging mode, start/end offsets, timezone and time
# profile.  Only one param is expected per request.
def gfv_performance
if params[:chosen_interval]
@edit[:new][:perf_interval] = params[:chosen_interval]
@edit[:new][:perf_start] = nil # Clear start/end offsets
@edit[:new][:perf_end] = nil
build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
# Changing the interval changes the available columns, so reset them
reset_report_col_fields
@refresh_div = "form_div"
@refresh_partial = "form"
elsif params[:perf_avgs]
@edit[:new][:perf_avgs] = params[:perf_avgs]
elsif params[:chosen_start]
@edit[:new][:perf_start] = params[:chosen_start]
elsif params[:chosen_end]
@edit[:new][:perf_end] = params[:chosen_end]
elsif params[:chosen_tz]
@edit[:new][:tz] = params[:chosen_tz]
elsif params.key?(:chosen_time_profile)
# key? because a blank value (clearing the profile) must still be handled
@edit[:new][:time_profile] = params[:chosen_time_profile].blank? ? nil : params[:chosen_time_profile].to_i
@refresh_div = "filter_div"
@refresh_partial = "form_filter"
end
end
# Handle the Chargeback-report form fields: show-type, tag category/value,
# owner/tenant/entity/provider selection, grouping and interval settings.
# key? checks are used where a blank value (clearing the field) is valid.
def gfv_chargeback
# Chargeback options
if params.key?(:cb_show_typ)
@edit[:new][:cb_show_typ] = params[:cb_show_typ].blank? ? nil : params[:cb_show_typ]
@refresh_div = "filter_div"
@refresh_partial = "form_filter"
elsif params.key?(:cb_tag_cat)
@refresh_div = "filter_div"
@refresh_partial = "form_filter"
if params[:cb_tag_cat].blank?
@edit[:new][:cb_tag_cat] = nil
@edit[:new][:cb_tag_value] = nil
else
@edit[:new][:cb_tag_cat] = params[:cb_tag_cat]
# Load the value choices (name => description) for the chosen category
@edit[:cb_tags] = {}
Classification.find_by_name(params[:cb_tag_cat]).entries.each { |e| @edit[:cb_tags][e.name] = e.description }
end
elsif params.key?(:cb_owner_id)
@edit[:new][:cb_owner_id] = params[:cb_owner_id].blank? ? nil : params[:cb_owner_id]
elsif params.key?(:cb_tenant_id)
@edit[:new][:cb_tenant_id] = params[:cb_tenant_id].blank? ? nil : params[:cb_tenant_id].to_i
elsif params.key?(:cb_tag_value)
@edit[:new][:cb_tag_value] = params[:cb_tag_value].blank? ? nil : params[:cb_tag_value]
elsif params.key?(:cb_entity_id)
@edit[:new][:cb_entity_id] = params[:cb_entity_id].blank? ? nil : params[:cb_entity_id]
elsif params.key?(:cb_provider_id)
@edit[:new][:cb_provider_id] = params[:cb_provider_id].blank? ? nil : params[:cb_provider_id]
# Changing provider resets the entity choice and rebuilds the whole form
@edit[:new][:cb_entity_id] = "all"
build_edit_screen
@refresh_div = "form_div"
@refresh_partial = "form"
elsif params.key?(:cb_groupby)
@edit[:new][:cb_groupby] = params[:cb_groupby]
elsif params[:cb_interval]
# New interval resets the interval size and end offset to 1
@edit[:new][:cb_interval] = params[:cb_interval]
@edit[:new][:cb_interval_size] = 1
@edit[:new][:cb_end_interval_offset] = 1
@refresh_div = "filter_div"
@refresh_partial = "form_filter"
elsif params[:cb_interval_size]
@edit[:new][:cb_interval_size] = params[:cb_interval_size].to_i
elsif params[:cb_end_interval_offset]
@edit[:new][:cb_end_interval_offset] = params[:cb_end_interval_offset].to_i
end
end
# Handle the Charts-tab form fields: chart type, mode (counts/values),
# column, top-N count and the "Other" rollup checkbox.  Each param is
# handled independently (not an elsif chain), refreshing the chart divs.
def gfv_charts
if params[:chosen_graph] && params[:chosen_graph] != @edit[:new][:graph_type]
if params[:chosen_graph] == "<No chart>"
@edit[:new][:graph_type] = nil
# Reset other setting to initial settings if choosing <No chart>
@edit[:new][:graph_count] = @edit[:current][:graph_count]
@edit[:new][:graph_other] = @edit[:current][:graph_other]
@edit[:new][:chart_mode] = @edit[:current][:chart_mode]
@edit[:new][:chart_column] = @edit[:current][:chart_column]
else
@edit[:new][:graph_other] = true if @edit[:new][:graph_type].nil? # Reset other setting if choosing first chart
@edit[:new][:graph_type] = params[:chosen_graph] # Save graph type
@edit[:new][:graph_count] ||= GRAPH_MAX_COUNT # Reset graph count, if not set
@edit[:new][:chart_mode] ||= 'counts'
@edit[:new][:chart_column] ||= ''
end
@refresh_div = "chart_div"
@refresh_partial = "form_chart"
end
if params[:chart_mode] && params[:chart_mode] != @edit[:new][:chart_mode]
@edit[:new][:chart_mode] = params[:chart_mode]
@refresh_div = "chart_div"
@refresh_partial = "form_chart"
end
if params[:chart_column] && params[:chart_column] != @edit[:new][:chart_column]
@edit[:new][:chart_column] = params[:chart_column]
@refresh_div = "chart_sample_div"
@refresh_partial = "form_chart_sample"
end
if params[:chosen_count] && params[:chosen_count] != @edit[:new][:graph_count]
@edit[:new][:graph_count] = params[:chosen_count]
@refresh_div = "chart_sample_div"
@refresh_partial = "form_chart_sample"
end
if params[:chosen_other] # If a chart is showing, set the other setting based on check box present
chosen = (params[:chosen_other].to_s == "1")
if @edit[:new][:graph_other] != chosen
@edit[:new][:graph_other] = chosen
@refresh_div = "chart_sample_div"
@refresh_partial = "form_chart_sample"
end
end
end
# Handle the Consolidation-tab pivot group-by fields.  The three pivotby
# slots must stay "compacted": clearing or duplicating an earlier slot
# shifts/clears the later ones.  Pivot grouping fields are also removed
# from the pivot calculation columns.
def gfv_pivots
if params[:chosen_pivot1] && params[:chosen_pivot1] != @edit[:new][:pivotby1]
@edit[:new][:pivotby1] = params[:chosen_pivot1]
if params[:chosen_pivot1] == NOTHING_STRING
# Clearing slot 1 clears everything below it
@edit[:new][:pivotby2] = NOTHING_STRING
@edit[:new][:pivotby3] = NOTHING_STRING
elsif params[:chosen_pivot1] == @edit[:new][:pivotby2]
# Duplicate of slot 2: shift slot 3 up
@edit[:new][:pivotby2] = @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
elsif params[:chosen_pivot1] == @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
end
elsif params[:chosen_pivot2] && params[:chosen_pivot2] != @edit[:new][:pivotby2]
@edit[:new][:pivotby2] = params[:chosen_pivot2]
if params[:chosen_pivot2] == NOTHING_STRING || params[:chosen_pivot2] == @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
end
elsif params[:chosen_pivot3] && params[:chosen_pivot3] != @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = params[:chosen_pivot3]
end
if params[:chosen_pivot1] || params[:chosen_pivot2] || params[:chosen_pivot3]
if @edit[:new][:pivotby1] == NOTHING_STRING
@edit[:pivot_cols] = {} # Clear pivot_cols if no pivot grouping fields selected
else
@edit[:pivot_cols].delete(@edit[:new][:pivotby1]) # Remove any pivot grouping fields from pivot cols
@edit[:pivot_cols].delete(@edit[:new][:pivotby2])
@edit[:pivot_cols].delete(@edit[:new][:pivotby3])
end
build_field_order
@refresh_div = "consolidate_div"
@refresh_partial = "form_consolidate"
end
end
# Handle the Summary-tab (sort/group) form fields: sort order, grouping, the
# two sort columns and their date/time break suffixes, the break format, and
# the default break label kept in col_options.  Only one sort-related param
# is expected per request (hence the if/elsif chain).
def gfv_sort
  @edit[:new][:order] = params[:sort_order] if params[:sort_order]
  if params[:sort_group] # If grouping changed,
    @edit[:new][:group] = params[:sort_group]
    @refresh_div = "sort_div" # Resend the sort tab
    @refresh_partial = "form_sort"
    # Fall back to "counts" chart mode when "values" is no longer allowed.
    # BUGFIX: was "=", assigning ('values' && !chart_mode_values_allowed?)
    # into :chart_mode instead of comparing.
    if @edit[:new][:chart_mode] == 'values' && !chart_mode_values_allowed?
      @edit[:new][:chart_mode] = 'counts'
    end
  end
  @edit[:new][:hide_details] = (params[:hide_details].to_s == "1") if params[:hide_details]
  if params[:chosen_sort1] && params[:chosen_sort1] != @edit[:new][:sortby1].split("__").first
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = params[:chosen_sort1]
    # Clear sort 2 when sort 1 was cleared or now duplicates it
    @edit[:new][:sortby2] = NOTHING_STRING if params[:chosen_sort1] == NOTHING_STRING || params[:chosen_sort1] == @edit[:new][:sortby2].split("__").first
    @refresh_div = "sort_div"
    @refresh_partial = "form_sort"
  elsif params[:chosen_sort2] && params[:chosen_sort2] != @edit[:new][:sortby2].split("__").first
    @edit[:new][:sortby2] = params[:chosen_sort2]
  # Look at the 1st sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort1_suffix] && params[:sort1_suffix].to_s != @edit[:new][:sortby1].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = @edit[:new][:sortby1].split("__").first +
                            (params[:sort1_suffix].blank? ? "" : "__#{params[:sort1_suffix]}")
  # Look at the 2nd sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort2_suffix] && params[:sort2_suffix].to_s != @edit[:new][:sortby2].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby2].split("-").last) if @edit[:new][:sortby2].split("__")[1]
    # (Removed a redundant duplicate assignment that was immediately
    # overwritten by the statement below.)
    @edit[:new][:sortby2] = @edit[:new][:sortby2].split("__").first +
                            (params[:sort2_suffix].blank? ? "" : "__#{params[:sort2_suffix]}")
  # Look at the break format
  else
    co_key1 = @edit[:new][:sortby1].split("-").last
    if params[:break_format] &&
       params[:break_format].to_s != @edit[:new].fetch_path(:col_options, co_key1)
      if params[:break_format].blank? || # Remove format and col key (if empty)
         params[:break_format].to_sym == MiqReport.get_col_info(@edit[:new][:sortby1])[:default_format]
        if @edit[:new][:col_options][co_key1]
          @edit[:new][:col_options][co_key1].delete(:break_format)
          @edit[:new][:col_options].delete(co_key1) if @edit[:new][:col_options][co_key1].empty?
        end
      else # Add col and format to col_options
        @edit[:new][:col_options][co_key1] ||= {}
        @edit[:new][:col_options][co_key1][:break_format] = params[:break_format].to_sym
      end
    end
  end
  # Clear/set up the default break label
  sort1 = @edit[:new][:sortby1].split("-").last unless @edit[:new][:sortby1].blank?
  if @edit[:new][:group] == "No" # Clear any existing break label
    if @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1].delete(:break_label)
      @edit[:new][:col_options].delete(sort1) if @edit[:new][:col_options][sort1].empty?
    end
  else # Create a break label, if none there already
    unless @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1] ||= {}
      sort, suffix = @edit[:new][:sortby1].split("__")
      # Label = the sort column's display name (+ suffix description) + ": "
      @edit[:new][:col_options][sort1][:break_label] =
        @edit[:new][:field_order].collect { |f| f.first if f.last == sort }.compact.join.strip +
        (suffix ? " (#{MiqReport.date_time_break_suffixes.collect { |s| s.first if s.last == suffix }.compact.join})" : "") +
        ": "
    end
  end
  # TODO: Not allowing user to change break label until editor is changed to not use form observe
  # if params[:break_label]
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last] ||= Hash.new
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last][:break_label] == params[:break_label]
  # end
end
# Handle the Timeline-tab form fields: the timeline (datetime) field, the
# position setting, the "last N units" settings, and the three band units.
# @edit[:new][:tl_bands] is kept consistent with the chosen units: clearing
# a unit removes its band (and the ones after it); choosing a unit for a
# missing band appends it with default width/gap settings.
def gfv_timeline
if params[:chosen_tl] && params[:chosen_tl] != @edit[:new][:tl_field]
if @edit[:new][:tl_field] == NOTHING_STRING || params[:chosen_tl] == NOTHING_STRING
# Switching between "no timeline" and "timeline" needs a settings refresh
@refresh_div = "tl_settings_div"
@refresh_partial = "form_tl_settings"
@tl_changed = true
else
@tl_repaint = true
end
@edit[:new][:tl_field] = params[:chosen_tl]
if params[:chosen_tl] == NOTHING_STRING # If clearing the timeline field
@edit[:new][:tl_bands] = [] # Clear the bands
@edit[:unit1] = NOTHING_STRING
@edit[:unit2] = NOTHING_STRING
@edit[:unit3] = NOTHING_STRING
else
if @edit[:new][:tl_bands].blank? # If the bands are blank
@edit[:unit1] = BAND_UNITS[1]
@edit[:new][:tl_bands] = [ # Create default first band
{:width => 100, :gap => 0.0, :text => true, :unit => BAND_UNITS[1], :pixels => 100}
]
end
end
elsif params[:chosen_position] && params[:chosen_position] != @edit[:new][:tl_position]
@tl_changed = true
@edit[:new][:tl_position] = params[:chosen_position]
elsif params[:chosen_last_unit] && params[:chosen_last_unit] != @edit[:new][:tl_last_unit]
@refresh_div = "tl_settings_div"
@refresh_partial = "form_tl_settings"
@tl_repaint = true
@edit[:new][:tl_last_unit] = params[:chosen_last_unit]
@edit[:new][:tl_last_time] = nil # Clear out the last time numeric choice
elsif params[:chosen_last_time] && params[:chosen_last_time] != @edit[:new][:tl_last_time]
@tl_repaint = true
@edit[:new][:tl_last_time] = params[:chosen_last_time]
elsif params[:chosen_unit1] && params[:chosen_unit1] != @edit[:unit1]
@refresh_div = "tl_settings_div"
@refresh_partial = "form_tl_settings"
@edit[:unit1] = params[:chosen_unit1]
@edit[:new][:tl_bands][0][:unit] = params[:chosen_unit1]
elsif params[:chosen_unit2] && params[:chosen_unit2] != @edit[:unit2]
@refresh_div = "tl_settings_div"
@refresh_partial = "form_tl_settings"
@tl_changed = true
@edit[:unit2] = params[:chosen_unit2]
if @edit[:unit2] == NOTHING_STRING
# Clearing band 2 also drops band 3 and restores band 1 to full width
@edit[:unit3] = NOTHING_STRING # Clear the 3rd band unit value
@edit[:new][:tl_bands] = [@edit[:new][:tl_bands][0]] # Remove the 2nd and 3rd bands
@edit[:new][:tl_bands][0][:width] = 100
elsif @edit[:new][:tl_bands].length < 2
# Adding band 2 for the first time: split widths 70/30
@edit[:new][:tl_bands][0][:width] = 70
@edit[:new][:tl_bands].push(:width => 30, :height => 0.6, :gap => 0.1, :text => false, :unit => params[:chosen_unit2], :pixels => 200)
else
@edit[:new][:tl_bands][1][:unit] = params[:chosen_unit2]
end
elsif params[:chosen_unit3] && params[:chosen_unit3] != @edit[:unit3]
@refresh_div = "tl_settings_div"
@refresh_partial = "form_tl_settings"
@tl_changed = true
@edit[:unit3] = params[:chosen_unit3]
if @edit[:unit3] == NOTHING_STRING
@edit[:new][:tl_bands] = @edit[:new][:tl_bands][0..1] # Remove the 3rd band
@edit[:new][:tl_bands][1][:width] = 30
elsif @edit[:new][:tl_bands].length < 3
# Adding band 3 for the first time: split widths 70/20/10
@edit[:new][:tl_bands][0][:width] = 70
@edit[:new][:tl_bands][1][:width] = 20
@edit[:new][:tl_bands].push(:width => 10, :height => 0.3, :gap => 0.1, :text => false, :unit => params[:chosen_unit3], :pixels => 200)
else
@edit[:new][:tl_bands][2][:unit] = params[:chosen_unit3]
end
end
end
# Move the selected available columns into the report's chosen fields list,
# building a default header for each, subject to the MAX_REPORT_COLUMNS cap.
def move_cols_right
if !params[:available_fields] || params[:available_fields].length == 0 || params[:available_fields][0] == ""
add_flash(_("No fields were selected to move down"), :error)
elsif params[:available_fields].length + @edit[:new][:fields].length > MAX_REPORT_COLUMNS
add_flash(_("Fields not added: Adding the selected %{count} fields will exceed the maximum of %{max} fields") % {:count => params[:available_fields].length + @edit[:new][:fields].length, :max => MAX_REPORT_COLUMNS},
:error)
else
MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).each do |af| # Go thru all available columns
if params[:available_fields].include?(af[1]) # See if this column was selected to move
unless @edit[:new][:fields].include?(af) # Only move if it's not there already
@edit[:new][:fields].push(af) # Add it to the new fields list
if af[0].include?(":") # Not a base column
table = af[0].split(" : ")[0].split(".")[-1] # Get the table name
table = table.singularize unless table == "OS" # Singularize, except "OS"
header = table + " " + af[0].split(" : ")[1] # Add the table + col name
temp = af[0].split(" : ")[1]
# Skip the table prefix when the column name already starts with it.
# NOTE(review): `header` above is never used after this point, and the
# nested "temp_header = ..." inside the ternary is redundant — confirm
# this line can be simplified.
temp_header = table == temp.split(" ")[0] ? af[0].split(" : ")[1] : temp_header = table + " " + af[0].split(" : ")[1]
else
header = temp_header = af[0].strip # Base column, just use it without leading space
end
@edit[:new][:headers][af[1]] = temp_header # Add the column title to the headers hash
end
end
end
@refresh_div = "column_lists"
@refresh_partial = "column_lists"
build_field_order
end
end
# Remove the selected chosen columns from the report, cleaning up every
# setting derived from them: headers, formats, pivot groupings/calculations,
# sort settings, timeline settings and col_options.  Refuses to remove a
# column still referenced by the display filter.
def move_cols_left
if !params[:selected_fields] || params[:selected_fields].length == 0 || params[:selected_fields][0] == ""
add_flash(_("No fields were selected to move up"), :error)
elsif display_filter_contains?(params[:selected_fields])
add_flash(_("No fields were moved up"), :error)
else
@edit[:new][:fields].each do |nf| # Go thru all new fields
if params[:selected_fields].include?(nf.last) # See if this col was selected to move
# Clear out headers and formatting
@edit[:new][:headers].delete(nf.last) # Delete the column name from the headers hash
@edit[:new][:headers].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
@edit[:new][:col_formats].delete(nf.last) # Delete the column name from the col_formats hash
@edit[:new][:col_formats].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
# Clear out pivot field options
if nf.last == @edit[:new][:pivotby1] # Compress the pivotby fields if being moved left
@edit[:new][:pivotby1] = @edit[:new][:pivotby2]
@edit[:new][:pivotby2] = @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
elsif nf.last == @edit[:new][:pivotby2]
@edit[:new][:pivotby2] = @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
elsif nf.last == @edit[:new][:pivotby3]
@edit[:new][:pivotby3] = NOTHING_STRING
end
@edit[:pivot_cols].delete(nf.last) # Delete the column name from the pivot_cols hash
# Clear out sort options
if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby1].split("__").first # If deleting the first sort field
if MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) # If sort has a break suffix
@edit[:new][:col_options].delete(field_to_col(@edit[:new][:sortby1])) # Remove the <col>__<suffix> from col_options
end
unless @edit[:new][:group] == "No" # If we were grouping, remove all col_options :group keys
@edit[:new][:col_options].each do |co_key, co_val|
co_val.delete(:grouping) # Remove :group key
@edit[:new][:col_options].delete(co_key) if co_val.empty? # Remove the col, if empty
end
end
@edit[:new][:sortby1] = NOTHING_STRING
@edit[:new][:sortby2] = NOTHING_STRING
end
# NOTE(review): the guard below checks :sortby1 but reads :sortby2 —
# presumably should guard :sortby2; harmless while both are always
# initialized together, but confirm.
if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby2].split("__").first # If deleting the second sort field
@edit[:new][:sortby2] = NOTHING_STRING
end
# Clear out timeline options
if nf.last == @edit[:new][:tl_field] # If deleting the timeline field
@edit[:new][:tl_field] = NOTHING_STRING
@edit[:unit1] = NOTHING_STRING
@edit[:unit2] = NOTHING_STRING
@edit[:unit3] = NOTHING_STRING
@edit[:new][:tl_bands] = []
end
@edit[:new][:col_options].delete(field_to_col(nf.last)) # Remove this column from the col_options hash
end
end
@edit[:new][:fields].delete_if { |nf| params[:selected_fields].include?(nf.last) } # Remove selected fields
@refresh_div = "column_lists"
@refresh_partial = "column_lists"
build_field_order
end
end
# See if any of the fields passed in are present in the display filter expression
# Return true (flashing an error for each offending field) when any of the
# given field names is referenced by the display filter expression; false
# when there is no display filter or none of the fields are referenced.
def display_filter_contains?(fields)
  filter = @edit[:new][:display_filter]
  return false if filter.nil? # No display filter defined
  filter_text = filter.inspect
  @edit[:new][:fields].each do |field|
    next unless fields.include?(field.last)      # is this field being removed?
    next unless filter_text.include?(field.last) # and referenced by the filter?
    add_flash(_("%{name} is currently being used in the Display Filter") %
      {:name => field.first}, :error)
  end
  !@flash_array.nil?
end
# Move the selected (consecutive) chosen columns up one position.
def move_cols_up
  if !params[:selected_fields] || params[:selected_fields].length == 0 || params[:selected_fields][0] == ""
    add_flash(_("No fields were selected to move up"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if first_idx > 0
      # Re-insert each selected field one slot earlier; iterating in reverse
      # keeps the selected block's relative order intact.
      @edit[:new][:fields][first_idx..last_idx].reverse_each do |field|
        @edit[:new][:fields].insert(first_idx - 1, @edit[:new][:fields].delete(field))
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move up"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected (consecutive) fields down one position in the fields list.
def move_cols_down
  selected = params[:selected_fields]
  if !selected || selected.length == 0 || selected[0] == ""
    add_flash(_("No fields were selected to move down"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if last_idx < @edit[:new][:fields].length - 1 # Already at the bottom: nothing to move
      insert_idx = last_idx + 1 # Insert before the element following the selection
      insert_idx = -1 if last_idx == @edit[:new][:fields].length - 2 # One away from the end: append
      @edit[:new][:fields][first_idx..last_idx].each do |col|
        moved = @edit[:new][:fields].delete(col)
        @edit[:new][:fields].insert(insert_idx, moved)
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move down"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected (consecutive) fields to the top of the fields list.
def move_cols_top
  selected = params[:selected_fields]
  if !selected || selected.length == 0 || selected[0] == ""
    add_flash(_("No fields were selected to move to the top"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if first_idx > 0 # Already at the top: nothing to move
      @edit[:new][:fields][first_idx..last_idx].reverse_each do |col|
        moved = @edit[:new][:fields].delete(col)
        @edit[:new][:fields].unshift(moved)
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move to the top"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected (consecutive) fields to the bottom of the fields list.
def move_cols_bottom
  selected = params[:selected_fields]
  if !selected || selected.length == 0 || selected[0] == ""
    add_flash(_("No fields were selected to move to the bottom"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if last_idx < @edit[:new][:fields].length - 1 # Already at the bottom: nothing to move
      @edit[:new][:fields][first_idx..last_idx].each do |col|
        moved = @edit[:new][:fields].delete(col)
        @edit[:new][:fields].push(moved)
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move to the bottom"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Determine whether the fields selected in params[:selected_fields] form a
# consecutive run within @edit[:new][:fields].
# Returns [consecutive_boolean, first_index, last_index].
def selected_consecutive?
  first_idx = last_idx = 0
  first_key = params[:selected_fields].first
  last_key = params[:selected_fields].last
  @edit[:new][:fields].each_with_index do |(_label, key), idx|
    first_idx = idx if key == first_key
    next unless key == last_key
    last_idx = idx
    break
  end
  # Consecutive when the span covers no more entries than were selected
  consecutive = (last_idx - first_idx + 1) <= params[:selected_fields].length
  [consecutive, first_idx, last_idx]
end
# Copy the edited form values held in @edit[:new] onto the MiqReport record
# +rpt+: simple attributes, graph settings, record/display filter expressions,
# performance/trend/chargeback db_options, timeline settings, line-break
# grouping, and the column-related arrays (cols/col_order/col_formats/
# headers/sortby, via add_field_to_col_order).
def set_record_vars(rpt)
  # Set the simple string/number fields
  rpt.template_type = "report"
  rpt.name = @edit[:new][:name].to_s.strip
  rpt.title = @edit[:new][:title].to_s.strip
  rpt.db = @edit[:new][:model]
  rpt.rpt_group = @edit[:new][:rpt_group]
  rpt.rpt_type = @edit[:new][:rpt_type]
  rpt.priority = @edit[:new][:priority]
  rpt.categories = @edit[:new][:categories]
  rpt.col_options = @edit[:new][:col_options]
  # Keep the sort order only when a first sort field has been chosen
  rpt.order = @edit[:new][:sortby1].nil? ? nil : @edit[:new][:order]
  # Set the graph fields; charts require at least a first sort field
  if @edit[:new][:sortby1] == NOTHING_STRING || @edit[:new][:graph_type].nil?
    rpt.dims = nil
    rpt.graph = nil
  else
    if @edit[:new][:graph_type] =~ /^(Pie|Donut)/ # Pie and Donut charts must be set to 1 dimension
      rpt.dims = 1
    else
      rpt.dims = @edit[:new][:sortby2] == NOTHING_STRING ? 1 : 2 # Set dims to 1 or 2 based on presence of sortby2
    end
    # Default the chart column to the first available option when charting values
    if @edit[:new][:chart_mode] == 'values' && @edit[:new][:chart_column].blank?
      options = chart_fields_options
      @edit[:new][:chart_column] = options[0][1] unless options.empty?
    end
    rpt.graph = {
      :type => @edit[:new][:graph_type],
      :mode => @edit[:new][:chart_mode],
      :column => @edit[:new][:chart_column],
      :count => @edit[:new][:graph_count],
      :other => @edit[:new][:graph_other],
    }
  end
  # Set the conditions field (expression); the "???" key marks a new/unfinished
  # expression element (see set_form_vars), which must not be saved
  if !@edit[:new][:record_filter].nil? && @edit[:new][:record_filter]["???"].nil?
    rpt.conditions = MiqExpression.new(@edit[:new][:record_filter])
  else
    rpt.conditions = nil
  end
  # Set the display_filter field (expression), same "???" placeholder rule
  if !@edit[:new][:display_filter].nil? && @edit[:new][:display_filter]["???"].nil?
    rpt.display_filter = MiqExpression.new(@edit[:new][:display_filter])
  else
    rpt.display_filter = nil
  end
  # Set the performance options
  rpt.db_options = Hash.new
  if model_report_type(rpt.db) == :performance
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:calc_avgs_by] = @edit[:new][:perf_avgs]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    # start_offset is stored relative to the epoch, so add the end offset back in
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
  elsif model_report_type(rpt.db) == :trend
    rpt.db_options[:rpt_type] = "trend"
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
    rpt.db_options[:trend_db] = @edit[:new][:perf_trend_db]
    rpt.db_options[:trend_col] = @edit[:new][:perf_trend_col]
    rpt.db_options[:limit_col] = @edit[:new][:perf_limit_col] if @edit[:new][:perf_limit_col]
    rpt.db_options[:limit_val] = @edit[:new][:perf_limit_val] if @edit[:new][:perf_limit_val]
    rpt.db_options[:target_pcts] = []
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct1])
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct2]) if @edit[:new][:perf_target_pct2]
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct3]) if @edit[:new][:perf_target_pct3]
  elsif Chargeback.db_is_chargeback?(rpt.db)
    rpt.db_options[:rpt_type] = @edit[:new][:model]
    options = {} # CB options go in db_options[:options] key
    options[:interval] = @edit[:new][:cb_interval]
    options[:interval_size] = @edit[:new][:cb_interval_size]
    options[:end_interval_offset] = @edit[:new][:cb_end_interval_offset]
    # Exactly one of owner/tenant/tag/entity applies, per the cb_show_typ choice
    if @edit[:new][:cb_show_typ] == "owner"
      options[:owner] = @edit[:new][:cb_owner_id]
    elsif @edit[:new][:cb_show_typ] == "tenant"
      options[:tenant_id] = @edit[:new][:cb_tenant_id]
    elsif @edit[:new][:cb_show_typ] == "tag"
      if @edit[:new][:cb_tag_cat] && @edit[:new][:cb_tag_value]
        options[:tag] = "/managed/#{@edit[:new][:cb_tag_cat]}/#{@edit[:new][:cb_tag_value]}"
      end
    elsif @edit[:new][:cb_show_typ] == "entity"
      options[:provider_id] = @edit[:new][:cb_provider_id]
      options[:entity_id] = @edit[:new][:cb_entity_id]
    end
    rpt.db_options[:options] = options
  end
  rpt.time_profile_id = @edit[:new][:time_profile]
  if @edit[:new][:time_profile]
    time_profile = TimeProfile.find_by_id(@edit[:new][:time_profile])
    rpt.tz = time_profile.tz
  end
  # Set the timeline field
  if @edit[:new][:tl_field] == NOTHING_STRING
    rpt.timeline = nil
  else
    rpt.timeline = Hash.new
    rpt.timeline[:field] = @edit[:new][:tl_field]
    rpt.timeline[:position] = @edit[:new][:tl_position]
    rpt.timeline[:bands] = @edit[:new][:tl_bands]
    if @edit[:new][:tl_last_unit] == SHOWALL_STRING
      rpt.timeline[:last_unit] = rpt.timeline[:last_time] = nil
    else
      rpt.timeline[:last_unit] = @edit[:new][:tl_last_unit]
      rpt.timeline[:last_time] = @edit[:new][:tl_last_time]
    end
  end
  # Set the line break group field
  if @edit[:new][:sortby1] == NOTHING_STRING # If no sort fields
    rpt.group = nil # Clear line break group
  else # Otherwise, check the setting
    case @edit[:new][:group]
    when "Yes"
      rpt.group = "y"
    when "Counts"
      rpt.group = "c"
    else
      rpt.group = nil
    end
  end
  # Set defaults, if not present
  rpt.rpt_group ||= "Custom"
  rpt.rpt_type ||= "Custom"
  rpt.cols = []
  rpt.col_order = []
  rpt.col_formats = []
  rpt.headers = []
  rpt.include = Hash.new
  rpt.sortby = @edit[:new][:sortby1] == NOTHING_STRING ? nil : [] # Clear sortby if sortby1 not present, else set up array
  # Add in the chargeback static fields
  if Chargeback.db_is_chargeback?(rpt.db) # For chargeback, add in static fields
    rpt.cols = %w(start_date display_range)
    name_col = @edit[:new][:model].constantize.report_name_field
    rpt.cols += [name_col]
    # cb_groupby decides whether the date range or the entity name sorts first
    if @edit[:new][:cb_groupby] == "date"
      rpt.col_order = ["display_range", name_col]
      rpt.sortby = ["start_date", name_col]
    elsif @edit[:new][:cb_groupby] == "vm"
      rpt.col_order = [name_col, "display_range"]
      rpt.sortby = [name_col, "start_date"]
    end
    rpt.col_order.each do |c|
      rpt.headers.push(Dictionary.gettext(c, :type => :column, :notfound => :titleize))
      rpt.col_formats.push(nil) # No formatting needed on the static cols
    end
    rpt.col_options = @edit[:new][:model].constantize.report_col_options
    rpt.order = "Ascending"
    rpt.group = "y"
    rpt.tz = @edit[:new][:tz]
  end
  # Remove when we support user sorting of trend reports
  if rpt.db == TREND_MODEL
    rpt.sortby = ["resource_name"]
    rpt.order = "Ascending"
  end
  # Build column related report fields
  @pg1 = @pg2 = @pg3 = nil # Init the pivot group cols (filled in by add_field_to_col_order)
  @edit[:new][:fields].each do |field_entry| # Go thru all of the fields
    field = field_entry[1] # Get the encoded fully qualified field name
    if @edit[:new][:pivotby1] != NOTHING_STRING && # If we are doing pivoting and
       @edit[:pivot_cols].key?(field) # this is a pivot calc column
      @edit[:pivot_cols][field].each do |calc_typ| # Add header/format/col_order for each calc type
        rpt.headers.push(@edit[:new][:headers][field + "__#{calc_typ}"])
        rpt.col_formats.push(@edit[:new][:col_formats][field + "__#{calc_typ}"])
        add_field_to_col_order(rpt, field + "__#{calc_typ}")
      end
    else # Normal field, set header/format/col_order
      rpt.headers.push(@edit[:new][:headers][field])
      rpt.col_formats.push(@edit[:new][:col_formats][field])
      add_field_to_col_order(rpt, field)
    end
  end
  rpt.rpt_options ||= {}
  rpt.rpt_options.delete(:pivot)
  unless @pg1.nil? # Build the pivot group_cols array
    rpt.rpt_options[:pivot] = {}
    rpt.rpt_options[:pivot][:group_cols] = []
    rpt.rpt_options[:pivot][:group_cols].push(@pg1)
    rpt.rpt_options[:pivot][:group_cols].push(@pg2) unless @pg2.nil?
    rpt.rpt_options[:pivot][:group_cols].push(@pg3) unless @pg3.nil?
  end
  # Row limit only applies when not grouping
  if @edit[:new][:group] != "No" || @edit[:new][:row_limit].blank?
    rpt.rpt_options.delete(:row_limit)
  else
    rpt.rpt_options[:row_limit] = @edit[:new][:row_limit].to_i
  end
  # Add pdf page size to rpt_options
  rpt.rpt_options ||= {}
  rpt.rpt_options[:pdf] ||= {}
  rpt.rpt_options[:pdf][:page_size] = @edit[:new][:pdf_page_size] || DEFAULT_PDF_PAGE_SIZE
  rpt.rpt_options[:queue_timeout] = @edit[:new][:queue_timeout]
  # Add hide detail rows option, if grouping
  if rpt.group.nil?
    rpt.rpt_options.delete(:summary)
  else
    rpt.rpt_options[:summary] ||= {}
    rpt.rpt_options[:summary][:hide_detail_rows] = @edit[:new][:hide_details]
  end
  user = current_user
  rpt.user = user
  rpt.miq_group = user.current_group
  rpt.add_includes_for_virtual_custom_attributes
end
# Wire one encoded field key ("model[.table...]-column[__suffix]") into the
# report's cols / col_order / include / sortby structures, and capture any
# pivot group columns into @pg1..@pg3 for later use by set_record_vars.
#
# rpt   - the MiqReport being populated
# field - the encoded fully qualified field name
def add_field_to_col_order(rpt, field)
  # Get the sort columns, removing the break suffix if it exists
  sortby1 = MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) ?
    @edit[:new][:sortby1].split("__").first :
    @edit[:new][:sortby1]
  sortby2 = MiqReport.is_break_suffix?(@edit[:new][:sortby2].split("__")[1]) ?
    @edit[:new][:sortby2].split("__").first :
    @edit[:new][:sortby2]
  if field.include?(".") # Has a period, so it's an include
    tables = field.split("-")[0].split(".")[1..-1] # Get the list of tables from before the hyphen
    inc_hash = rpt.include # Start at the main hash
    tables.each_with_index do |table, idx|
      inc_hash[table] ||= {} # Create hash for the table, if it's not there already
      if idx == tables.length - 1 # We're at the end of the field name, so add the column
        inc_hash[table]["columns"] ||= [] # Create the columns array for this table
        f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
        inc_hash[table]["columns"].push(f) unless inc_hash[table]["columns"].include?(f) # Add the field to the columns, if not there
        table_field = tables.join('.') + "." + field.split("-")[1]
        rpt.col_order.push(table_field) # Add the table.field to the col_order array
        if field == sortby1 # Is this the first sort field?
          rpt.sortby = [table_field] + rpt.sortby # Put the field first in the sortby array
        # BUGFIX: compare against the suffix-stripped sortby2 (as the sortby1
        # check above does), not the raw @edit value, so a second sort field
        # with a break suffix still matches an included-table field
        elsif field == sortby2 # Is this the second sort field?
          rpt.sortby.push(table_field) # Add the field to the sortby array
        end
        if field == @edit[:new][:pivotby1] # Save the group fields
          @pg1 = table_field
        elsif field == @edit[:new][:pivotby2]
          @pg2 = table_field
        elsif field == @edit[:new][:pivotby3]
          @pg3 = table_field
        end
      else # Set up for the next embedded include hash
        inc_hash[table]["include"] ||= {} # Create include hash for next level
        inc_hash = inc_hash[table]["include"] # Point to the new hash
      end
    end
  else # No period, this is a main table column
    if field.include?("__") # Check for pivot calculated field
      f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
      rpt.cols.push(f) unless rpt.cols.include?(f) # Add the original field, if not already there
    else
      rpt.cols.push(field.split("-")[1]) # Grab the field name after the hyphen
    end
    rpt.col_order.push(field.split("-")[1]) # Add the field to the col_order array
    if field == sortby1 # Is this the first sort field?
      rpt.sortby = [@edit[:new][:sortby1].split("-")[1]] + rpt.sortby # Put the field first in the sortby array
    elsif field == sortby2 # Is this the second sort field?
      rpt.sortby.push(@edit[:new][:sortby2].split("-")[1]) # Add the field to the sortby array
    end
    if field == @edit[:new][:pivotby1] # Save the group fields
      @pg1 = field.split("-")[1]
    elsif field == @edit[:new][:pivotby2]
      @pg2 = field.split("-")[1]
    elsif field == @edit[:new][:pivotby3]
      @pg3 = field.split("-")[1]
    end
  end
end
# Set form variables for edit: build the @edit hash (with :new holding the
# editable copy and :current the baseline) from the @rpt report record —
# basic attributes, graph settings, performance/trend/chargeback options,
# filter expressions, timeline settings, grouping, and the selected fields.
def set_form_vars
  @edit = {}
  @edit[:rpt_id] = @rpt.id # Save a record id to use it later to look a record
  @edit[:rpt_title] = @rpt.title
  @edit[:rpt_name] = @rpt.name
  @edit[:new] = {}
  @edit[:key] = "report_edit__#{@rpt.id || "new"}"
  # A copy always becomes a Custom report, regardless of the source's group/type
  if params[:pressed] == "miq_report_copy"
    @edit[:new][:rpt_group] = "Custom"
    @edit[:new][:rpt_type] = "Custom"
  else
    @edit[:new][:rpt_group] = @rpt.rpt_group
    @edit[:new][:rpt_type] = @rpt.rpt_type
  end
  # Get the simple string/number fields
  @edit[:new][:name] = @rpt.name
  @edit[:new][:title] = @rpt.title
  @edit[:new][:model] = @rpt.db
  @edit[:new][:priority] = @rpt.priority
  @edit[:new][:order] = @rpt.order.blank? ? "Ascending" : @rpt.order
  # @edit[:new][:graph] = @rpt.graph
  # Replaced above line to handle new graph settings Hash
  if @rpt.graph.kind_of?(Hash)
    @edit[:new][:graph_type] = @rpt.graph[:type]
    @edit[:new][:graph_count] = @rpt.graph[:count]
    @edit[:new][:chart_mode] = @rpt.graph[:mode]
    @edit[:new][:chart_column] = @rpt.graph[:column]
    @edit[:new][:graph_other] = @rpt.graph[:other] ? @rpt.graph[:other] : false
  else
    # Legacy reports stored the graph type directly; fill in defaults
    @edit[:new][:graph_type] = @rpt.graph
    @edit[:new][:graph_count] = GRAPH_MAX_COUNT
    @edit[:new][:chart_mode] = 'counts'
    @edit[:new][:chart_column] = ''
    @edit[:new][:graph_other] = true
  end
  @edit[:new][:dims] = @rpt.dims
  @edit[:new][:categories] = @rpt.categories
  @edit[:new][:categories] ||= []
  @edit[:new][:col_options] = @rpt.col_options.blank? ? {} : @rpt.col_options
  # Initialize options
  @edit[:new][:perf_interval] = nil
  @edit[:new][:perf_start] = nil
  @edit[:new][:perf_end] = nil
  @edit[:new][:tz] = nil
  @edit[:new][:perf_trend_db] = nil
  @edit[:new][:perf_trend_col] = nil
  @edit[:new][:perf_limit_col] = nil
  @edit[:new][:perf_limit_val] = nil
  @edit[:new][:perf_target_pct1] = nil
  @edit[:new][:perf_target_pct2] = nil
  @edit[:new][:perf_target_pct3] = nil
  @edit[:new][:cb_interval] = nil
  @edit[:new][:cb_interval_size] = nil
  @edit[:new][:cb_end_interval_offset] = nil
  if [:performance, :trend].include?(model_report_type(@rpt.db))
    @edit[:new][:perf_interval] = @rpt.db_options[:interval]
    @edit[:new][:perf_avgs] = @rpt.db_options[:calc_avgs_by]
    @edit[:new][:perf_end] = @rpt.db_options[:end_offset].to_s
    # start_offset was stored including the end offset (see set_record_vars)
    @edit[:new][:perf_start] = (@rpt.db_options[:start_offset] - @rpt.db_options[:end_offset]).to_s
    @edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
    if @rpt.time_profile
      @edit[:new][:time_profile] = @rpt.time_profile_id
      @edit[:new][:time_profile_tz] = @rpt.time_profile.tz
    else
      set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
    end
    @edit[:new][:perf_trend_db] = @rpt.db_options[:trend_db]
    @edit[:new][:perf_trend_col] = @rpt.db_options[:trend_col]
    @edit[:new][:perf_limit_col] = @rpt.db_options[:limit_col]
    @edit[:new][:perf_limit_val] = @rpt.db_options[:limit_val]
    @edit[:new][:perf_target_pct1], @edit[:new][:perf_target_pct2], @edit[:new][:perf_target_pct3] = @rpt.db_options[:target_pcts]
  elsif Chargeback.db_is_chargeback?(@rpt.db)
    @edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
    options = @rpt.db_options[:options]
    if options.key?(:owner) # Get the owner options
      @edit[:new][:cb_show_typ] = "owner"
      @edit[:new][:cb_owner_id] = options[:owner]
    elsif options.key?(:tenant_id) # Get the tenant options
      @edit[:new][:cb_show_typ] = "tenant"
      @edit[:new][:cb_tenant_id] = options[:tenant_id]
    elsif options.key?(:tag) # Get the tag options ("/managed/<category>/<value>")
      @edit[:new][:cb_show_typ] = "tag"
      @edit[:new][:cb_tag_cat] = options[:tag].split("/")[-2]
      @edit[:new][:cb_tag_value] = options[:tag].split("/")[-1]
      @edit[:cb_tags] = {}
      cat = Classification.find_by_name(@edit[:new][:cb_tag_cat])
      cat.entries.each { |e| @edit[:cb_tags][e.name] = e.description } if cat # Collect the tags, if category is valid
    elsif options.key?(:entity_id)
      @edit[:new][:cb_show_typ] = "entity"
      @edit[:new][:cb_entity_id] = options[:entity_id]
      @edit[:new][:cb_provider_id] = options[:provider_id]
    end
    @edit[:new][:cb_model] = Chargeback.report_cb_model(@rpt.db)
    @edit[:new][:cb_interval] = options[:interval]
    @edit[:new][:cb_interval_size] = options[:interval_size]
    @edit[:new][:cb_end_interval_offset] = options[:end_interval_offset]
    @edit[:new][:cb_groupby] = @rpt.sortby.nil? || @rpt.sortby.first == "start_date" ? "date" : "vm"
  end
  # Only show chargeback users choice if an admin
  if admin_user?
    @edit[:cb_users] = User.all.each_with_object({}) { |u, h| h[u.userid] = u.name }
    @edit[:cb_tenant] = Tenant.all.each_with_object({}) { |t, h| h[t.id] = t.name }
  else
    # Non-admins may only report on their own costs
    @edit[:new][:cb_show_typ] = "owner"
    @edit[:new][:cb_owner_id] = session[:userid]
    @edit[:cb_owner_name] = current_user.name
  end
  # Get chargeback tags
  cats = Classification.categories.collect { |c| c if c.show }.compact # Get categories, sort by name, remove nils
  cats.delete_if { |c| c.read_only? || c.entries.length == 0 } # Remove categories that are read only or have no entries
  @edit[:cb_cats] = cats.each_with_object({}) { |c, h| h[c.name] = c.description }
  @edit[:cb_providers] = {}
  @edit[:cb_providers][:container_project] = {}
  @edit[:cb_providers][:vm] = {} # Fill this in if entity show type it ever becomes relevent for VMs
  @edit[:cb_entities_by_provider_id] = {}
  ManageIQ::Providers::ContainerManager.all.each do |provider|
    @edit[:cb_providers][:container_project][provider.name] = provider.id
    @edit[:cb_entities_by_provider_id][provider.id] = {}
    provider.container_projects.all.each do |project|
      @edit[:cb_entities_by_provider_id][provider.id][project.id] = project.name
    end
  end
  # Build trend limit cols array
  if model_report_type(@rpt.db) == :trend
    @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
  end
  # Build performance interval select arrays, if needed
  if [:performance, :trend].include?(model_report_type(@rpt.db))
    build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
  end
  expkey = :record_filter
  @edit[expkey] ||= {} # Create hash for this expression, if needed
  @edit[expkey][:record_filter] = [] # Store exps in an array
  @edit[expkey][:exp_idx] ||= 0
  @edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
  # Get the conditions MiqExpression
  if @rpt.conditions.kind_of?(MiqExpression)
    @edit[:new][:record_filter] = @rpt.conditions.exp
    @edit[:miq_exp] = true
  elsif @rpt.conditions.nil?
    # NOTE(review): this nil assignment is a dead store — it is immediately
    # overwritten by the placeholder expression on the next line
    @edit[:new][:record_filter] = nil
    @edit[:new][:record_filter] = @edit[expkey][:expression] # Copy to new exp
    @edit[:miq_exp] = true
  end
  # Get the display_filter MiqExpression
  @edit[:new][:display_filter] = @rpt.display_filter.nil? ? nil : @rpt.display_filter.exp
  expkey = :display_filter
  @edit[expkey] ||= {} # Create hash for this expression, if needed
  # NOTE(review): this array is a dead store — it is replaced by the
  # placeholder hash two lines below
  @edit[expkey][:expression] = [] # Store exps in an array
  @edit[expkey][:exp_idx] ||= 0 # Start at first exp
  @edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
  # Build display filter expression
  @edit[:new][:display_filter] = @edit[expkey][:expression] if @edit[:new][:display_filter].nil? # Copy to new exp
  # Get timeline fields
  @edit[:tl_last_units] = []
  BAND_UNITS[1..-2].each { |u| @edit[:tl_last_units].push u.pluralize }
  @edit[:unit1] = NOTHING_STRING # Default units and tl field to nothing
  @edit[:unit2] = NOTHING_STRING
  @edit[:unit3] = NOTHING_STRING
  @edit[:new][:tl_field] = NOTHING_STRING
  @edit[:new][:tl_position] = "Last"
  @edit[:new][:tl_last_unit] = SHOWALL_STRING
  @edit[:new][:tl_last_time] = nil
  if @rpt.timeline.kind_of?(Hash) # Timeline has any data
    @edit[:new][:tl_field] = @rpt.timeline[:field] unless @rpt.timeline[:field].blank?
    @edit[:new][:tl_position] = @rpt.timeline[:position] unless @rpt.timeline[:position].blank?
    @edit[:new][:tl_last_unit] = @rpt.timeline[:last_unit] unless @rpt.timeline[:last_unit].blank?
    @edit[:new][:tl_last_time] = @rpt.timeline[:last_time] unless @rpt.timeline[:last_time].blank?
    @edit[:new][:tl_bands] = @rpt.timeline[:bands]
    unless @rpt.timeline[:bands].blank?
      @edit[:unit1] = @rpt.timeline[:bands][0][:unit].capitalize
      @edit[:unit2] = @rpt.timeline[:bands][1][:unit].capitalize if @rpt.timeline[:bands].length > 1
      @edit[:unit3] = @rpt.timeline[:bands][2][:unit].capitalize if @rpt.timeline[:bands].length > 2
    end
  else
    @edit[:new][:tl_bands] = []
  end
  # Get the pdf page size, if present
  if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:pdf]
    @edit[:new][:pdf_page_size] = @rpt.rpt_options[:pdf][:page_size] || DEFAULT_PDF_PAGE_SIZE
  else
    @edit[:new][:pdf_page_size] = DEFAULT_PDF_PAGE_SIZE
  end
  # Get the hide details setting, if present
  if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:summary]
    @edit[:new][:hide_details] = @rpt.rpt_options[:summary][:hide_detail_rows]
  else
    @edit[:new][:hide_details] = false
  end
  # Get the timeout if present
  if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:queue_timeout]
    @edit[:new][:queue_timeout] = @rpt.rpt_options[:queue_timeout]
  else
    @edit[:new][:queue_timeout] = nil
  end
  case @rpt.group
  when "y"
    @edit[:new][:group] = "Yes"
  when "c"
    @edit[:new][:group] = "Counts"
  else
    @edit[:new][:group] = "No"
    @edit[:new][:row_limit] = @rpt.rpt_options[:row_limit].to_s if @rpt.rpt_options
  end
  # build selected fields array from the report record
  @edit[:new][:sortby1] = NOTHING_STRING # Initialize sortby fields to nothing
  @edit[:new][:sortby2] = NOTHING_STRING
  @edit[:new][:pivotby1] = NOTHING_STRING # Initialize groupby fields to nothing
  @edit[:new][:pivotby2] = NOTHING_STRING
  @edit[:new][:pivotby3] = NOTHING_STRING
  if params[:pressed] == "miq_report_new"
    @edit[:new][:fields] = []
    @edit[:new][:categories] = []
    @edit[:new][:headers] = {}
    @edit[:new][:col_formats] = {}
    @edit[:pivot_cols] = {}
  else
    build_selected_fields(@rpt) # Create the field related @edit arrays and hashes
  end
  # Rebuild the tag descriptions in the new fields array to match the ones in available fields
  @edit[:new][:fields].each do |nf|
    # NOTE(review): split(':') returns an Array, so the fallback string below
    # interpolates array syntax (e.g. '["Managed ", " x"] (Category not found)')
    # — presumably the intent was a single segment of the name; confirm
    tag = nf.first.split(':')
    if nf.first.include?("Managed :")
      entry = MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |a| a.last == nf.last }
      nf[0] = entry ? entry.first : "#{tag} (Category not found)"
    end
  end
  @edit[:current] = ["copy", "new"].include?(params[:action]) ? {} : copy_hash(@edit[:new])
  # For trend reports, check for percent field chosen
  if @rpt.db && @rpt.db == TREND_MODEL &&
     MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find do|af|
       af.last ==
         @edit[:new][:perf_trend_db] + "-" + @edit[:new][:perf_trend_col]
     end.first.include?("(%)")
    @edit[:percent_col] = true
  end
end
# Build the :fields array and :headers hash from the rpt record cols and includes hashes.
# Each entry in fields is [display_label, encoded_key], where the encoded key
# is "db-col" for main-table columns or "db.include.path-col" for included
# tables. Also reconstructs pivot_cols, sortby1/2 and pivotby1..3 in @edit[:new].
def build_selected_fields(rpt)
  fields = []
  headers = {}
  col_formats = {}
  pivot_cols = {}
  rpt.col_formats ||= Array.new(rpt.col_order.length) # Create array of nils if col_formats not present (backward compat)
  rpt.col_order.each_with_index do |col, idx|
    if col.starts_with?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
      # Virtual custom attribute: strip the prefix for the display label
      field_key = rpt.db + "-" + col
      field_value = col.gsub(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX, "")
    elsif !col.include?(".") # Main table field
      field_key = rpt.db + "-" + col
      field_value = friendly_model_name(rpt.db) +
                    Dictionary.gettext(rpt.db + "." + col.split("__").first, :type => :column, :notfound => :titleize)
    else # Included table field
      inc_string = find_includes(col.split("__").first, rpt.include) # Get the full include string
      field_key = rpt.db + "." + inc_string.to_s + "-" + col.split(".").last
      if inc_string.to_s.ends_with?(".managed") || inc_string.to_s == "managed"
        # don't titleize tag name, need it to lookup later to get description by tag name
        field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) + col.split(".").last
      else
        field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) +
                      Dictionary.gettext(col.split(".").last.split("__").first, :type => :column, :notfound => :titleize)
      end
    end
    if field_key.include?("__") # Check for calculated pivot column ("col__calc")
      field_key1, calc_typ = field_key.split("__")
      pivot_cols[field_key1] ||= []
      pivot_cols[field_key1] << calc_typ.to_sym
      pivot_cols[field_key1].sort! # Sort the array
      fields.push([field_value, field_key1]) unless fields.include?([field_value, field_key1]) # Add original col to fields array
    else
      fields.push([field_value, field_key]) # Add to fields array
    end
    # Create the groupby keys if groupby array is present
    if rpt.rpt_options &&
       rpt.rpt_options[:pivot] &&
       rpt.rpt_options[:pivot][:group_cols] &&
       rpt.rpt_options[:pivot][:group_cols].kind_of?(Array)
      if rpt.rpt_options[:pivot][:group_cols].length > 0
        @edit[:new][:pivotby1] = field_key if col == rpt.rpt_options[:pivot][:group_cols][0]
      end
      if rpt.rpt_options[:pivot][:group_cols].length > 1
        @edit[:new][:pivotby2] = field_key if col == rpt.rpt_options[:pivot][:group_cols][1]
      end
      if rpt.rpt_options[:pivot][:group_cols].length > 2
        @edit[:new][:pivotby3] = field_key if col == rpt.rpt_options[:pivot][:group_cols][2]
      end
    end
    # Create the sortby keys if sortby array is present
    if rpt.sortby.kind_of?(Array)
      if rpt.sortby.length > 0
        # If first sortby field as a break suffix, set up sortby1 with a suffix
        if MiqReport.is_break_suffix?(rpt.sortby[0].split("__")[1])
          sort1, suffix1 = rpt.sortby[0].split("__") # Get sort field and suffix, if present
          @edit[:new][:sortby1] = field_key + (suffix1 ? "__#{suffix1}" : "") if col == sort1
        else # Not a break suffix sort field, just copy the field name to sortby1
          @edit[:new][:sortby1] = field_key if col == rpt.sortby[0]
        end
      end
      if rpt.sortby.length > 1
        if MiqReport.is_break_suffix?(rpt.sortby[1].split("__")[1])
          sort2, suffix2 = rpt.sortby[1].split("__") # Get sort field and suffix, if present
          @edit[:new][:sortby2] = field_key + (suffix2 ? "__#{suffix2}" : "") if col == sort2
        else # Not a break suffix sort field, just copy the field name to sortby1
          @edit[:new][:sortby2] = field_key if col == rpt.sortby[1]
        end
      end
    end
    headers[field_key] = rpt.headers[idx] # Add col to the headers hash
    if field_key.include?("__") # if this a pivot calc field?
      headers[field_key.split("__").first] = field_value # Save the original field key as well
    end
    col_formats[field_key] = rpt.col_formats[idx] # Add col to the headers hash
  end
  # Remove the non-cost and owner columns from the arrays for Chargeback
  if Chargeback.db_is_chargeback?(rpt.db)
    f_len = fields.length
    for f_idx in 1..f_len # Go thru fields in reverse (deleting by index from the end keeps earlier indexes stable)
      f_key = fields[f_len - f_idx].last
      next if f_key.ends_with?(*CHARGEBACK_ALLOWED_FIELD_SUFFIXES)
      headers.delete(f_key)
      col_formats.delete(f_key)
      fields.delete_at(f_len - f_idx)
    end
  end
  @edit[:new][:fields] = fields
  @edit[:new][:headers] = headers
  @edit[:new][:col_formats] = col_formats
  @edit[:pivot_cols] = pivot_cols
  build_field_order
end
# Rebuild @edit[:new][:field_order] from the selected fields, expanding each
# pivot-calculation column into one entry per configured calculation type.
def build_field_order
  pivoting = @edit[:new][:pivotby1] != NOTHING_STRING
  order = []
  @edit[:new][:fields].each do |entry|
    label, key = entry
    if pivoting && @edit[:pivot_cols].key?(key) # Pivot calc column: expand per calc type
      MiqReport::PIVOTS.each do |pivot|
        calc_typ = pivot.first
        next unless @edit[:pivot_cols][key].include?(calc_typ)
        order.push(["#{label} (#{calc_typ.to_s.titleize})", "#{key}__#{calc_typ}"])
      end
    else
      order.push(entry)
    end
  end
  @edit[:new][:field_order] = order
end
# Walk the report's nested include hash to find where "table[.table...].col"
# lives, returning the dotted include path (e.g. "os.hardware"), or nil when
# it cannot be located.
def find_includes(col, includes)
  parts = col.split(".")
  field = parts.last
  tables = parts[0..-2]
  table = tables.first
  node = includes[table]
  # Direct hit: this level lists the table and its columns contain the field
  return table if node && node["columns"] && node["columns"].include?(field)
  if node && node["include"]
    # Descend along the explicit table path, dropping the first table segment
    deeper = find_includes([tables[1..-1], field].flatten.join('.'), node["include"])
    return "#{table}.#{deeper}" if deeper
  end
  # Otherwise search each included subtree for the column
  includes.each_pair do |key, inc|
    next unless inc["include"] # Only subtrees with their own includes
    deeper = find_includes(col, inc["include"])
    return nil if deeper.nil? # First miss is treated as "not found anywhere"
    return "#{key}.#{deeper}"
  end
  nil
end
# Point the tree node (x_node) at @rpt inside the "Custom" folder of the
# report menu structure held in @sb[:rpt_menu]. The menu is nested as
# [group_title, [[folder_name, [report_names...]], ...]].
def setnode_for_customreport
  @sb[:rpt_menu].each_with_index do |level1_nodes, i|
    next unless level1_nodes[0] == @sb[:grp_title]
    level1_nodes[1].each_with_index do |level2_nodes, k|
      # Check for the existence of the Custom folder in the Reports tree and
      # check if at least one report exists underneath it
      next unless level2_nodes[0].downcase == "custom" && level2_nodes[1].count > 1
      # Plain each (was each_with_index with a single block param — the index
      # was silently discarded and never used)
      level2_nodes[1].each do |report|
        self.x_node = "xx-#{i}_xx-#{i}-#{k}_rep-#{to_cid(@rpt.id)}" if report == @rpt.name
      end
    end
  end
end
# Validate the report configuration in @edit[:new] against the rpt record:
# trend settings, selected fields, chargeback "show costs by" choices, and
# column styling expressions. Errors are queued via add_flash and the tab
# containing the first problem is activated. Returns true when no errors.
def valid_report?(rpt)
  active_tab = 'edit_1'
  if @edit[:new][:model] == TREND_MODEL
    unless @edit[:new][:perf_trend_col]
      add_flash(_('Trending for is required'), :error)
    end
    unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
      add_flash(_('Trend Target Limit must be configured'), :error)
    end
    if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
      add_flash(_('Trend Target Limit must be numeric'), :error)
    end
  elsif @edit[:new][:fields].empty?
    add_flash(_('At least one Field must be selected'), :error)
  end
  if Chargeback.db_is_chargeback?(@edit[:new][:model])
    # msg is nil when the selection for the chosen show type is complete
    msg = case @edit[:new][:cb_show_typ]
          when nil
            _('Show Costs by must be selected')
          when 'owner'
            _('An Owner must be selected') unless @edit[:new][:cb_owner_id]
          when 'tenant'
            _('A Tenant Category must be selected') unless @edit[:new][:cb_tenant_id]
          when 'tag'
            if !@edit[:new][:cb_tag_cat]
              _('A Tag Category must be selected')
            elsif !@edit[:new][:cb_tag_value]
              _('A Tag must be selected')
            end
          when 'entity'
            unless @edit[:new][:cb_entity_id]
              _("A specific #{ui_lookup(:model => @edit[:new][:cb_model])} or all must be selected")
            end
          end
    if msg
      add_flash(msg, :error)
      active_tab = 'edit_3'
    end
  end
  # Validate column styles
  unless rpt.col_options.blank? || @edit[:new][:field_order].nil?
    @edit[:new][:field_order].each do |f| # Go thru all of the cols in order
      col = f.last.split('.').last.split('-').last
      if val = rpt.col_options[col] # Skip if no options for this col
        next unless val.key?(:style) # Skip if no style options
        val[:style].each_with_index do |s, s_idx| # Go through all of the configured ifs
          if s[:value]
            if e = MiqExpression.atom_error(rpt.col_to_expression_col(col.split('__').first), # See if the value is in error
                                            s[:operator],
                                            s[:value])
              # NOTE(review): msg is assigned the add_flash return value here
              # but never read afterwards — the assignment appears redundant
              msg = case s_idx + 1
                    when 1
                      add_flash(_("Styling for '%{item}', first value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    when 2
                      add_flash(_("Styling for '%{item}', second value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    when 3
                      add_flash(_("Styling for '%{item}', third value is in error: %{message}") %
                        {:item => f.first, :message => e.message}, :error)
                    end
              active_tab = 'edit_9'
            end
          end
        end
      end
    end
  end
  unless rpt.valid? # Check the model for errors
    rpt.errors.each do |field, msg|
      add_flash("#{field.to_s.capitalize} #{msg}", :error)
    end
  end
  @sb[:miq_tab] = active_tab if flash_errors?
  @flash_array.nil?
end
# Check that the chargeback "Show Costs by" selection in @edit[:new] is
# fully configured. Returns false for an unknown show type; otherwise
# returns the selected value(s) (truthy when complete, nil otherwise).
def valid_chargeback_fields
  show_type = @edit[:new][:cb_show_typ]
  return false unless %w(owner tenant tag entity).include?(show_type)
  case show_type
  when 'owner'  then @edit[:new][:cb_owner_id]
  when 'tenant' then @edit[:new][:cb_tenant_id]
  when 'tag'    then @edit[:new][:cb_tag_cat] && @edit[:new][:cb_tag_value]
  when 'entity' then @edit[:new][:cb_entity_id] && @edit[:new][:cb_provider_id]
  end
end
# Check for tab switch error conditions
#
# Called when the user clicks a different editor tab. If the target tab's
# prerequisites are not met (e.g. no fields selected yet), a flash error
# is added and @sb[:miq_tab] is forced back to a usable tab.
def check_tabs
  @sb[:miq_tab] = params[:tab]
  active_tab = 'edit_1' # tab to fall back to on error
  case @sb[:miq_tab].split('_')[1]
  when '8' # Consolidation
    if @edit[:new][:fields].empty?
      add_flash(_('Consolidation tab is not available until at least 1 field has been selected'), :error)
    end
  when '2' # Formatting
    if @edit[:new][:fields].empty?
      add_flash(_('Formatting tab is not available until at least 1 field has been selected'), :error)
    end
  when '3' # Filter
    if @edit[:new][:model] == TREND_MODEL
      unless @edit[:new][:perf_trend_col]
        add_flash(_('Filter tab is not available until Trending for field has been selected'), :error)
      end
      unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
        add_flash(_('Filter tab is not available until Trending Target Limit has been configured'), :error)
      end
      if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
        add_flash(_('Trend Target Limit must be numeric'), :error)
      end
    elsif @edit[:new][:fields].empty?
      add_flash(_('Filter tab is not available until at least 1 field has been selected'), :error)
    end
  when '4' # Summary
    if @edit[:new][:fields].empty?
      add_flash(_('Summary tab is not available until at least 1 field has been selected'), :error)
    end
  when '5' # Charts
    if @edit[:new][:fields].empty?
      add_flash(_('Charts tab is not available until at least 1 field has been selected'), :error)
    elsif @edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING
      add_flash(_('Charts tab is not available unless a sort field has been selected'), :error)
      active_tab = 'edit_4' # send the user to the Summary tab to pick a sort
    end
  when '6' # Timeline
    if @edit[:new][:fields].empty?
      add_flash(_('Timeline tab is not available until at least 1 field has been selected'), :error)
    else
      # Timelines require at least one datetime column among the chosen fields
      found = false
      @edit[:new][:fields].each do |field|
        if MiqReport.get_col_type(field[1]) == :datetime
          found = true
          break
        end
      end
      unless found
        add_flash(_('Timeline tab is not available unless at least 1 time field has been selected'), :error)
      end
    end
  when '7' # Preview
    if @edit[:new][:model] == TREND_MODEL
      unless @edit[:new][:perf_trend_col]
        add_flash(_('Preview tab is not available until Trending for field has been selected'), :error)
      end
      unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
        add_flash(_('Preview tab is not available until Trend Target Limit has been configured'), :error)
      end
      if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
        add_flash(_('Trend Target Limit: Value must be numeric'), :error)
      end
    elsif @edit[:new][:fields].empty?
      add_flash(_('Preview tab is not available until at least 1 field has been selected'), :error)
    elsif Chargeback.db_is_chargeback?(@edit[:new][:model]) && !valid_chargeback_fields
      add_flash(_('Preview tab is not available until Chargeback Filters has been configured'), :error)
      active_tab = 'edit_3'
    end
  when '9' # Styling
    if @edit[:new][:fields].empty?
      add_flash(_('Styling tab is not available until at least 1 field has been selected'), :error)
    end
  end
  @sb[:miq_tab] = active_tab if flash_errors?
end
end
Remove three useless assignments
Additional changes to satisfy RuboCop style checks.
module ReportController::Reports::Editor
extend ActiveSupport::Concern
CHARGEBACK_ALLOWED_FIELD_SUFFIXES = %w(_cost -owner_name _metric -provider_name -provider_uid -project_uid -archived).freeze
# Entry point for creating a brand-new report; clears any id carried
# over from a report show screen and delegates to the shared edit flow.
def miq_report_new
  assert_privileges("miq_report_new")
  @_params.delete(:id) # the add button may have been pressed from a report show screen
  miq_report_edit
end
# Copy an existing report: load it, drop its id so a later save creates
# a new record, and display the edit screen (or just switch tabs when
# re-entered with params[:tab]).
def miq_report_copy
  assert_privileges("miq_report_copy")
  @report = nil # Clear any saved report object
  if params[:tab] # Came in to change the tab
    check_tabs
    build_edit_screen
  else
    @sb[:miq_tab] = "edit_1"
    @rpt = MiqReport.find(params[:id])
    @rpt.id = nil # Treat as a new report
    set_form_vars
    build_edit_screen
  end
  # BUGFIX: was "@ina_form" — a misspelled instance variable that nothing
  # reads; the form-mode flag used throughout this controller is @in_a_form.
  @in_a_form = @lock_tree = true
  replace_right_cell
end
# Add/edit flow for a report. Dispatches on the pressed button:
# "cancel" discards the edit session; "add"/"save" validates and saves
# the record; anything else (including "reset" and the initial entry)
# (re)builds the edit screen.
def miq_report_edit
  assert_privileges("miq_report_edit")
  case params[:button]
  when "cancel"
    @edit[:rpt_id] ?
      add_flash(_("Edit of %{model} \"%{name}\" was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport"), :name => @edit[:rpt_title]}) :
      add_flash(_("Add of new %{model} was cancelled by the user") % {:model => ui_lookup(:model => "MiqReport")})
    @edit = session[:edit] = nil # clean out the saved info
    replace_right_cell
  when "add", "save"
    id = params[:id] || "new"
    return unless load_edit("report_edit__#{id}", "replace_cell__explorer")
    get_form_vars
    @changed = (@edit[:new] != @edit[:current])
    @rpt = @edit[:rpt_id] ? find_by_id_filtered(MiqReport, params[:id]) :
           MiqReport.new
    set_record_vars(@rpt)
    unless valid_report?(@rpt)
      build_edit_screen
      replace_right_cell
      return
    end
    # Charts need a sort field; refuse to save without one
    if @edit[:new][:graph_type] && (@edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING)
      add_flash(_("Report can not be saved unless sort field has been configured for Charts"), :error)
      @sb[:miq_tab] = "edit_4"
      build_edit_screen
      replace_right_cell
      return
    end
    if @rpt.save
      # update report name in menu if name is edited
      menu_repname_update(@edit[:current][:name], @edit[:new][:name]) if @edit[:current][:name] != @edit[:new][:name]
      AuditEvent.success(build_saved_audit(@rpt, @edit))
      @edit[:rpt_id] ?
        add_flash(_("%{model} \"%{name}\" was saved") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name}) :
        add_flash(_("%{model} \"%{name}\" was added") % {:model => ui_lookup(:model => "MiqReport"), :name => @rpt.name})
      # only do this for new reports
      unless @edit[:rpt_id]
        self.x_node = "xx-#{@sb[:rpt_menu].length}_xx-#{@sb[:rpt_menu].length}-0"
        setnode_for_customreport
      end
      @edit = session[:edit] = nil # clean out the saved info
      if role_allows(:feature => "miq_report_widget_editor")
        # all widgets for this report
        get_all_widgets("report", from_cid(x_node.split('_').last))
      end
      replace_right_cell(:replace_trees => [:reports])
    else
      # BUGFIX: was "rpt.errors" — rpt is an undefined local in this scope and
      # raised NameError whenever the save failed; the record is @rpt.
      @rpt.errors.each do |field, msg|
        add_flash("#{field.to_s.capitalize} #{msg}", :error)
      end
      @in_a_form = true
      session[:changed] = @changed ? true : false
      @changed = true
      replace_right_cell
    end
  else
    add_flash(_("All changes have been reset"), :warning) if params[:button] == "reset"
    @in_a_form = true
    @report = nil # Clear any saved report object
    if params[:tab] # Came in to change the tab
      @rpt = @edit[:rpt_id] ? MiqReport.find(@edit[:rpt_id]) :
             MiqReport.new
      check_tabs
      build_edit_screen
    else
      @sb[:miq_tab] = "edit_1"
      @rpt = params[:id] && params[:id] != "new" ? MiqReport.find(params[:id]) :
             MiqReport.new
      if @rpt.rpt_type == "Default"
        flash = "Default reports can not be edited"
        redirect_to :action => "show", :id => @rpt.id, :flash_msg => flash, :flash_error => true
        return
      end
      set_form_vars
      build_edit_screen
    end
    @changed = (@edit[:new] != @edit[:current])
    session[:changed] = @changed
    @lock_tree = true
    replace_right_cell
  end
end
# AJAX driven routine to check for changes in ANY field on the form
#
# Reloads the edit session, applies incoming params via get_form_vars
# (which sets @refresh_div/@refresh_partial and the various *_changed
# flags), rebuilds screen state, then streams back partial replacements
# for only the affected page regions.
def form_field_changed
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  get_form_vars
  build_edit_screen
  @changed = (@edit[:new] != @edit[:current])
  render :update do |page|
    page << javascript_prologue
    # Skip the flash refresh while a non-column-list region is re-rendering
    page.replace("flash_msg_div", :partial => "layouts/flash_msg") unless @refresh_div && @refresh_div != "column_lists"
    page.replace(@refresh_div, :partial => @refresh_partial) if @refresh_div
    page.replace("chart_sample_div", :partial => "form_chart_sample") if @refresh_div == "chart_div"
    page.replace("tl_sample_div", :partial => "form_tl_sample") if @refresh_div == "tl_settings_div"
    page.replace_html("calc_#{@calc_div}_div", :text => @calc_val) if @calc_div
    page << "miqSparkle(false);"
    page << javascript_for_miq_button_visibility_changed(@changed)
    if @tl_changed # Reload the screen if the timeline data was changed
      page.replace_html("tl_sample_div", :partial => "form_tl_sample") if @tl_field != NOTHING_STRING
    elsif @formatting_changed # Reload the screen if the formatting pulldowns need to be reset
      page.replace_html("formatting_div", :partial => "form_formatting")
    elsif @tl_repaint
      # page << "tl.paint();"
      page << javascript_hide("notification")
    end
  end
end
# AJAX handler: the record/display filter selector was clicked —
# remember which expression editor is active and re-render the filter.
def filter_change
  return unless load_edit("report_edit__#{params[:id]}", "replace_cell__explorer")
  match = /^(record|display)_filter$/.match(params[:button].to_s)
  @expkey = match[0].to_sym if match
  render :update do |page|
    page << javascript_prologue
    page.replace("filter_div", :partial => "form_filter")
    page << "miqSparkle(false);"
  end
end
private
# Build the per-tab state (@pivots*, @sortby*, @tl_* etc.) needed to
# render whichever editor tab is active in @sb[:miq_tab].
def build_edit_screen
  build_tabs
  get_time_profiles # Get time profiles list (global and user specific)
  case @sb[:miq_tab].split("_")[1]
  when "1" # Select columns
    @edit[:models] ||= reportable_models
    # Add the blank choice if no table chosen yet
    # @edit[:models].insert(0,["<Choose>", "<Choose>"]) if @edit[:new][:model] == nil && @edit[:models][0][0] != "<Choose>"
    if @edit[:new][:model].nil?
      if @edit[:models][0][0] != "<Choose>"
        @edit[:models].insert(0, ["<Choose>", "<Choose>"])
      end
    else
      if @edit[:models][0][0] == "<Choose>"
        @edit[:models].delete_at(0)
      end
    end
  when "8" # Consolidate
    # Build group chooser arrays; each lower chooser excludes the fields
    # already picked by the choosers above it
    @pivots1 = @edit[:new][:fields].dup
    @pivots2 = @pivots1.dup.delete_if { |g| g[1] == @edit[:new][:pivotby1] }
    @pivots3 = @pivots2.dup.delete_if { |g| g[1] == @edit[:new][:pivotby2] }
    @pivotby1 = @edit[:new][:pivotby1]
    @pivotby2 = @edit[:new][:pivotby2]
    @pivotby3 = @edit[:new][:pivotby3]
  when "2" # Formatting
    # @edit[:calc_xml] = build_calc_combo_xml # Get the combobox XML for any numeric fields
  when "3" # Filter
    # Build record filter expression
    if @edit[:miq_exp] || # Is this stored as an MiqExp object
       ["new", "copy", "create"].include?(request.parameters["action"]) # or it's a new condition
      @edit[:record_filter][:exp_idx] ||= 0 # Start at first exp
      new_record_filter = @edit[:new][:record_filter]
      @edit[:record_filter][:expression] = copy_hash(new_record_filter) unless new_record_filter.blank?
      @expkey = :record_filter
      # Initialize the exp array
      exp_array(:init, @edit[:record_filter][:expression]) if @edit[:record_filter][:exp_array].nil?
      @edit[:record_filter][:exp_table] = exp_build_table(@edit[:record_filter][:expression])
      exp_get_prefill_types # Build prefill lists
      @edit[:record_filter][:exp_model] = @edit[:new][:model] # Set the model for the expression editor
    end
    # Build display filter expression
    @edit[:display_filter][:exp_idx] ||= 0 # Start at first exp
    new_display_filter = @edit[:new][:display_filter]
    @edit[:display_filter][:expression] = copy_hash(new_display_filter) unless new_display_filter.blank?
    @expkey = :display_filter
    # Initialize the exp array
    exp_array(:init, @edit[:display_filter][:expression]) if @edit[:display_filter][:exp_array].nil?
    @edit[:display_filter][:exp_table] = exp_build_table(@edit[:display_filter][:expression])
    cols = @edit[:new][:field_order]
    @edit[:display_filter][:exp_available_fields] = MiqReport.display_filter_details(cols, :field)
    cols = @edit[:new][:fields]
    @edit[:display_filter][:exp_available_tags] = MiqReport.display_filter_details(cols, :tag)
    @edit[:display_filter][:exp_model] = "_display_filter_" # Set model for display filter
    @expkey = :record_filter # Start with Record Filter showing
    if @edit[:new][:perf_interval] && !@edit[:new][:time_profile]
      set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
    end
  when "4" # Summarize
    # Build sort chooser arrays(@edit[:new][:fields], :field)
    @sortby1 = @edit[:new][:sortby1]
    @sortby2 = @edit[:new][:sortby2]
    @sort1 = @edit[:new][:field_order].dup
    @sort2 = @sort1.dup.delete_if { |s| s[1] == @sortby1.split("__").first }
  when "5" # Charts
    options = chart_fields_options
    if options.empty?
      @edit[:new][:chart_column] = nil
    else
      # NOTE(review): this expression's value is discarded — it looks like it
      # was meant to assign @edit[:new][:chart_column] to the first option
      # when the current column is no longer available. Confirm intent.
      options[0][1] unless options.detect { |_, v| v == @edit[:new][:chart_column] }
    end
  when "6" # Timeline
    # Only datetime columns can drive a timeline
    @tl_fields = []
    @edit[:new][:fields].each do |field|
      if MiqReport.get_col_type(field[1]) == :datetime
        @tl_fields.push(field)
      end
    end
    @tl_field = @edit[:new][:tl_field]
    if @edit[:new][:tl_position] == "Last"
      @position_time = format_timezone(Time.now, "UTC", nil)
    else
      @position_time = format_timezone(Time.now - 1.year, "UTC", nil)
    end
    @timeline = true if @tl_field != NOTHING_STRING
    build_timeline_units
    # Choices for the "show last N units" pulldown, per unit
    @tl_last_time_choices = case @edit[:new][:tl_last_unit]
                            when "Minutes" then Array.new(12) { |t| (t * 5 + 5).to_s }
                            when "Hours" then Array.new(24) { |t| (t + 1).to_s }
                            when "Days" then Array.new(31) { |t| (t + 1).to_s }
                            when "Weeks" then Array.new(4) { |t| (t + 1).to_s }
                            when "Months" then Array.new(12) { |t| (t + 1).to_s }
                            when "Years" then Array.new(10) { |t| (t + 1).to_s }
                            end
    if @edit[:new][:tl_last_time].nil? && @edit[:new][:tl_last_unit] != SHOWALL_STRING
      @edit[:new][:tl_last_time] = @tl_last_time_choices.first
    end
  when "7" # Preview
    # generate preview report when
  end
  @in_a_form = true
  if ["new", "copy", "create"].include?(request.parameters["action"])
    # drop_breadcrumb( {:name=>"Add Report", :url=>"/report/new"} )
    @gtl_url = "/new"
  else
    # drop_breadcrumb( {:name=>"Edit Report", :url=>"/report/edit"} )
    @gtl_url = "/edit"
  end
end
# Build the [display name, model name] pairs offered in the model pulldown.
def reportable_models
  MiqReport.reportable_models.map do |model|
    pretty = Dictionary.gettext(model, :type => :model, :notfound => :titleize, :plural => true)
    [pretty, model]
  end
end
# Create the arrays for the start/end interval pulldowns
#
# Populates @edit[:start_array] / @edit[:end_array] with [label, offset]
# pairs for the chosen trend interval ("hourly" or "daily") and seeds
# default perf_start/perf_end offsets. Offsets are stored as strings of
# seconds.
def build_perf_interval_arrays(interval)
  case interval
  when "hourly"
    end_array = [
      ["Today", "0"],
      ["Yesterday", 1.day.to_s]
    ]
    5.times { |i| end_array.push(["#{i + 2} days ago", (i + 2).days.to_s]) }
    4.times { |i| end_array.push(["#{pluralize(i + 1, "week")} ago", (i + 1).weeks.to_s]) }
    # NOTE(review): month labels say "i + 2 months ago" but the offset is
    # (i + 1).months — off by one relative to the label (here and in the
    # start_array below). Confirm whether the label or the offset is intended.
    5.times { |i| end_array.push(["#{pluralize(i + 2, "month")} ago", (i + 1).months.to_s]) }
    start_array = []
    6.times { |i| start_array.push(["#{pluralize(i + 1, "day")}", (i + 1).days.to_s]) }
    4.times { |i| start_array.push(["#{pluralize(i + 1, "week")}", (i + 1).weeks.to_s]) }
    5.times { |i| start_array.push(["#{pluralize(i + 2, "month")}", (i + 1).months.to_s]) }
    @edit[:new][:perf_end] ||= "0"
    @edit[:new][:perf_start] ||= 1.day.to_s
  when "daily"
    end_array = [
      ["Yesterday", "0"] # Start with yesterday, since we only allow full 24 hour days in daily trending
    ]
    # Daily offsets are relative to the yesterday baseline above
    5.times { |i| end_array.push(["#{i + 2} days ago", (i + 1).days.to_s]) }
    3.times { |i| end_array.push(["#{pluralize((i + 1), "week")} ago", ((i + 1).weeks - 1.day).to_s]) }
    6.times { |i| end_array.push(["#{pluralize((i + 1), "month")} ago", ((i + 1).months - 1.day).to_s]) }
    start_array = []
    5.times { |i| start_array.push(["#{pluralize(i + 2, "day")}", (i + 2).days.to_s]) }
    3.times { |i| start_array.push(["#{pluralize((i + 1), "week")}", (i + 1).weeks.to_s]) }
    11.times { |i| start_array.push(["#{pluralize((i + 1), "month")}", (i + 1).months.to_s]) }
    start_array.push(["1 year", 1.year.to_i.to_s]) # For some reason, 1.year is a float, so use to_i to get rid of decimals
    @edit[:new][:perf_end] ||= "0"
    @edit[:new][:perf_start] ||= 2.days.to_s
  end
  @edit[:start_array] = start_array
  @edit[:end_array] = end_array
end
# This method figures out what to put in each band unit pulldown array
#
# BAND_UNITS is an ordered list of unit names. Each pulldown (@units1-3)
# only offers units on the correct side of its neighboring bands, so the
# three timeline bands keep a consistent granularity ordering.
def build_timeline_units
  unless @edit[:new][:tl_bands].blank?
    split1 = BAND_UNITS.join(" ").split(@edit[:unit2]).first # Split on the second band unit
    @units1 = split1.split(" ") # Grab the units before the second band
    split2 = BAND_UNITS.join(" ").split(@edit[:unit1]).last # Split on the first band unit
    split3 = split2.split(@edit[:unit3]) # Split the rest on the 3rd unit
    @units2 = split3.first.split(" ") # Grab the first part for the 2nd unit
    split4 = BAND_UNITS.join(" ").split(@edit[:unit2]) # Split on the second band unit
    @units3 = split4.last.split(" ") # Grab the last part for the 3rd unit
  end
end
# Reset report column fields if model or interval was changed
#
# Clears every column-derived setting in @edit[:new] back to its blank
# state and flags the record filter as a fresh MiqExpression.
def reset_report_col_fields
  cleared = {
    :fields          => [],             # selected columns
    :headers         => {},             # column header overrides
    :pivotby1        => NOTHING_STRING, # consolidation groupings
    :pivotby2        => NOTHING_STRING,
    :pivotby3        => NOTHING_STRING,
    :sortby1         => NOTHING_STRING, # sort fields
    :sortby2         => NOTHING_STRING,
    :filter_operator => nil,
    :filter_string   => nil,
    :categories      => [],
    :graph_type      => nil,            # chart settings
    :chart_mode      => nil,
    :chart_column    => nil,
    :perf_trend_col  => nil,            # trend settings
    :perf_trend_db   => nil,
    :perf_trend_pct1 => nil,
    :perf_trend_pct2 => nil,
    :perf_trend_pct3 => nil,
    :perf_limit_col  => nil,
    :perf_limit_val  => nil,
    :record_filter   => nil,            # record/display filters
    :display_filter  => nil
  }
  cleared.each { |key, value| @edit[:new][key] = value }
  @edit[:miq_exp] = true
end
# Assemble @tabs for the editor. The tab set depends on the report type
# (trend, chargeback, or regular); the currently active tab is placed at
# position 0 with an empty label as a marker for the view.
def build_tabs
  req = "edit"
  model = @edit[:new][:model]
  @tabs =
    if model == TREND_MODEL
      [["#{req}_1", _("Columns")],
       ["#{req}_3", _("Filter")],
       ["#{req}_7", _("Preview")]]
    elsif Chargeback.db_is_chargeback?(model.to_s)
      [["#{req}_1", _("Columns")],
       ["#{req}_2", _("Formatting")],
       ["#{req}_3", _("Filter")],
       ["#{req}_7", _("Preview")]]
    else
      [["#{req}_1", _("Columns")],
       ["#{req}_8", _("Consolidation")],
       ["#{req}_2", _("Formatting")],
       ["#{req}_9", _("Styling")],
       ["#{req}_3", _("Filter")],
       ["#{req}_4", _("Summary")],
       ["#{req}_5", _("Charts")],
       ["#{req}_6", _("Timeline")],
       ["#{req}_7", _("Preview")]]
    end
  active = @sb[:miq_tab].split("_")[1] # number of the active tab
  @tabs.unshift(["#{req}_#{active}", ""]) # marker element consumed by the view
end
# Get variables from edit form
#
# Dispatches the incoming form params: first the area-specific gfv_*
# handlers, then prefix-keyed params — "hdr_*" (column headers),
# "fmt_*" (column formats), "calc_*" (group calculations),
# "pivotcalc_*" (pivot aggregates) and "style*" (column styling).
def get_form_vars
  @assigned_filters = []
  gfv_report_fields # Global report fields
  gfv_move_cols_buttons # Move cols buttons
  gfv_model # Model changes
  gfv_trend # Trend fields
  gfv_performance # Performance fields
  gfv_chargeback # Chargeback fields
  gfv_charts # Charting fields
  gfv_pivots # Consolidation fields
  gfv_sort # Summary fields
  gfv_timeline # Timeline fields
  # Check for key prefixes (params starting with certain keys)
  params.each do |key, value|
    # See if any headers were sent in
    @edit[:new][:headers][key.split("_")[1..-1].join("_")] = value if key.split("_").first == "hdr"
    # See if any formats were sent in
    if key.split("_").first == "fmt"
      key2 = key.gsub("___", ".") # Put period sub table separator back into the key
      @edit[:new][:col_formats][key2.split("_")[1..-1].join("_")] = value.blank? ? nil : value.to_sym
      @formatting_changed = value.blank?
    end
    # See if any group calculation checkboxes were sent in
    gfv_key_group_calculations(key, value) if key.split("_").first == "calc"
    # See if any pivot calculation checkboxes were sent in
    gfv_key_pivot_calculations(key, value) if key.split("_").first == "pivotcalc"
    # Check for style fields
    prefix = key.split("_").first
    gfv_key_style(key, value) if prefix && prefix.starts_with?("style")
  end
end
# Handle params starting with "calc" — the grouping-calculation
# checkboxes. Key format is "calc_<field-order-index>"; value is a
# comma-separated list of aggregate names ("null" entries are dropped).
def gfv_key_group_calculations(key, value)
  idx = key.split("_").last.to_i
  field = @edit[:new][:field_order][idx].last # field name for this row
  groupings = value.split(",").sort.map(&:to_sym).reject { |calc| calc == :null }
  @edit[:new][:col_options][field_to_col(field)] = {:grouping => groupings}
end
# Handle params starting with "pivotcalc" — the aggregate checkboxes for
# a pivoted column. Key format is "pivotcalc_<field-index>"; value is a
# comma-separated list of aggregate names.
def gfv_key_pivot_calculations(key, value)
  idx = key.split("_").last.to_i
  field = @edit[:new][:fields][idx].last # field name for this row
  @edit[:pivot_cols][field] = []
  value.split(',').sort.map(&:to_sym).each do |agg|
    @edit[:pivot_cols][field] << agg
    # Create new header from original header + aggregate function
    @edit[:new][:headers][field + "__#{agg}"] = @edit[:new][:headers][field] + " (#{agg.to_s.titleize})"
  end
  build_field_order
end
# Handle params starting with "style" — the per-column styling widgets.
# Key format is "<parm>_<field-index>_<style-index>", e.g. "styleop_2_0".
def gfv_key_style(key, value)
  parm, f_idx, s_idx = key.split("_") # Get the parm type, field index, and style index
  f_idx = f_idx.to_i
  s_idx = s_idx.to_i
  f = @edit[:new][:field_order][f_idx] # Get the field element
  field_sub_type = MiqExpression.get_col_info(f.last)[:format_sub_type]
  field_data_type = MiqExpression.get_col_info(f.last)[:data_type]
  field_name = f.last.include?(".") ? f.last.split(".").last.tr("-", ".") : f.last.split("-").last
  case parm
  when "style" # New CSS class chosen
    if value.blank?
      # A blank class removes this style entry (and any emptied parents)
      @edit[:new][:col_options][field_name][:style].delete_at(s_idx)
      @edit[:new][:col_options][field_name].delete(:style) if @edit[:new][:col_options][field_name][:style].empty?
      @edit[:new][:col_options].delete(field_name) if @edit[:new][:col_options][field_name].empty?
    else
      @edit[:new][:col_options][field_name] ||= {}
      @edit[:new][:col_options][field_name][:style] ||= []
      @edit[:new][:col_options][field_name][:style][s_idx] ||= {}
      @edit[:new][:col_options][field_name][:style][s_idx][:class] = value.to_sym
      # Default operator/value/suffix depend on the field's data type
      ovs = case field_data_type
            when :boolean
              ["DEFAULT", "true"]
            when :integer, :float
              ["DEFAULT", "", MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units) ? MiqExpression::FORMAT_SUB_TYPES.fetch_path(field_sub_type, :units).first : nil]
            else
              ["DEFAULT", ""]
            end
      # CLEANUP: op/val/suffix were fresh locals, so the previous
      # "op ||= ovs[0]" style assignments were plain assignments in disguise.
      op, val, suffix = ovs
      @edit[:new][:col_options][field_name][:style][s_idx][:operator] ||= op
      @edit[:new][:col_options][field_name][:style][s_idx][:value] ||= val
      @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] ||= suffix if suffix
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleop" # New operator chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:operator] = value
    if value == "DEFAULT"
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
      # Remove all style array elements after this one
      # BUGFIX: the old loop called delete_at(0), delete_at(1), ... which
      # removed entries from the FRONT of the array (including the one being
      # edited) instead of the trailing ones; trim everything after s_idx.
      @edit[:new][:col_options][field_name][:style].slice!((s_idx + 1)..-1)
    elsif value.include?("NIL") || value.include?("EMPTY")
      @edit[:new][:col_options][field_name][:style][s_idx].delete(:value) # Remove value key
    elsif [:datetime, :date].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = EXP_TODAY # Set default date value
    elsif [:boolean].include?(field_data_type)
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = true # Set default boolean value
    else
      @edit[:new][:col_options][field_name][:style][s_idx][:value] = "" # Set default value
    end
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  when "styleval" # New value chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value] = value
  when "stylesuffix" # New suffix chosen
    @edit[:new][:col_options][field_name][:style][s_idx][:value_suffix] = value.to_sym
    @refresh_div = "styling_div"
    @refresh_partial = "form_styling"
  end
end
# Handle the global report fields: PDF page size, queue timeout, row
# limit, report name, and title. Each is copied into @edit[:new] only
# when present in the request.
def gfv_report_fields
  new_settings = @edit[:new]
  new_settings[:pdf_page_size] = params[:pdf_page_size] if params[:pdf_page_size]
  if params[:chosen_queue_timeout]
    timeout = params[:chosen_queue_timeout]
    new_settings[:queue_timeout] = timeout.blank? ? nil : timeout.to_i
  end
  if params[:row_limit]
    new_settings[:row_limit] = params[:row_limit].blank? ? "" : params[:row_limit]
  end
  new_settings[:name] = params[:name] if params[:name]
  new_settings[:title] = params[:title] if params[:title]
end
# Dispatch the column-mover buttons (right/left/up/down/top/bottom) to
# the corresponding move_cols_* handler; other buttons are ignored.
def gfv_move_cols_buttons
  case params[:button]
  when "right"  then move_cols_right
  when "left"   then move_cols_left
  when "up"     then move_cols_up
  when "down"   then move_cols_down
  when "top"    then move_cols_top
  when "bottom" then move_cols_bottom
  end
end
# Handle a change of the report's base model (db table). Resets all
# column/interval state, seeds performance- or chargeback-specific
# defaults where the new model calls for them, and schedules a full
# form refresh.
def gfv_model
  if params[:chosen_model] && # Check for db table changed
     params[:chosen_model] != @edit[:new][:model]
    @edit[:new][:model] = params[:chosen_model]
    @edit[:new][:perf_interval] = nil # Clear performance interval setting
    @edit[:new][:tz] = nil
    if [:performance, :trend].include?(model_report_type(@edit[:new][:model]))
      @edit[:new][:perf_interval] ||= "daily" # Default to Daily
      @edit[:new][:perf_avgs] ||= "time_interval"
      @edit[:new][:tz] = session[:user_tz]
      build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
    end
    if Chargeback.db_is_chargeback?(@edit[:new][:model])
      @edit[:new][:cb_model] = Chargeback.report_cb_model(@edit[:new][:model])
      @edit[:new][:cb_interval] ||= "daily" # Default to Daily
      @edit[:new][:cb_interval_size] ||= 1
      @edit[:new][:cb_end_interval_offset] ||= 1
      @edit[:new][:cb_groupby] ||= "date" # Default to Date grouping
      @edit[:new][:tz] = session[:user_tz]
    end
    reset_report_col_fields
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  end
end
# Handle trend-report form fields: the trending column, the target limit
# (either a limit column or a fixed value), and the three target-percent
# pulldowns. Only the first matching param in the chain is processed.
def gfv_trend
  if params[:chosen_trend_col]
    @edit[:new][:perf_interval] ||= "daily" # Default to Daily
    @edit[:new][:perf_target_pct1] ||= 100 # Default to 100%
    if params[:chosen_trend_col] == "<Choose>"
      @edit[:new][:perf_trend_db] = nil
      @edit[:new][:perf_trend_col] = nil
    else
      @edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col] = params[:chosen_trend_col].split("-")
      # Percent-based columns get a fixed 100% limit instead of a limit column
      if MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |af| af.last == params[:chosen_trend_col] }.first.include?("(%)")
        @edit[:new][:perf_limit_val] = 100
        @edit[:new][:perf_limit_col] = nil
        @edit[:percent_col] = true
      else
        @edit[:percent_col] = false
        @edit[:new][:perf_limit_val] = nil
      end
      build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
      @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
    # build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
    # @edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
  elsif params[:chosen_limit_col]
    if params[:chosen_limit_col] == "<None>"
      @edit[:new][:perf_limit_col] = nil
    else
      # A limit column and a fixed limit value are mutually exclusive
      @edit[:new][:perf_limit_col] = params[:chosen_limit_col]
      @edit[:new][:perf_limit_val] = nil
    end
    @refresh_div = "columns_div"
    @refresh_partial = "form_columns"
  elsif params[:chosen_limit_val]
    @edit[:new][:perf_limit_val] = params[:chosen_limit_val]
  elsif params[:percent1]
    @edit[:new][:perf_target_pct1] = params[:percent1].to_i
  elsif params[:percent2]
    @edit[:new][:perf_target_pct2] = params[:percent2] == "<None>" ? nil : params[:percent2].to_i
  elsif params[:percent3]
    @edit[:new][:perf_target_pct3] = params[:percent3] == "<None>" ? nil : params[:percent3].to_i
  end
end
# Handle performance-report form fields (interval, averages, start/end
# offsets, timezone, time profile). Only the first matching param in
# the chain is processed per request.
def gfv_performance
  new_settings = @edit[:new]
  if params[:chosen_interval]
    new_settings[:perf_interval] = params[:chosen_interval]
    new_settings[:perf_start] = nil # Clear start/end offsets
    new_settings[:perf_end] = nil
    build_perf_interval_arrays(new_settings[:perf_interval]) # rebuild interval pulldown choices
    reset_report_col_fields
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params[:perf_avgs]
    new_settings[:perf_avgs] = params[:perf_avgs]
  elsif params[:chosen_start]
    new_settings[:perf_start] = params[:chosen_start]
  elsif params[:chosen_end]
    new_settings[:perf_end] = params[:chosen_end]
  elsif params[:chosen_tz]
    new_settings[:tz] = params[:chosen_tz]
  elsif params.key?(:chosen_time_profile)
    chosen = params[:chosen_time_profile]
    new_settings[:time_profile] = chosen.blank? ? nil : chosen.to_i
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  end
end
# Handle chargeback-specific form fields: show-costs-by type, tag
# category/value, owner/tenant/entity/provider ids, grouping, and
# interval settings. Only the first matching param is processed.
def gfv_chargeback
  # Chargeback options
  if params.key?(:cb_show_typ)
    @edit[:new][:cb_show_typ] = params[:cb_show_typ].blank? ? nil : params[:cb_show_typ]
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params.key?(:cb_tag_cat)
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
    if params[:cb_tag_cat].blank?
      @edit[:new][:cb_tag_cat] = nil
      @edit[:new][:cb_tag_value] = nil
    else
      @edit[:new][:cb_tag_cat] = params[:cb_tag_cat]
      # Load the selectable tag values for the newly chosen category
      @edit[:cb_tags] = {}
      Classification.find_by_name(params[:cb_tag_cat]).entries.each { |e| @edit[:cb_tags][e.name] = e.description }
    end
  elsif params.key?(:cb_owner_id)
    @edit[:new][:cb_owner_id] = params[:cb_owner_id].blank? ? nil : params[:cb_owner_id]
  elsif params.key?(:cb_tenant_id)
    @edit[:new][:cb_tenant_id] = params[:cb_tenant_id].blank? ? nil : params[:cb_tenant_id].to_i
  elsif params.key?(:cb_tag_value)
    @edit[:new][:cb_tag_value] = params[:cb_tag_value].blank? ? nil : params[:cb_tag_value]
  elsif params.key?(:cb_entity_id)
    @edit[:new][:cb_entity_id] = params[:cb_entity_id].blank? ? nil : params[:cb_entity_id]
  elsif params.key?(:cb_provider_id)
    @edit[:new][:cb_provider_id] = params[:cb_provider_id].blank? ? nil : params[:cb_provider_id]
    @edit[:new][:cb_entity_id] = "all" # choosing a provider defaults the entity selection to "all"
    build_edit_screen
    @refresh_div = "form_div"
    @refresh_partial = "form"
  elsif params.key?(:cb_groupby)
    @edit[:new][:cb_groupby] = params[:cb_groupby]
  elsif params[:cb_interval]
    # A new interval resets its size and end offset to defaults
    @edit[:new][:cb_interval] = params[:cb_interval]
    @edit[:new][:cb_interval_size] = 1
    @edit[:new][:cb_end_interval_offset] = 1
    @refresh_div = "filter_div"
    @refresh_partial = "form_filter"
  elsif params[:cb_interval_size]
    @edit[:new][:cb_interval_size] = params[:cb_interval_size].to_i
  elsif params[:cb_end_interval_offset]
    @edit[:new][:cb_end_interval_offset] = params[:cb_end_interval_offset].to_i
  end
end
# Handle chart-related form fields: chart type, mode (counts/values),
# data column, top-N count, and the "show other" checkbox. Each changed
# param schedules the smallest partial refresh that covers it.
def gfv_charts
  new_settings = @edit[:new]
  chosen_graph = params[:chosen_graph]
  if chosen_graph && chosen_graph != new_settings[:graph_type]
    if chosen_graph == "<No chart>"
      new_settings[:graph_type] = nil
      # Reset other setting to initial settings if choosing <No chart>
      new_settings[:graph_count] = @edit[:current][:graph_count]
      new_settings[:graph_other] = @edit[:current][:graph_other]
      new_settings[:chart_mode] = @edit[:current][:chart_mode]
      new_settings[:chart_column] = @edit[:current][:chart_column]
    else
      new_settings[:graph_other] = true if new_settings[:graph_type].nil? # Reset other setting if choosing first chart
      new_settings[:graph_type] = chosen_graph # Save graph type
      new_settings[:graph_count] ||= GRAPH_MAX_COUNT # Reset graph count, if not set
      new_settings[:chart_mode] ||= 'counts'
      new_settings[:chart_column] ||= ''
    end
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_mode] && params[:chart_mode] != new_settings[:chart_mode]
    new_settings[:chart_mode] = params[:chart_mode]
    @refresh_div = "chart_div"
    @refresh_partial = "form_chart"
  end
  if params[:chart_column] && params[:chart_column] != new_settings[:chart_column]
    new_settings[:chart_column] = params[:chart_column]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_count] && params[:chosen_count] != new_settings[:graph_count]
    new_settings[:graph_count] = params[:chosen_count]
    @refresh_div = "chart_sample_div"
    @refresh_partial = "form_chart_sample"
  end
  if params[:chosen_other] # If a chart is showing, set the other setting based on check box present
    other_checked = (params[:chosen_other].to_s == "1")
    if new_settings[:graph_other] != other_checked
      new_settings[:graph_other] = other_checked
      @refresh_div = "chart_sample_div"
      @refresh_partial = "form_chart_sample"
    end
  end
end
# Handle "consolidation" (pivot) tab form-field changes.
# Keeps the three pivot-by drop-downs consistent: clearing or
# re-selecting an earlier pivot field cascades into the later ones.
# When any pivot choice changed, prunes @edit[:pivot_cols], rebuilds
# the field order, and refreshes the consolidate section.
def gfv_pivots
  if params[:chosen_pivot1] && params[:chosen_pivot1] != @edit[:new][:pivotby1]
    @edit[:new][:pivotby1] = params[:chosen_pivot1]
    if params[:chosen_pivot1] == NOTHING_STRING
      @edit[:new][:pivotby2] = NOTHING_STRING
      @edit[:new][:pivotby3] = NOTHING_STRING
    elsif params[:chosen_pivot1] == @edit[:new][:pivotby2]
      # New pivot1 duplicates pivot2: shift pivot3 up into pivot2
      @edit[:new][:pivotby2] = @edit[:new][:pivotby3]
      @edit[:new][:pivotby3] = NOTHING_STRING
    elsif params[:chosen_pivot1] == @edit[:new][:pivotby3]
      @edit[:new][:pivotby3] = NOTHING_STRING
    end
  elsif params[:chosen_pivot2] && params[:chosen_pivot2] != @edit[:new][:pivotby2]
    @edit[:new][:pivotby2] = params[:chosen_pivot2]
    if params[:chosen_pivot2] == NOTHING_STRING || params[:chosen_pivot2] == @edit[:new][:pivotby3]
      @edit[:new][:pivotby3] = NOTHING_STRING
    end
  elsif params[:chosen_pivot3] && params[:chosen_pivot3] != @edit[:new][:pivotby3]
    @edit[:new][:pivotby3] = params[:chosen_pivot3]
  end
  if params[:chosen_pivot1] || params[:chosen_pivot2] || params[:chosen_pivot3]
    if @edit[:new][:pivotby1] == NOTHING_STRING
      @edit[:pivot_cols] = {} # Clear pivot_cols if no pivot grouping fields selected
    else
      @edit[:pivot_cols].delete(@edit[:new][:pivotby1]) # Remove any pivot grouping fields from pivot cols
      @edit[:pivot_cols].delete(@edit[:new][:pivotby2])
      @edit[:pivot_cols].delete(@edit[:new][:pivotby3])
    end
    build_field_order
    @refresh_div = "consolidate_div"
    @refresh_partial = "form_consolidate"
  end
end
# Handle "sort" tab form-field changes in the report editor.
#
# Reads sort-related params (order, grouping, the two sort columns and
# their date/time "__suffix" variants, and the break format) into
# @edit[:new], then maintains the default break label in :col_options.
# Sets @refresh_div/@refresh_partial so only the affected form section
# is re-rendered.
def gfv_sort
  @edit[:new][:order] = params[:sort_order] if params[:sort_order]
  if params[:sort_group] # If grouping changed,
    @edit[:new][:group] = params[:sort_group]
    @refresh_div = "sort_div" # Resend the sort tab
    @refresh_partial = "form_sort"
    # BUGFIX: was `=` (assignment) instead of `==`, which clobbered
    # :chart_mode with a boolean and made the branch depend only on
    # chart_mode_values_allowed?
    if @edit[:new][:chart_mode] == 'values' && !chart_mode_values_allowed?
      @edit[:new][:chart_mode] = 'counts'
    end
  end
  @edit[:new][:hide_details] = (params[:hide_details].to_s == "1") if params[:hide_details]
  if params[:chosen_sort1] && params[:chosen_sort1] != @edit[:new][:sortby1].split("__").first
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = params[:chosen_sort1]
    @edit[:new][:sortby2] = NOTHING_STRING if params[:chosen_sort1] == NOTHING_STRING || params[:chosen_sort1] == @edit[:new][:sortby2].split("__").first
    @refresh_div = "sort_div"
    @refresh_partial = "form_sort"
  elsif params[:chosen_sort2] && params[:chosen_sort2] != @edit[:new][:sortby2].split("__").first
    @edit[:new][:sortby2] = params[:chosen_sort2]
  # Look at the 1st sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort1_suffix] && params[:sort1_suffix].to_s != @edit[:new][:sortby1].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby1].split("-").last) if @edit[:new][:sortby1].split("__")[1]
    @edit[:new][:sortby1] = @edit[:new][:sortby1].split("__").first +
                            (params[:sort1_suffix].blank? ? "" : "__#{params[:sort1_suffix]}")
  # Look at the 2nd sort suffix (ie. month, day_of_week, etc)
  elsif params[:sort2_suffix] && params[:sort2_suffix].to_s != @edit[:new][:sortby2].split("__")[1].to_s
    # Remove any col options for any existing sort + suffix
    @edit[:new][:col_options].delete(@edit[:new][:sortby2].split("-").last) if @edit[:new][:sortby2].split("__")[1]
    # (Removed a redundant intermediate assignment that built
    # "<col>__<suffix>" unconditionally before being overwritten here;
    # this mirrors the sort1 branch above.)
    @edit[:new][:sortby2] = @edit[:new][:sortby2].split("__").first +
                            (params[:sort2_suffix].blank? ? "" : "__#{params[:sort2_suffix]}")
  # Look at the break format
  else
    co_key1 = @edit[:new][:sortby1].split("-").last
    if params[:break_format] &&
       params[:break_format].to_s != @edit[:new].fetch_path(:col_options, co_key1)
      if params[:break_format].blank? || # Remove format and col key (if empty)
         params[:break_format].to_sym == MiqReport.get_col_info(@edit[:new][:sortby1])[:default_format]
        if @edit[:new][:col_options][co_key1]
          @edit[:new][:col_options][co_key1].delete(:break_format)
          @edit[:new][:col_options].delete(co_key1) if @edit[:new][:col_options][co_key1].empty?
        end
      else # Add col and format to col_options
        @edit[:new][:col_options][co_key1] ||= {}
        @edit[:new][:col_options][co_key1][:break_format] = params[:break_format].to_sym
      end
    end
  end
  # Clear/set up the default break label
  sort1 = @edit[:new][:sortby1].split("-").last unless @edit[:new][:sortby1].blank?
  if @edit[:new][:group] == "No" # Clear any existing break label
    if @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1].delete(:break_label)
      @edit[:new][:col_options].delete(sort1) if @edit[:new][:col_options][sort1].empty?
    end
  else # Create a break label, if none there already
    unless @edit[:new].fetch_path(:col_options, sort1, :break_label)
      @edit[:new][:col_options][sort1] ||= {}
      sort, suffix = @edit[:new][:sortby1].split("__")
      @edit[:new][:col_options][sort1][:break_label] =
        @edit[:new][:field_order].collect { |f| f.first if f.last == sort }.compact.join.strip +
        (suffix ? " (#{MiqReport.date_time_break_suffixes.collect { |s| s.first if s.last == suffix }.compact.join})" : "") +
        ": "
    end
  end
  # TODO: Not allowing user to change break label until editor is changed to not use form observe
  # if params[:break_label]
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last] ||= Hash.new
  #   @edit[:new][:col_options][@edit[:new][:sortby1].split("-").last][:break_label] == params[:break_label]
  # end
end
# Handle "timeline" tab form-field changes. Manages the timeline
# field, position, "last unit/time" window, and up to three display
# bands (@edit[:new][:tl_bands], with per-band unit/width settings).
# @tl_changed forces a settings re-render; @tl_repaint redraws the
# timeline sample.
def gfv_timeline
  if params[:chosen_tl] && params[:chosen_tl] != @edit[:new][:tl_field]
    # Switching to/from "no field" changes the settings layout; a
    # field-to-field change only needs a repaint
    if @edit[:new][:tl_field] == NOTHING_STRING || params[:chosen_tl] == NOTHING_STRING
      @refresh_div = "tl_settings_div"
      @refresh_partial = "form_tl_settings"
      @tl_changed = true
    else
      @tl_repaint = true
    end
    @edit[:new][:tl_field] = params[:chosen_tl]
    if params[:chosen_tl] == NOTHING_STRING # If clearing the timeline field
      @edit[:new][:tl_bands] = [] # Clear the bands
      @edit[:unit1] = NOTHING_STRING
      @edit[:unit2] = NOTHING_STRING
      @edit[:unit3] = NOTHING_STRING
    else
      if @edit[:new][:tl_bands].blank? # If the bands are blank
        @edit[:unit1] = BAND_UNITS[1]
        @edit[:new][:tl_bands] = [ # Create default first band
          {:width => 100, :gap => 0.0, :text => true, :unit => BAND_UNITS[1], :pixels => 100}
        ]
      end
    end
  elsif params[:chosen_position] && params[:chosen_position] != @edit[:new][:tl_position]
    @tl_changed = true
    @edit[:new][:tl_position] = params[:chosen_position]
  elsif params[:chosen_last_unit] && params[:chosen_last_unit] != @edit[:new][:tl_last_unit]
    @refresh_div = "tl_settings_div"
    @refresh_partial = "form_tl_settings"
    @tl_repaint = true
    @edit[:new][:tl_last_unit] = params[:chosen_last_unit]
    @edit[:new][:tl_last_time] = nil # Clear out the last time numeric choice
  elsif params[:chosen_last_time] && params[:chosen_last_time] != @edit[:new][:tl_last_time]
    @tl_repaint = true
    @edit[:new][:tl_last_time] = params[:chosen_last_time]
  elsif params[:chosen_unit1] && params[:chosen_unit1] != @edit[:unit1]
    @refresh_div = "tl_settings_div"
    @refresh_partial = "form_tl_settings"
    @edit[:unit1] = params[:chosen_unit1]
    @edit[:new][:tl_bands][0][:unit] = params[:chosen_unit1]
  elsif params[:chosen_unit2] && params[:chosen_unit2] != @edit[:unit2]
    @refresh_div = "tl_settings_div"
    @refresh_partial = "form_tl_settings"
    @tl_changed = true
    @edit[:unit2] = params[:chosen_unit2]
    if @edit[:unit2] == NOTHING_STRING
      @edit[:unit3] = NOTHING_STRING # Clear the 3rd band unit value
      @edit[:new][:tl_bands] = [@edit[:new][:tl_bands][0]] # Remove the 2nd and 3rd bands
      @edit[:new][:tl_bands][0][:width] = 100
    elsif @edit[:new][:tl_bands].length < 2
      # Adding the 2nd band: rebalance widths 70/30
      @edit[:new][:tl_bands][0][:width] = 70
      @edit[:new][:tl_bands].push(:width => 30, :height => 0.6, :gap => 0.1, :text => false, :unit => params[:chosen_unit2], :pixels => 200)
    else
      @edit[:new][:tl_bands][1][:unit] = params[:chosen_unit2]
    end
  elsif params[:chosen_unit3] && params[:chosen_unit3] != @edit[:unit3]
    @refresh_div = "tl_settings_div"
    @refresh_partial = "form_tl_settings"
    @tl_changed = true
    @edit[:unit3] = params[:chosen_unit3]
    if @edit[:unit3] == NOTHING_STRING
      @edit[:new][:tl_bands] = @edit[:new][:tl_bands][0..1] # Remove the 3rd band
      @edit[:new][:tl_bands][1][:width] = 30
    elsif @edit[:new][:tl_bands].length < 3
      # Adding the 3rd band: rebalance widths 70/20/10
      @edit[:new][:tl_bands][0][:width] = 70
      @edit[:new][:tl_bands][1][:width] = 20
      @edit[:new][:tl_bands].push(:width => 10, :height => 0.3, :gap => 0.1, :text => false, :unit => params[:chosen_unit3], :pixels => 200)
    else
      @edit[:new][:tl_bands][2][:unit] = params[:chosen_unit3]
    end
  end
end
# Move the selected available fields into the report's chosen-fields
# list ("move right"): enforces the MAX_REPORT_COLUMNS cap, builds a
# display header for each added column, and rebuilds the field order.
def move_cols_right
  if !params[:available_fields] || params[:available_fields].length == 0 || params[:available_fields][0] == ""
    # NOTE(review): error text says "move down" for a move-right
    # action — looks like a copy/paste of a sibling message; confirm
    # intended wording before changing (it is a runtime string).
    add_flash(_("No fields were selected to move down"), :error)
  elsif params[:available_fields].length + @edit[:new][:fields].length > MAX_REPORT_COLUMNS
    add_flash(_("Fields not added: Adding the selected %{count} fields will exceed the maximum of %{max} fields") % {:count => params[:available_fields].length + @edit[:new][:fields].length, :max => MAX_REPORT_COLUMNS},
              :error)
  else
    MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).each do |af| # Go thru all available columns
      if params[:available_fields].include?(af[1]) # See if this column was selected to move
        unless @edit[:new][:fields].include?(af) # Only move if it's not there already
          @edit[:new][:fields].push(af) # Add it to the new fields list
          if af[0].include?(":") # Not a base column
            table = af[0].split(" : ")[0].split(".")[-1] # Get the table name
            table = table.singularize unless table == "OS" # Singularize, except "OS"
            temp = af[0].split(" : ")[1]
            # Prefix the column title with the table name unless the
            # title already starts with it
            temp_header = table == temp.split(" ")[0] ? af[0].split(" : ")[1] : table + " " + af[0].split(" : ")[1]
          else
            temp_header = af[0].strip # Base column, just use it without leading space
          end
          @edit[:new][:headers][af[1]] = temp_header # Add the column title to the headers hash
        end
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
    build_field_order
  end
end
# Remove the selected chosen fields from the report ("move left"),
# cleaning up every piece of editor state that referenced them:
# headers, column formats, pivot-by fields, pivot calc columns, sort
# fields (and their col_options), timeline settings, and col_options.
# Refuses when a field is still used by the display filter.
def move_cols_left
  if !params[:selected_fields] || params[:selected_fields].length == 0 || params[:selected_fields][0] == ""
    add_flash(_("No fields were selected to move up"), :error)
  elsif display_filter_contains?(params[:selected_fields])
    add_flash(_("No fields were moved up"), :error)
  else
    @edit[:new][:fields].each do |nf| # Go thru all new fields
      if params[:selected_fields].include?(nf.last) # See if this col was selected to move
        # Clear out headers and formatting
        @edit[:new][:headers].delete(nf.last) # Delete the column name from the headers hash
        @edit[:new][:headers].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        @edit[:new][:col_formats].delete(nf.last) # Delete the column name from the col_formats hash
        @edit[:new][:col_formats].delete_if { |k, _v| k.starts_with?("#{nf.last}__") } # Delete pivot calc keys
        # Clear out pivot field options
        if nf.last == @edit[:new][:pivotby1] # Compress the pivotby fields if being moved left
          @edit[:new][:pivotby1] = @edit[:new][:pivotby2]
          @edit[:new][:pivotby2] = @edit[:new][:pivotby3]
          @edit[:new][:pivotby3] = NOTHING_STRING
        elsif nf.last == @edit[:new][:pivotby2]
          @edit[:new][:pivotby2] = @edit[:new][:pivotby3]
          @edit[:new][:pivotby3] = NOTHING_STRING
        elsif nf.last == @edit[:new][:pivotby3]
          @edit[:new][:pivotby3] = NOTHING_STRING
        end
        @edit[:pivot_cols].delete(nf.last) # Delete the column name from the pivot_cols hash
        # Clear out sort options
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby1].split("__").first # If deleting the first sort field
          if MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) # If sort has a break suffix
            @edit[:new][:col_options].delete(field_to_col(@edit[:new][:sortby1])) # Remove the <col>__<suffix> from col_options
          end
          unless @edit[:new][:group] == "No" # If we were grouping, remove all col_options :group keys
            @edit[:new][:col_options].each do |co_key, co_val|
              co_val.delete(:grouping) # Remove :group key
              @edit[:new][:col_options].delete(co_key) if co_val.empty? # Remove the col, if empty
            end
          end
          @edit[:new][:sortby1] = NOTHING_STRING
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        # NOTE(review): guard checks :sortby1 but the comparison splits
        # :sortby2 — only safe if :sortby2 is always a string whenever
        # :sortby1 is set; confirm whether the guard should be :sortby2.
        if @edit[:new][:sortby1] && nf.last == @edit[:new][:sortby2].split("__").first # If deleting the second sort field
          @edit[:new][:sortby2] = NOTHING_STRING
        end
        # Clear out timeline options
        if nf.last == @edit[:new][:tl_field] # If deleting the timeline field
          @edit[:new][:tl_field] = NOTHING_STRING
          @edit[:unit1] = NOTHING_STRING
          @edit[:unit2] = NOTHING_STRING
          @edit[:unit3] = NOTHING_STRING
          @edit[:new][:tl_bands] = []
        end
        @edit[:new][:col_options].delete(field_to_col(nf.last)) # Remove this column from the col_options hash
      end
    end
    @edit[:new][:fields].delete_if { |nf| params[:selected_fields].include?(nf.last) } # Remove selected fields
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
    build_field_order
  end
end
# Check whether any of the given encoded field names appear in the
# display filter expression. Flashes an error (naming the field) for
# each selected field found in the filter; returns true when any
# flash message is present, false when no display filter is defined.
def display_filter_contains?(fields)
  filter = @edit[:new][:display_filter]
  return false if filter.nil? # No display filter defined
  filter_text = filter.inspect
  @edit[:new][:fields].each do |field|
    next unless fields.include?(field.last) # Only care about fields being removed
    if filter_text.include?(field.last)
      add_flash(_("%{name} is currently being used in the Display Filter") %
                {:name => field.first}, :error)
    end
  end
  !@flash_array.nil?
end
# Move the selected chosen fields up one position. Flashes an error
# when nothing is selected or the selection is not consecutive;
# otherwise shifts the contiguous selection up by one slot and
# refreshes the column lists.
def move_cols_up
  chosen = params[:selected_fields]
  if !chosen || chosen.empty? || chosen.first == ""
    add_flash(_("No fields were selected to move up"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if first_idx > 0
      # Re-insert each selected field one slot earlier, walking the
      # slice in reverse so relative order is preserved
      @edit[:new][:fields][first_idx..last_idx].reverse_each do |fld|
        pulled = @edit[:new][:fields].delete(fld)
        @edit[:new][:fields].insert(first_idx - 1, pulled)
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move up"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected chosen fields down one position. Flashes an error
# when nothing is selected or the selection is not consecutive;
# otherwise shifts the contiguous selection down by one slot and
# refreshes the column lists.
def move_cols_down
  chosen = params[:selected_fields]
  if !chosen || chosen.empty? || chosen.first == ""
    add_flash(_("No fields were selected to move down"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if last_idx < @edit[:new][:fields].length - 1
      insert_idx = last_idx + 1 # Insert before the element after the last one
      insert_idx = -1 if last_idx == @edit[:new][:fields].length - 2 # Append when one away from the end
      @edit[:new][:fields][first_idx..last_idx].each do |fld|
        pulled = @edit[:new][:fields].delete(fld)
        @edit[:new][:fields].insert(insert_idx, pulled)
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move down"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected chosen fields to the top of the list. Flashes an
# error when nothing is selected or the selection is not consecutive;
# otherwise unshifts the contiguous selection to the front and
# refreshes the column lists.
def move_cols_top
  chosen = params[:selected_fields]
  if !chosen || chosen.empty? || chosen.first == ""
    add_flash(_("No fields were selected to move to the top"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if first_idx > 0
      # Unshift in reverse so the selection keeps its relative order
      @edit[:new][:fields][first_idx..last_idx].reverse_each do |fld|
        @edit[:new][:fields].unshift(@edit[:new][:fields].delete(fld))
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move to the top"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Move the selected chosen fields to the bottom of the list. Flashes
# an error when nothing is selected or the selection is not
# consecutive; otherwise pushes the contiguous selection to the end
# and refreshes the column lists.
def move_cols_bottom
  chosen = params[:selected_fields]
  if !chosen || chosen.empty? || chosen.first == ""
    add_flash(_("No fields were selected to move to the bottom"), :error)
    return
  end
  consecutive, first_idx, last_idx = selected_consecutive?
  if consecutive
    if last_idx < @edit[:new][:fields].length - 1
      @edit[:new][:fields][first_idx..last_idx].each do |fld|
        @edit[:new][:fields].push(@edit[:new][:fields].delete(fld))
      end
    end
    @refresh_div = "column_lists"
    @refresh_partial = "column_lists"
  else
    add_flash(_("Select only one or consecutive fields to move to the bottom"), :error)
  end
  @selected = params[:selected_fields]
  build_field_order
end
# Locate the currently selected fields within @edit[:new][:fields]
# and report whether they form a contiguous run.
#
# Returns [consecutive, first_idx, last_idx] where first_idx/last_idx
# are the positions of the first and last selected encoded field
# names, and consecutive is true when the span between them is no
# larger than the number of selected fields.
def selected_consecutive?
  first_idx = last_idx = 0
  @edit[:new][:fields].each_with_index do |field, idx|
    first_idx = idx if field[1] == params[:selected_fields].first
    next unless field[1] == params[:selected_fields].last
    last_idx = idx
    break
  end
  span = last_idx - first_idx + 1
  [span <= params[:selected_fields].length, first_idx, last_idx]
end
# Set record variables to new values
#
# Copies the edit-screen values (@edit[:new]) onto the MiqReport
# record +rpt+ prior to saving: simple attributes, graph settings,
# filter expressions, performance/trend/chargeback db_options,
# timeline settings, grouping, and the column-related arrays
# (cols/col_order/headers/col_formats/sortby/include). Mutates +rpt+
# in place; also sets @pg1..@pg3 as a side channel consumed while
# building the pivot group columns.
def set_record_vars(rpt)
  # Set the simple string/number fields
  rpt.template_type = "report"
  rpt.name = @edit[:new][:name].to_s.strip
  rpt.title = @edit[:new][:title].to_s.strip
  rpt.db = @edit[:new][:model]
  rpt.rpt_group = @edit[:new][:rpt_group]
  rpt.rpt_type = @edit[:new][:rpt_type]
  rpt.priority = @edit[:new][:priority]
  rpt.categories = @edit[:new][:categories]
  rpt.col_options = @edit[:new][:col_options]
  rpt.order = @edit[:new][:sortby1].nil? ? nil : @edit[:new][:order]
  # Set the graph fields
  if @edit[:new][:sortby1] == NOTHING_STRING || @edit[:new][:graph_type].nil?
    rpt.dims = nil
    rpt.graph = nil
  else
    if @edit[:new][:graph_type] =~ /^(Pie|Donut)/ # Pie and Donut charts must be set to 1 dimension
      rpt.dims = 1
    else
      rpt.dims = @edit[:new][:sortby2] == NOTHING_STRING ? 1 : 2 # Set dims to 1 or 2 based on presence of sortby2
    end
    # Values mode needs a chart column; default to the first option
    if @edit[:new][:chart_mode] == 'values' && @edit[:new][:chart_column].blank?
      options = chart_fields_options
      @edit[:new][:chart_column] = options[0][1] unless options.empty?
    end
    rpt.graph = {
      :type => @edit[:new][:graph_type],
      :mode => @edit[:new][:chart_mode],
      :column => @edit[:new][:chart_column],
      :count => @edit[:new][:graph_count],
      :other => @edit[:new][:graph_other],
    }
  end
  # Set the conditions field (expression); "???" marks an unfinished
  # expression element, in which case no filter is saved
  if !@edit[:new][:record_filter].nil? && @edit[:new][:record_filter]["???"].nil?
    rpt.conditions = MiqExpression.new(@edit[:new][:record_filter])
  else
    rpt.conditions = nil
  end
  # Set the display_filter field (expression)
  if !@edit[:new][:display_filter].nil? && @edit[:new][:display_filter]["???"].nil?
    rpt.display_filter = MiqExpression.new(@edit[:new][:display_filter])
  else
    rpt.display_filter = nil
  end
  # Set the performance options
  rpt.db_options = Hash.new
  if model_report_type(rpt.db) == :performance
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:calc_avgs_by] = @edit[:new][:perf_avgs]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
  elsif model_report_type(rpt.db) == :trend
    rpt.db_options[:rpt_type] = "trend"
    rpt.db_options[:interval] = @edit[:new][:perf_interval]
    rpt.db_options[:end_offset] = @edit[:new][:perf_end].to_i
    rpt.db_options[:start_offset] = @edit[:new][:perf_end].to_i + @edit[:new][:perf_start].to_i
    rpt.db_options[:trend_db] = @edit[:new][:perf_trend_db]
    rpt.db_options[:trend_col] = @edit[:new][:perf_trend_col]
    rpt.db_options[:limit_col] = @edit[:new][:perf_limit_col] if @edit[:new][:perf_limit_col]
    rpt.db_options[:limit_val] = @edit[:new][:perf_limit_val] if @edit[:new][:perf_limit_val]
    rpt.db_options[:target_pcts] = []
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct1])
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct2]) if @edit[:new][:perf_target_pct2]
    rpt.db_options[:target_pcts].push(@edit[:new][:perf_target_pct3]) if @edit[:new][:perf_target_pct3]
  elsif Chargeback.db_is_chargeback?(rpt.db)
    rpt.db_options[:rpt_type] = @edit[:new][:model]
    options = {} # CB options go in db_options[:options] key
    options[:interval] = @edit[:new][:cb_interval]
    options[:interval_size] = @edit[:new][:cb_interval_size]
    options[:end_interval_offset] = @edit[:new][:cb_end_interval_offset]
    if @edit[:new][:cb_show_typ] == "owner"
      options[:owner] = @edit[:new][:cb_owner_id]
    elsif @edit[:new][:cb_show_typ] == "tenant"
      options[:tenant_id] = @edit[:new][:cb_tenant_id]
    elsif @edit[:new][:cb_show_typ] == "tag"
      if @edit[:new][:cb_tag_cat] && @edit[:new][:cb_tag_value]
        options[:tag] = "/managed/#{@edit[:new][:cb_tag_cat]}/#{@edit[:new][:cb_tag_value]}"
      end
    elsif @edit[:new][:cb_show_typ] == "entity"
      options[:provider_id] = @edit[:new][:cb_provider_id]
      options[:entity_id] = @edit[:new][:cb_entity_id]
    end
    rpt.db_options[:options] = options
  end
  rpt.time_profile_id = @edit[:new][:time_profile]
  if @edit[:new][:time_profile]
    time_profile = TimeProfile.find_by_id(@edit[:new][:time_profile])
    rpt.tz = time_profile.tz
  end
  # Set the timeline field
  if @edit[:new][:tl_field] == NOTHING_STRING
    rpt.timeline = nil
  else
    rpt.timeline = Hash.new
    rpt.timeline[:field] = @edit[:new][:tl_field]
    rpt.timeline[:position] = @edit[:new][:tl_position]
    rpt.timeline[:bands] = @edit[:new][:tl_bands]
    if @edit[:new][:tl_last_unit] == SHOWALL_STRING
      rpt.timeline[:last_unit] = rpt.timeline[:last_time] = nil
    else
      rpt.timeline[:last_unit] = @edit[:new][:tl_last_unit]
      rpt.timeline[:last_time] = @edit[:new][:tl_last_time]
    end
  end
  # Set the line break group field
  if @edit[:new][:sortby1] == NOTHING_STRING # If no sort fields
    rpt.group = nil # Clear line break group
  else # Otherwise, check the setting
    case @edit[:new][:group]
    when "Yes"
      rpt.group = "y"
    when "Counts"
      rpt.group = "c"
    else
      rpt.group = nil
    end
  end
  # Set defaults, if not present
  rpt.rpt_group ||= "Custom"
  rpt.rpt_type ||= "Custom"
  rpt.cols = []
  rpt.col_order = []
  rpt.col_formats = []
  rpt.headers = []
  rpt.include = Hash.new
  rpt.sortby = @edit[:new][:sortby1] == NOTHING_STRING ? nil : [] # Clear sortby if sortby1 not present, else set up array
  # Add in the chargeback static fields
  if Chargeback.db_is_chargeback?(rpt.db) # For chargeback, add in static fields
    rpt.cols = %w(start_date display_range)
    name_col = @edit[:new][:model].constantize.report_name_field
    rpt.cols += [name_col]
    if @edit[:new][:cb_groupby] == "date"
      rpt.col_order = ["display_range", name_col]
      rpt.sortby = ["start_date", name_col]
    elsif @edit[:new][:cb_groupby] == "vm"
      rpt.col_order = [name_col, "display_range"]
      rpt.sortby = [name_col, "start_date"]
    end
    rpt.col_order.each do |c|
      rpt.headers.push(Dictionary.gettext(c, :type => :column, :notfound => :titleize))
      rpt.col_formats.push(nil) # No formatting needed on the static cols
    end
    rpt.col_options = @edit[:new][:model].constantize.report_col_options
    rpt.order = "Ascending"
    rpt.group = "y"
    rpt.tz = @edit[:new][:tz]
  end
  # Remove when we support user sorting of trend reports
  if rpt.db == TREND_MODEL
    rpt.sortby = ["resource_name"]
    rpt.order = "Ascending"
  end
  # Build column related report fields
  @pg1 = @pg2 = @pg3 = nil # Init the pivot group cols
  @edit[:new][:fields].each do |field_entry| # Go thru all of the fields
    field = field_entry[1] # Get the encoded fully qualified field name
    if @edit[:new][:pivotby1] != NOTHING_STRING && # If we are doing pivoting and
       @edit[:pivot_cols].key?(field) # this is a pivot calc column
      @edit[:pivot_cols][field].each do |calc_typ| # Add header/format/col_order for each calc type
        rpt.headers.push(@edit[:new][:headers][field + "__#{calc_typ}"])
        rpt.col_formats.push(@edit[:new][:col_formats][field + "__#{calc_typ}"])
        add_field_to_col_order(rpt, field + "__#{calc_typ}")
      end
    else # Normal field, set header/format/col_order
      rpt.headers.push(@edit[:new][:headers][field])
      rpt.col_formats.push(@edit[:new][:col_formats][field])
      add_field_to_col_order(rpt, field)
    end
  end
  rpt.rpt_options ||= {}
  rpt.rpt_options.delete(:pivot)
  unless @pg1.nil? # Build the pivot group_cols array
    rpt.rpt_options[:pivot] = {}
    rpt.rpt_options[:pivot][:group_cols] = []
    rpt.rpt_options[:pivot][:group_cols].push(@pg1)
    rpt.rpt_options[:pivot][:group_cols].push(@pg2) unless @pg2.nil?
    rpt.rpt_options[:pivot][:group_cols].push(@pg3) unless @pg3.nil?
  end
  # Row limit only applies when not grouping
  if @edit[:new][:group] != "No" || @edit[:new][:row_limit].blank?
    rpt.rpt_options.delete(:row_limit)
  else
    rpt.rpt_options[:row_limit] = @edit[:new][:row_limit].to_i
  end
  # Add pdf page size to rpt_options
  rpt.rpt_options ||= {}
  rpt.rpt_options[:pdf] ||= {}
  rpt.rpt_options[:pdf][:page_size] = @edit[:new][:pdf_page_size] || DEFAULT_PDF_PAGE_SIZE
  rpt.rpt_options[:queue_timeout] = @edit[:new][:queue_timeout]
  # Add hide detail rows option, if grouping
  if rpt.group.nil?
    rpt.rpt_options.delete(:summary)
  else
    rpt.rpt_options[:summary] ||= {}
    rpt.rpt_options[:summary][:hide_detail_rows] = @edit[:new][:hide_details]
  end
  user = current_user
  rpt.user = user
  rpt.miq_group = user.current_group
  rpt.add_includes_for_virtual_custom_attributes
end
# Add one encoded field ("<model>[.<assoc>...]-<column>[__<suffix>]")
# to the report record's column bookkeeping: appends to rpt.col_order,
# rpt.cols or the nested rpt.include "columns" arrays, maintains
# rpt.sortby ordering, and records pivot group columns in @pg1..@pg3
# as a side effect.
def add_field_to_col_order(rpt, field)
  # Get the sort columns, removing the suffix if it exists
  sortby1 = MiqReport.is_break_suffix?(@edit[:new][:sortby1].split("__")[1]) ?
    @edit[:new][:sortby1].split("__").first :
    @edit[:new][:sortby1]
  sortby2 = MiqReport.is_break_suffix?(@edit[:new][:sortby2].split("__")[1]) ?
    @edit[:new][:sortby2].split("__").first :
    @edit[:new][:sortby2]
  if field.include?(".") # Has a period, so it's an include
    tables = field.split("-")[0].split(".")[1..-1] # Get the list of tables from before the hyphen
    inc_hash = rpt.include # Start at the main hash
    tables.each_with_index do |table, idx|
      inc_hash[table] ||= {} # Create hash for the table, if it's not there already
      if idx == tables.length - 1 # We're at the end of the field name, so add the column
        inc_hash[table]["columns"] ||= [] # Create the columns array for this table
        f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
        inc_hash[table]["columns"].push(f) unless inc_hash[table]["columns"].include?(f) # Add the field to the columns, if not there
        table_field = tables.join('.') + "." + field.split("-")[1]
        rpt.col_order.push(table_field) # Add the table.field to the col_order array
        if field == sortby1 # Is this the first sort field?
          rpt.sortby = [table_field] + rpt.sortby # Put the field first in the sortby array
        # NOTE(review): this branch compares against the raw
        # @edit[:new][:sortby2] while the base-column branch below
        # compares against the suffix-stripped sortby2 — confirm the
        # asymmetry is intended.
        elsif field == @edit[:new][:sortby2] # Is this the second sort field?
          rpt.sortby.push(table_field) # Add the field to the sortby array
        end
        if field == @edit[:new][:pivotby1] # Save the group fields
          @pg1 = table_field
        elsif field == @edit[:new][:pivotby2]
          @pg2 = table_field
        elsif field == @edit[:new][:pivotby3]
          @pg3 = table_field
        end
      else # Set up for the next embedded include hash
        inc_hash[table]["include"] ||= {} # Create include hash for next level
        inc_hash = inc_hash[table]["include"] # Point to the new hash
      end
    end
  else # No period, this is a main table column
    if field.include?("__") # Check for pivot calculated field
      f = field.split("-")[1].split("__").first # Grab the field name after the hyphen, before the "__"
      rpt.cols.push(f) unless rpt.cols.include?(f) # Add the original field, if not already there
    else
      rpt.cols.push(field.split("-")[1]) # Grab the field name after the hyphen
    end
    rpt.col_order.push(field.split("-")[1]) # Add the field to the col_order array
    if field == sortby1 # Is this the first sort field?
      rpt.sortby = [@edit[:new][:sortby1].split("-")[1]] + rpt.sortby # Put the field first in the sortby array
    elsif field == sortby2 # Is this the second sort field?
      rpt.sortby.push(@edit[:new][:sortby2].split("-")[1]) # Add the field to the sortby array
    end
    if field == @edit[:new][:pivotby1] # Save the group fields
      @pg1 = field.split("-")[1]
    elsif field == @edit[:new][:pivotby2]
      @pg2 = field.split("-")[1]
    elsif field == @edit[:new][:pivotby3]
      @pg3 = field.split("-")[1]
    end
  end
end
# Set form variables for edit
def set_form_vars
@edit = {}
@edit[:rpt_id] = @rpt.id # Save a record id to use it later to look a record
@edit[:rpt_title] = @rpt.title
@edit[:rpt_name] = @rpt.name
@edit[:new] = {}
@edit[:key] = "report_edit__#{@rpt.id || "new"}"
if params[:pressed] == "miq_report_copy"
@edit[:new][:rpt_group] = "Custom"
@edit[:new][:rpt_type] = "Custom"
else
@edit[:new][:rpt_group] = @rpt.rpt_group
@edit[:new][:rpt_type] = @rpt.rpt_type
end
# Get the simple string/number fields
@edit[:new][:name] = @rpt.name
@edit[:new][:title] = @rpt.title
@edit[:new][:model] = @rpt.db
@edit[:new][:priority] = @rpt.priority
@edit[:new][:order] = @rpt.order.blank? ? "Ascending" : @rpt.order
# @edit[:new][:graph] = @rpt.graph
# Replaced above line to handle new graph settings Hash
if @rpt.graph.kind_of?(Hash)
@edit[:new][:graph_type] = @rpt.graph[:type]
@edit[:new][:graph_count] = @rpt.graph[:count]
@edit[:new][:chart_mode] = @rpt.graph[:mode]
@edit[:new][:chart_column] = @rpt.graph[:column]
@edit[:new][:graph_other] = @rpt.graph[:other] ? @rpt.graph[:other] : false
else
@edit[:new][:graph_type] = @rpt.graph
@edit[:new][:graph_count] = GRAPH_MAX_COUNT
@edit[:new][:chart_mode] = 'counts'
@edit[:new][:chart_column] = ''
@edit[:new][:graph_other] = true
end
@edit[:new][:dims] = @rpt.dims
@edit[:new][:categories] = @rpt.categories
@edit[:new][:categories] ||= []
@edit[:new][:col_options] = @rpt.col_options.blank? ? {} : @rpt.col_options
# Initialize options
@edit[:new][:perf_interval] = nil
@edit[:new][:perf_start] = nil
@edit[:new][:perf_end] = nil
@edit[:new][:tz] = nil
@edit[:new][:perf_trend_db] = nil
@edit[:new][:perf_trend_col] = nil
@edit[:new][:perf_limit_col] = nil
@edit[:new][:perf_limit_val] = nil
@edit[:new][:perf_target_pct1] = nil
@edit[:new][:perf_target_pct2] = nil
@edit[:new][:perf_target_pct3] = nil
@edit[:new][:cb_interval] = nil
@edit[:new][:cb_interval_size] = nil
@edit[:new][:cb_end_interval_offset] = nil
if [:performance, :trend].include?(model_report_type(@rpt.db))
@edit[:new][:perf_interval] = @rpt.db_options[:interval]
@edit[:new][:perf_avgs] = @rpt.db_options[:calc_avgs_by]
@edit[:new][:perf_end] = @rpt.db_options[:end_offset].to_s
@edit[:new][:perf_start] = (@rpt.db_options[:start_offset] - @rpt.db_options[:end_offset]).to_s
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
if @rpt.time_profile
@edit[:new][:time_profile] = @rpt.time_profile_id
@edit[:new][:time_profile_tz] = @rpt.time_profile.tz
else
set_time_profile_vars(selected_time_profile_for_pull_down, @edit[:new])
end
@edit[:new][:perf_trend_db] = @rpt.db_options[:trend_db]
@edit[:new][:perf_trend_col] = @rpt.db_options[:trend_col]
@edit[:new][:perf_limit_col] = @rpt.db_options[:limit_col]
@edit[:new][:perf_limit_val] = @rpt.db_options[:limit_val]
@edit[:new][:perf_target_pct1], @edit[:new][:perf_target_pct2], @edit[:new][:perf_target_pct3] = @rpt.db_options[:target_pcts]
elsif Chargeback.db_is_chargeback?(@rpt.db)
@edit[:new][:tz] = @rpt.tz ? @rpt.tz : session[:user_tz] # Set the timezone, default to user's
options = @rpt.db_options[:options]
if options.key?(:owner) # Get the owner options
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = options[:owner]
elsif options.key?(:tenant_id) # Get the tenant options
@edit[:new][:cb_show_typ] = "tenant"
@edit[:new][:cb_tenant_id] = options[:tenant_id]
elsif options.key?(:tag) # Get the tag options
@edit[:new][:cb_show_typ] = "tag"
@edit[:new][:cb_tag_cat] = options[:tag].split("/")[-2]
@edit[:new][:cb_tag_value] = options[:tag].split("/")[-1]
@edit[:cb_tags] = {}
cat = Classification.find_by_name(@edit[:new][:cb_tag_cat])
cat.entries.each { |e| @edit[:cb_tags][e.name] = e.description } if cat # Collect the tags, if category is valid
elsif options.key?(:entity_id)
@edit[:new][:cb_show_typ] = "entity"
@edit[:new][:cb_entity_id] = options[:entity_id]
@edit[:new][:cb_provider_id] = options[:provider_id]
end
@edit[:new][:cb_model] = Chargeback.report_cb_model(@rpt.db)
@edit[:new][:cb_interval] = options[:interval]
@edit[:new][:cb_interval_size] = options[:interval_size]
@edit[:new][:cb_end_interval_offset] = options[:end_interval_offset]
@edit[:new][:cb_groupby] = @rpt.sortby.nil? || @rpt.sortby.first == "start_date" ? "date" : "vm"
end
# Only show chargeback users choice if an admin
if admin_user?
@edit[:cb_users] = User.all.each_with_object({}) { |u, h| h[u.userid] = u.name }
@edit[:cb_tenant] = Tenant.all.each_with_object({}) { |t, h| h[t.id] = t.name }
else
@edit[:new][:cb_show_typ] = "owner"
@edit[:new][:cb_owner_id] = session[:userid]
@edit[:cb_owner_name] = current_user.name
end
# Get chargeback tags
cats = Classification.categories.collect { |c| c if c.show }.compact # Get categories, sort by name, remove nils
cats.delete_if { |c| c.read_only? || c.entries.length == 0 } # Remove categories that are read only or have no entries
@edit[:cb_cats] = cats.each_with_object({}) { |c, h| h[c.name] = c.description }
@edit[:cb_providers] = {}
@edit[:cb_providers][:container_project] = {}
@edit[:cb_providers][:vm] = {} # Fill this in if entity show type it ever becomes relevent for VMs
@edit[:cb_entities_by_provider_id] = {}
ManageIQ::Providers::ContainerManager.all.each do |provider|
@edit[:cb_providers][:container_project][provider.name] = provider.id
@edit[:cb_entities_by_provider_id][provider.id] = {}
provider.container_projects.all.each do |project|
@edit[:cb_entities_by_provider_id][provider.id][project.id] = project.name
end
end
# Build trend limit cols array
if model_report_type(@rpt.db) == :trend
@edit[:limit_cols] = VimPerformanceTrend.trend_limit_cols(@edit[:new][:perf_trend_db], @edit[:new][:perf_trend_col], @edit[:new][:perf_interval])
end
# Build performance interval select arrays, if needed
if [:performance, :trend].include?(model_report_type(@rpt.db))
build_perf_interval_arrays(@edit[:new][:perf_interval]) # Build the start and end arrays for the performance interval chooser
end
expkey = :record_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:record_filter] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Get the conditions MiqExpression
if @rpt.conditions.kind_of?(MiqExpression)
@edit[:new][:record_filter] = @rpt.conditions.exp
@edit[:miq_exp] = true
elsif @rpt.conditions.nil?
@edit[:new][:record_filter] = nil
@edit[:new][:record_filter] = @edit[expkey][:expression] # Copy to new exp
@edit[:miq_exp] = true
end
# Get the display_filter MiqExpression
@edit[:new][:display_filter] = @rpt.display_filter.nil? ? nil : @rpt.display_filter.exp
expkey = :display_filter
@edit[expkey] ||= {} # Create hash for this expression, if needed
@edit[expkey][:expression] = [] # Store exps in an array
@edit[expkey][:exp_idx] ||= 0 # Start at first exp
@edit[expkey][:expression] = {"???" => "???"} # Set as new exp element
# Build display filter expression
@edit[:new][:display_filter] = @edit[expkey][:expression] if @edit[:new][:display_filter].nil? # Copy to new exp
# Get timeline fields
@edit[:tl_last_units] = []
BAND_UNITS[1..-2].each { |u| @edit[:tl_last_units].push u.pluralize }
@edit[:unit1] = NOTHING_STRING # Default units and tl field to nothing
@edit[:unit2] = NOTHING_STRING
@edit[:unit3] = NOTHING_STRING
@edit[:new][:tl_field] = NOTHING_STRING
@edit[:new][:tl_position] = "Last"
@edit[:new][:tl_last_unit] = SHOWALL_STRING
@edit[:new][:tl_last_time] = nil
if @rpt.timeline.kind_of?(Hash) # Timeline has any data
@edit[:new][:tl_field] = @rpt.timeline[:field] unless @rpt.timeline[:field].blank?
@edit[:new][:tl_position] = @rpt.timeline[:position] unless @rpt.timeline[:position].blank?
@edit[:new][:tl_last_unit] = @rpt.timeline[:last_unit] unless @rpt.timeline[:last_unit].blank?
@edit[:new][:tl_last_time] = @rpt.timeline[:last_time] unless @rpt.timeline[:last_time].blank?
@edit[:new][:tl_bands] = @rpt.timeline[:bands]
unless @rpt.timeline[:bands].blank?
@edit[:unit1] = @rpt.timeline[:bands][0][:unit].capitalize
@edit[:unit2] = @rpt.timeline[:bands][1][:unit].capitalize if @rpt.timeline[:bands].length > 1
@edit[:unit3] = @rpt.timeline[:bands][2][:unit].capitalize if @rpt.timeline[:bands].length > 2
end
else
@edit[:new][:tl_bands] = []
end
# Get the pdf page size, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:pdf]
@edit[:new][:pdf_page_size] = @rpt.rpt_options[:pdf][:page_size] || DEFAULT_PDF_PAGE_SIZE
else
@edit[:new][:pdf_page_size] = DEFAULT_PDF_PAGE_SIZE
end
# Get the hide details setting, if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:summary]
@edit[:new][:hide_details] = @rpt.rpt_options[:summary][:hide_detail_rows]
else
@edit[:new][:hide_details] = false
end
# Get the timeout if present
if @rpt.rpt_options.kind_of?(Hash) && @rpt.rpt_options[:queue_timeout]
@edit[:new][:queue_timeout] = @rpt.rpt_options[:queue_timeout]
else
@edit[:new][:queue_timeout] = nil
end
case @rpt.group
when "y"
@edit[:new][:group] = "Yes"
when "c"
@edit[:new][:group] = "Counts"
else
@edit[:new][:group] = "No"
@edit[:new][:row_limit] = @rpt.rpt_options[:row_limit].to_s if @rpt.rpt_options
end
# build selected fields array from the report record
@edit[:new][:sortby1] = NOTHING_STRING # Initialize sortby fields to nothing
@edit[:new][:sortby2] = NOTHING_STRING
@edit[:new][:pivotby1] = NOTHING_STRING # Initialize groupby fields to nothing
@edit[:new][:pivotby2] = NOTHING_STRING
@edit[:new][:pivotby3] = NOTHING_STRING
if params[:pressed] == "miq_report_new"
@edit[:new][:fields] = []
@edit[:new][:categories] = []
@edit[:new][:headers] = {}
@edit[:new][:col_formats] = {}
@edit[:pivot_cols] = {}
else
build_selected_fields(@rpt) # Create the field related @edit arrays and hashes
end
# Rebuild the tag descriptions in the new fields array to match the ones in available fields
@edit[:new][:fields].each do |nf|
tag = nf.first.split(':')
if nf.first.include?("Managed :")
entry = MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find { |a| a.last == nf.last }
nf[0] = entry ? entry.first : "#{tag} (Category not found)"
end
end
@edit[:current] = ["copy", "new"].include?(params[:action]) ? {} : copy_hash(@edit[:new])
# For trend reports, check for percent field chosen
if @rpt.db && @rpt.db == TREND_MODEL &&
MiqExpression.reporting_available_fields(@edit[:new][:model], @edit[:new][:perf_interval]).find do|af|
af.last ==
@edit[:new][:perf_trend_db] + "-" + @edit[:new][:perf_trend_col]
end.first.include?("(%)")
@edit[:percent_col] = true
end
end
# Build the :fields array and :headers hash from the rpt record cols and includes hashes
# Populate the @edit working structures (:fields, :headers, :col_formats and
# @edit[:pivot_cols]) plus the sortby1/2 and pivotby1/2/3 selections from a
# saved report's col_order, headers, col_formats, include, rpt_options and
# sortby attributes. field_key is "db-col" for main-table columns or
# "db.include_path-col" for joined tables; field_value is the human-readable
# label shown in the UI.
def build_selected_fields(rpt)
fields = []
headers = {}
col_formats = {}
pivot_cols = {}
rpt.col_formats ||= Array.new(rpt.col_order.length) # Create array of nils if col_formats not present (backward compat)
rpt.col_order.each_with_index do |col, idx|
# Custom attribute columns use the prefix-stripped name as the label
if col.starts_with?(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX)
field_key = rpt.db + "-" + col
field_value = col.gsub(CustomAttributeMixin::CUSTOM_ATTRIBUTES_PREFIX, "")
elsif !col.include?(".") # Main table field
field_key = rpt.db + "-" + col
field_value = friendly_model_name(rpt.db) +
Dictionary.gettext(rpt.db + "." + col.split("__").first, :type => :column, :notfound => :titleize)
else # Included table field
inc_string = find_includes(col.split("__").first, rpt.include) # Get the full include string
field_key = rpt.db + "." + inc_string.to_s + "-" + col.split(".").last
if inc_string.to_s.ends_with?(".managed") || inc_string.to_s == "managed"
# don't titleize tag name, need it to lookup later to get description by tag name
field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) + col.split(".").last
else
field_value = friendly_model_name(rpt.db + "." + inc_string.to_s) +
Dictionary.gettext(col.split(".").last.split("__").first, :type => :column, :notfound => :titleize)
end
end
# A "__" suffix marks a calculated pivot variant (e.g. col__total); record
# the calculation under the base key and add the base column only once.
if field_key.include?("__") # Check for calculated pivot column
field_key1, calc_typ = field_key.split("__")
pivot_cols[field_key1] ||= []
pivot_cols[field_key1] << calc_typ.to_sym
pivot_cols[field_key1].sort! # Sort the array
fields.push([field_value, field_key1]) unless fields.include?([field_value, field_key1]) # Add original col to fields array
else
fields.push([field_value, field_key]) # Add to fields array
end
# Create the groupby keys if groupby array is present
if rpt.rpt_options &&
rpt.rpt_options[:pivot] &&
rpt.rpt_options[:pivot][:group_cols] &&
rpt.rpt_options[:pivot][:group_cols].kind_of?(Array)
if rpt.rpt_options[:pivot][:group_cols].length > 0
@edit[:new][:pivotby1] = field_key if col == rpt.rpt_options[:pivot][:group_cols][0]
end
if rpt.rpt_options[:pivot][:group_cols].length > 1
@edit[:new][:pivotby2] = field_key if col == rpt.rpt_options[:pivot][:group_cols][1]
end
if rpt.rpt_options[:pivot][:group_cols].length > 2
@edit[:new][:pivotby3] = field_key if col == rpt.rpt_options[:pivot][:group_cols][2]
end
end
# Create the sortby keys if sortby array is present
if rpt.sortby.kind_of?(Array)
if rpt.sortby.length > 0
# If first sortby field as a break suffix, set up sortby1 with a suffix
if MiqReport.is_break_suffix?(rpt.sortby[0].split("__")[1])
sort1, suffix1 = rpt.sortby[0].split("__") # Get sort field and suffix, if present
@edit[:new][:sortby1] = field_key + (suffix1 ? "__#{suffix1}" : "") if col == sort1
else # Not a break suffix sort field, just copy the field name to sortby1
@edit[:new][:sortby1] = field_key if col == rpt.sortby[0]
end
end
if rpt.sortby.length > 1
if MiqReport.is_break_suffix?(rpt.sortby[1].split("__")[1])
sort2, suffix2 = rpt.sortby[1].split("__") # Get sort field and suffix, if present
@edit[:new][:sortby2] = field_key + (suffix2 ? "__#{suffix2}" : "") if col == sort2
else # Not a break suffix sort field, just copy the field name to sortby1
@edit[:new][:sortby2] = field_key if col == rpt.sortby[1]
end
end
end
headers[field_key] = rpt.headers[idx] # Add col to the headers hash
if field_key.include?("__") # if this a pivot calc field?
headers[field_key.split("__").first] = field_value # Save the original field key as well
end
col_formats[field_key] = rpt.col_formats[idx] # Add col to the headers hash
end
# Remove the non-cost and owner columns from the arrays for Chargeback
if Chargeback.db_is_chargeback?(rpt.db)
f_len = fields.length
# Walk fields backwards (index f_len - f_idx) so delete_at does not shift
# positions of entries not yet visited.
for f_idx in 1..f_len # Go thru fields in reverse
f_key = fields[f_len - f_idx].last
next if f_key.ends_with?(*CHARGEBACK_ALLOWED_FIELD_SUFFIXES)
headers.delete(f_key)
col_formats.delete(f_key)
fields.delete_at(f_len - f_idx)
end
end
@edit[:new][:fields] = fields
@edit[:new][:headers] = headers
@edit[:new][:col_formats] = col_formats
@edit[:pivot_cols] = pivot_cols
build_field_order
end
# Create the field_order hash from the fields and pivot_cols structures
# Derive @edit[:new][:field_order] from the chosen fields. When pivoting is
# active, a field that has pivot calculations expands into one entry per
# selected calculation (in MiqReport::PIVOTS order); otherwise the field
# entry is carried over as-is.
def build_field_order
  pivot_active = @edit[:new][:pivotby1] != NOTHING_STRING
  ordered = []
  @edit[:new][:fields].each do |f|
    if pivot_active && @edit[:pivot_cols].key?(f.last)
      MiqReport::PIVOTS.each do |pivot|
        calc = pivot.first
        next unless @edit[:pivot_cols][f.last].include?(calc)
        ordered << ["#{f.first} (#{calc.to_s.titleize})", "#{f.last}__#{calc}"]
      end
    else
      ordered << f # keep the original pair object so later in-place label edits stay visible
    end
  end
  @edit[:new][:field_order] = ordered
end
# Build the full includes string by finding the column in the includes hash
# Resolve which include chain of the report's "include" hash provides +col+
# ("table1.table2.field"), returning the dotted table path (e.g.
# "hardware.disks"), or nil when the column cannot be located.
def find_includes(col, includes)
  tables = col.split(".")[0..-2]
  field  = col.split(".").last
  table  = tables.first
  entry  = includes[table]

  # Found directly at this level?
  return table if entry && entry["columns"] && entry["columns"].include?(field)

  # Otherwise descend through this table's own nested includes.
  if entry && entry["include"]
    deeper = find_includes([tables[1..-1], field].flatten.join('.'), entry["include"])
    return "#{table}.#{deeper}" if deeper
  end

  # Finally, try sibling includes that themselves have nested includes.
  includes.each_pair do |key, inc|
    next unless inc["include"]
    deeper = find_includes(col, inc["include"])
    return nil if deeper.nil? # preserved behavior: gives up after the first failed subtree
    return "#{key}.#{deeper}"
  end
  nil
end
# Point x_node at the tree node for @rpt when it lives under the "Custom"
# folder of the current group's report menu (@sb[:rpt_menu] is an array of
# [group_title, [[folder_name, [folder_name, report_name, ...]], ...]]).
def setnode_for_customreport
  @sb[:rpt_menu].each_with_index do |level1_nodes, i|
    next unless level1_nodes[0] == @sb[:grp_title]
    level1_nodes[1].each_with_index do |level2_nodes, k|
      # Only the Custom folder, and only when it holds at least one report
      # beyond the folder-name entry.
      next unless level2_nodes[0].downcase == "custom" && level2_nodes[1].count > 1
      # Original used each_with_index but discarded the index; plain each is
      # behaviorally identical.
      level2_nodes[1].each do |report|
        self.x_node = "xx-#{i}_xx-#{i}-#{k}_rep-#{to_cid(@rpt.id)}" if report == @rpt.name
      end
    end
  end
end
# Validate the report configuration held in @edit[:new] plus the model's own
# validations. Each failure queues a flash error; on any error the editor is
# switched (via @sb[:miq_tab]) to the first tab that needs attention.
# Returns true when no flash errors were raised, false/nil otherwise.
def valid_report?(rpt)
active_tab = 'edit_1'
# Trend reports need a trend column and a (numeric) target limit; all other
# reports just need at least one selected field.
if @edit[:new][:model] == TREND_MODEL
unless @edit[:new][:perf_trend_col]
add_flash(_('Trending for is required'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Trend Target Limit must be configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('At least one Field must be selected'), :error)
end
# Chargeback reports additionally need a complete "show costs by" selection;
# each branch yields an error message only when its selection is incomplete.
if Chargeback.db_is_chargeback?(@edit[:new][:model])
msg = case @edit[:new][:cb_show_typ]
when nil
_('Show Costs by must be selected')
when 'owner'
_('An Owner must be selected') unless @edit[:new][:cb_owner_id]
when 'tenant'
_('A Tenant Category must be selected') unless @edit[:new][:cb_tenant_id]
when 'tag'
if !@edit[:new][:cb_tag_cat]
_('A Tag Category must be selected')
elsif !@edit[:new][:cb_tag_value]
_('A Tag must be selected')
end
when 'entity'
unless @edit[:new][:cb_entity_id]
_("A specific #{ui_lookup(:model => @edit[:new][:cb_model])} or all must be selected")
end
end
if msg
add_flash(msg, :error)
active_tab = 'edit_3'
end
end
# Validate column styles
unless rpt.col_options.blank? || @edit[:new][:field_order].nil?
@edit[:new][:field_order].each do |f| # Go thru all of the cols in order
# Strip the table path and db prefix to get the bare column name
col = f.last.split('.').last.split('-').last
if val = rpt.col_options[col] # Skip if no options for this col
next unless val.key?(:style) # Skip if no style options
val[:style].each_with_index do |s, s_idx| # Go through all of the configured ifs
if s[:value]
if e = MiqExpression.atom_error(rpt.col_to_expression_col(col.split('__').first), # See if the value is in error
s[:operator],
s[:value])
# NOTE(review): msg is assigned the add_flash return value here but never
# read afterward — looks vestigial; confirm before removing.
msg = case s_idx + 1
when 1
add_flash(_("Styling for '%{item}', first value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
when 2
add_flash(_("Styling for '%{item}', second value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
when 3
add_flash(_("Styling for '%{item}', third value is in error: %{message}") %
{:item => f.first, :message => e.message}, :error)
end
active_tab = 'edit_9'
end
end
end
end
end
end
unless rpt.valid? # Check the model for errors
rpt.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
end
@sb[:miq_tab] = active_tab if flash_errors?
@flash_array.nil?
end
# Check for valid report configuration in @edit[:new]
# Check if chargeback field is valid
# Whether the Chargeback "show costs by" selection in @edit[:new] is
# complete. Returns a truthy value (the last checked selection) when valid,
# false/nil otherwise.
def valid_chargeback_fields
  selection = @edit[:new]
  return false unless %w(owner tenant tag entity).include?(selection[:cb_show_typ])

  case selection[:cb_show_typ]
  when 'owner'  then selection[:cb_owner_id]
  when 'tenant' then selection[:cb_tenant_id]
  when 'tag'    then selection[:cb_tag_cat] && selection[:cb_tag_value]
  when 'entity' then selection[:cb_entity_id] && selection[:cb_provider_id]
  end
end
# Check for tab switch error conditions
# Gate tab switches in the report editor. params[:tab] names the requested
# tab ("edit_N"); when the current @edit[:new] state does not yet satisfy
# that tab's prerequisites, queue flash errors and fall back to a safe tab
# via @sb[:miq_tab].
def check_tabs
@sb[:miq_tab] = params[:tab]
active_tab = 'edit_1'
case @sb[:miq_tab].split('_')[1]
when '8' # Consolidation
if @edit[:new][:fields].empty?
add_flash(_('Consolidation tab is not available until at least 1 field has been selected'), :error)
end
when '2' # Formatting
if @edit[:new][:fields].empty?
add_flash(_('Formatting tab is not available until at least 1 field has been selected'), :error)
end
when '3' # Filter: trend reports need the trend column and a numeric limit
if @edit[:new][:model] == TREND_MODEL
unless @edit[:new][:perf_trend_col]
add_flash(_('Filter tab is not available until Trending for field has been selected'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Filter tab is not available until Trending Target Limit has been configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('Filter tab is not available until at least 1 field has been selected'), :error)
end
when '4' # Summary
if @edit[:new][:fields].empty?
add_flash(_('Summary tab is not available until at least 1 field has been selected'), :error)
end
when '5' # Charts: also requires a sort field; bounce back to Summary
if @edit[:new][:fields].empty?
add_flash(_('Charts tab is not available until at least 1 field has been selected'), :error)
elsif @edit[:new][:sortby1].blank? || @edit[:new][:sortby1] == NOTHING_STRING
add_flash(_('Charts tab is not available unless a sort field has been selected'), :error)
active_tab = 'edit_4'
end
when '6' # Timeline: requires at least one datetime field among selections
if @edit[:new][:fields].empty?
add_flash(_('Timeline tab is not available until at least 1 field has been selected'), :error)
else
found = false
@edit[:new][:fields].each do |field|
if MiqReport.get_col_type(field[1]) == :datetime
found = true
break
end
end
unless found
add_flash(_('Timeline tab is not available unless at least 1 time field has been selected'), :error)
end
end
when '7' # Preview: same prerequisites as running the report
if @edit[:new][:model] == TREND_MODEL
unless @edit[:new][:perf_trend_col]
add_flash(_('Preview tab is not available until Trending for field has been selected'), :error)
end
unless @edit[:new][:perf_limit_col] || @edit[:new][:perf_limit_val]
add_flash(_('Preview tab is not available until Trend Target Limit has been configured'), :error)
end
if @edit[:new][:perf_limit_val] && !is_numeric?(@edit[:new][:perf_limit_val])
add_flash(_('Trend Target Limit: Value must be numeric'), :error)
end
elsif @edit[:new][:fields].empty?
add_flash(_('Preview tab is not available until at least 1 field has been selected'), :error)
elsif Chargeback.db_is_chargeback?(@edit[:new][:model]) && !valid_chargeback_fields
add_flash(_('Preview tab is not available until Chargeback Filters has been configured'), :error)
active_tab = 'edit_3'
end
when '9' # Styling
if @edit[:new][:fields].empty?
add_flash(_('Styling tab is not available until at least 1 field has been selected'), :error)
end
end
@sb[:miq_tab] = active_tab if flash_errors?
end
end
|
require 'google_places/review'
module GooglePlaces
class Spot
attr_accessor :lat, :lng, :viewport, :name, :icon, :reference, :vicinity, :types, :id, :formatted_phone_number, :international_phone_number, :formatted_address, :address_components, :street_number, :street, :city, :region, :postal_code, :country, :rating, :url, :cid, :website, :reviews, :aspects, :zagat_selected, :zagat_reviewed, :photos, :review_summary, :nextpagetoken, :price_level, :opening_hours, :events, :utc_offset, :place_id, :permanently_closed
# Search for Spots at the provided location
#
# @return [Array<Spot>]
# @param [String,Integer] lat the latitude for the search
# @param [String,Integer] lng the longitude for the search
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# Note that radius must not be included if :rankby => 'distance' (described below) is specified.
# <b>Note that this is a mandatory parameter</b>
# @option options [String] :rankby
# Specifies the order in which results are listed. Possible values are:
# - prominence (default). This option sorts results based on their importance.
# Ranking will favor prominent places within the specified area.
# Prominence can be affected by a Place's ranking in Google's index,
# the number of check-ins from your application, global popularity, and other factors.
# - distance. This option sorts results in ascending order by their distance from the specified location.
# Ranking results by distance will set a fixed search radius of 50km.
# One or more of keyword, name, or types is required.
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :name
# A term to be matched against the names of Places.
# Results will be restricted to those containing the passed name value.
# @option options [String] :keyword
# A term to be matched against all content that Google has indexed for this Spot,
# including but not limited to name, type, and address,
# as well as customer reviews and other third-party content.
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see http://spreadsheets.google.com/pub?key=p9pdwsai2hDMsLkXsoM05KQ&gid=1 List of supported languages
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Nearby search around a lat/lng point; see the YARD block above for options.
def self.list(lat, lng, api_key, options = {})
  multipage_request = !!options.delete(:multipage)
  rankby = options.delete(:rankby)
  # A radius must not accompany rankby=distance, so only default it for
  # prominence (or unspecified) ranking.
  radius = options.delete(:radius) || 1000 if rankby.nil? || rankby =~ /prominence/
  types = options.delete(:types)
  exclude = options.delete(:exclude) || []
  exclude = [exclude] unless exclude.is_a?(Array)

  params = {
    :location      => Location.new(lat, lng).format,
    :radius        => radius,
    :rankby        => rankby,
    :key           => api_key,
    :name          => options.delete(:name),
    :language      => options.delete(:language),
    :keyword       => options.delete(:keyword),
    :retry_options => options.delete(:retry_options) || {}
  }
  zagat = options.delete(:zagat_selected) || false
  params[:zagatselected] = zagat if zagat
  # Accept types as a string or array
  params[:types] = (types.is_a?(Array) ? types.join('|') : types) if types
  request(:spots, multipage_request, exclude, params)
end
# Search for Spots within a given SW|NE bounds with query
#
# @return [Array<Spot>]
# @param [Hash] bounds
# @param [String] api_key the provided api key
# @param [Hash] options
# @option bounds [String, Array] :start_point
# An array that contains the lat/lng pair for the first
# point in the bounds (rectangle)
# @option bounds [:start_point][String, Integer] :lat
# The starting point coordinates latitude value
# @option bounds [:start_point][String, Integer] :lng
# The starting point coordinates longitude value
# @option bounds [String, Array] :end_point
# An array that contains the lat/lng pair for the end
# point in the bounds (rectangle)
# @option bounds [:end_point][String, Integer] :lat
# The end point coordinates latitude value
# @option bounds [:end_point][String, Integer] :lng
# The end point coordinates longitude value
# @option options [String,Array] :query
# Restricts the results to Spots matching term(s) in the specified query
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Text-query search restricted to a lat/lng rectangle; see the YARD block
# above for the bounds/options contract. Returns an Array<Spot>.
def self.list_by_bounds(bounds, api_key, options = {})
  rect = Rectangle.new(
    bounds[:start_point][:lat], bounds[:start_point][:lng],
    bounds[:end_point][:lat], bounds[:end_point][:lng]
  )
  multipage_request = !!options.delete(:multipage)
  # :rankby and :name are stripped from the caller's hash (preserving the
  # original mutation) but are not supported by the bounds search; the old
  # code assigned them to never-used locals.
  options.delete(:rankby)
  options.delete(:name)
  query = options.delete(:query)
  language = options.delete(:language)
  exclude = options.delete(:exclude) || []
  retry_options = options.delete(:retry_options) || {}
  zagat_selected = options.delete(:zagat_selected) || false
  exclude = [exclude] unless exclude.is_a?(Array)
  options = {
    :bounds => rect.format,
    :key => api_key,
    :language => language,
    :retry_options => retry_options
  }
  options[:zagatselected] = zagat_selected if zagat_selected
  # Accept the query as a string or array
  if query
    query = (query.is_a?(Array) ? query.join('|') : query)
    options.merge!(:query => query)
  end
  request(:spots_by_bounds, multipage_request, exclude, options)
end
# Search for Spots using Radar Search. Spots will only include reference and lat/lng information. You can send a Place Details request for more information about any of them.
#
# @return [Array<Spot>]
# @param [String,Integer] lat the latitude for the search
# @param [String,Integer] lng the longitude for the search
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# <b>Note that this is a mandatory parameter</b>
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :name
# A term to be matched against the names of Places.
# Results will be restricted to those containing the passed name value.
# @option options [String] :keyword
# A term to be matched against all content that Google has indexed for this Spot,
# including but not limited to name, type, and address,
# as well as customer reviews and other third-party content.
# @option options [Integer] :minprice
# Restricts results to only those places within the specified price range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive.
# @option options [Integer] :maxprice
# Restricts results to only those places within the specified price range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive.
# @option options [Boolean] :opennow
# Restricts results to those Places that are open for business at the time the query is sent.
# Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query.
# Setting openNow to false has no effect.
# @option options [Boolean] :zagatselected
# Restrict your search to only those locations that are Zagat selected businesses.
# This parameter does not require a true or false value, simply including the parameter in the request is sufficient to restrict your search.
# The zagatselected parameter is experimental, and only available to Places API enterprise customers.
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see https://developers.google.com/places/documentation/search#RadarSearchRequests Radar Search
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Radar search: returns sparse Spots (reference + lat/lng only); see the
# YARD block above for options.
def self.list_by_radar(lat, lng, api_key, options = {})
  multipage = !!options.delete(:multipage)
  params = {
    :location      => Location.new(lat, lng).format,
    :radius        => options.delete(:radius) || 1000,
    :key           => api_key,
    :name          => options.delete(:name),
    :keyword       => options.delete(:keyword),
    :retry_options => options.delete(:retry_options) || {}
  }
  types = options.delete(:types)
  # These parameters are only sent when truthy (presence switches them on).
  {
    :zagatselected => options.delete(:zagat_selected) || false,
    :opennow       => options.delete(:opennow) || false,
    :minprice      => options.delete(:minprice) || false,
    :maxprice      => options.delete(:maxprice) || false
  }.each { |key, value| params[key] = value if value }
  # Accept types as a string or array
  params[:types] = (types.is_a?(Array) ? types.join('|') : types) if types
  request(:spots_by_radar, multipage, [], params)
end
# Search for a Spot with a reference key
#
# @return [Spot]
# @param [String] place_id the place_id of the spot
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String] :region
# The region code, specified as a ccTLD (country code top-level domain) two-character value. Most ccTLD
# codes are identical to ISO 3166-1 codes, with some exceptions. This parameter will only influence, not
# fully restrict, search results. If more relevant results exist outside of the specified region, they may
# be included. When this parameter is used, the country name is omitted from the resulting formatted_address
# for results in the specified region.
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
# Fetch a single Spot's details by place_id; see the YARD block above.
def self.find(place_id, api_key, options = {})
  response = Request.spot(
    :placeid       => place_id,
    :key           => api_key,
    :language      => options.delete(:language),
    :region        => options.delete(:region),
    :extensions    => options.delete(:review_summary) ? 'review_summary' : nil,
    :retry_options => options.delete(:retry_options) || {}
  )
  new(response['result'], api_key)
end
# Search for Spots with a pagetoken
#
# @return [Array<Spot>]
# @param [String] pagetoken the token to find next results
# @param [String] api_key the provided api key
# @param [Hash] options
# Fetch the next page of a previous search using its pagetoken.
# Multipage is forced off: the token already identifies a single page.
def self.list_by_pagetoken(pagetoken, api_key, options = {})
  raw_exclude = options.delete(:exclude) || []
  exclude = raw_exclude.is_a?(Array) ? raw_exclude : [raw_exclude]
  request(:spots_by_pagetoken, false, exclude, :pagetoken => pagetoken, :key => api_key)
end
# Search for Spots with a query
#
# @return [Array<Spot>]
# @param [String] query the query to search for
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [String,Integer] :lat
# the latitude for the search
# @option options [String,Integer] :lng
# the longitude for the search
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# Note that radius must not be included if :rankby => 'distance' (described below) is specified.
# <b>Note that this is a mandatory parameter</b>
# @option options [String] :rankby
# Specifies the order in which results are listed. Possible values are:
# - prominence (default). This option sorts results based on their importance.
# Ranking will favor prominent places within the specified area.
# Prominence can be affected by a Place's ranking in Google's index,
# the number of check-ins from your application, global popularity, and other factors.
# - distance. This option sorts results in ascending order by their distance from the specified location.
# Ranking results by distance will set a fixed search radius of 50km.
# One or more of keyword, name, or types is required.
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String] :region
# The region code, specified as a ccTLD (country code top-level domain) two-character value. Most ccTLD
# codes are identical to ISO 3166-1 codes, with some exceptions. This parameter will only influence, not
# fully restrict, search results. If more relevant results exist outside of the specified region, they may
# be included. When this parameter is used, the country name is omitted from the resulting formatted_address
# for results in the specified region.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see http://spreadsheets.google.com/pub?key=p9pdwsai2hDMsLkXsoM05KQ&gid=1 List of supported languages
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Executes a Places "Text Search" request for +query+.
#
# @return [Array<Spot>] spots matching the query, minus excluded types
# @param [String] query free-text search term
# @param [String] api_key the provided Google API key
# @param [Hash] options see the option documentation above
def self.list_by_query(query, api_key, options = {})
  # Location and radius are optional for text search; remember whether the
  # caller supplied them before the keys are deleted below.
  with_location = options.has_key?(:lat) && options.has_key?(:lng)
  with_radius = options.has_key?(:radius)
  multipage_request = !!options.delete(:multipage)
  location = Location.new(options.delete(:lat), options.delete(:lng)) if with_location
  radius = options.delete(:radius) if with_radius
  rankby = options.delete(:rankby)
  language = options.delete(:language)
  region = options.delete(:region)
  types = options.delete(:types)
  exclude = options.delete(:exclude) || []
  retry_options = options.delete(:retry_options) || {}
  exclude = [exclude] unless exclude.is_a?(Array)
  options = {
    :query => query,
    :key => api_key,
    :rankby => rankby,
    :language => language,
    :retry_options => retry_options
  }
  options[:location] = location.format if with_location
  options[:radius] = radius if with_radius
  # BUGFIX: only send :region when the caller actually supplied one; the
  # previous version always included the key, sending an empty/nil region.
  options[:region] = region unless region.nil?
  # Accept types as a String or an Array ('|' means OR for the Places API).
  if types
    types = (types.is_a?(Array) ? types.join('|') : types)
    options.merge!(:types => types)
  end
  request(:spots_by_query, multipage_request, exclude, options)
end
# Runs +method+ against the Places API (optionally following pagination)
# and wraps each raw result hash in a Spot, dropping excluded types.
#
# @return [Array<Spot>]
def self.request(method, multipage_request, exclude, options)
  collected = []
  multi_pages_request(method, multipage_request, options) do |result|
    # Some places returned by Google do not have a 'types' property. If the user specified 'types', then
    # this is a non-issue because those places will not be returned. However, if the user did not specify
    # 'types', then we do not want to filter out places with a missing 'types' property from the results set.
    result_types = result['types']
    collected << new(result, options[:key]) if result_types.nil? || (result_types & exclude).empty?
  end
  collected
end
# Performs the HTTP request for +method+ and yields every raw result hash.
# When +multipage_request+ is true, follows "next_page_token" pagination;
# otherwise the token is attached to the last result as "nextpagetoken".
def self.multi_pages_request(method, multipage_request, options)
  loop do
    response = Request.send(method, options)
    results = response['results']
    token = response["next_page_token"]
    results.each do |result|
      # Expose the page token on the final result when not auto-paginating,
      # so callers can resume via list_by_pagetoken.
      if !multipage_request && !token.nil? && result == results.last
        result.merge!("nextpagetoken" => token)
      end
      yield(result)
    end
    # Stop unless we are auto-paginating and another page exists.
    break unless multipage_request && !token.nil?
    options = {
      :pagetoken => token,
      :key => options[:key]
    }
    # There is a short delay between when a next_page_token is issued, and when it will become valid.
    # If requested too early, it will result in InvalidRequestError.
    # See: https://developers.google.com/places/documentation/search#PlaceSearchPaging
    sleep(2)
  end
end
# @param [JSON] json_result_object a JSON object to create a Spot from
# @return [Spot] a newly created spot
# Populates the Spot's attributes from a raw Places API result hash.
# Raises NoMethodError if 'geometry'/'location' are absent (accessed unguarded).
def initialize(json_result_object, api_key)
@reference = json_result_object['reference']
@place_id = json_result_object['place_id']
@vicinity = json_result_object['vicinity']
@lat = json_result_object['geometry']['location']['lat']
@lng = json_result_object['geometry']['location']['lng']
@viewport = json_result_object['geometry']['viewport']
@name = json_result_object['name']
@icon = json_result_object['icon']
@types = json_result_object['types']
@id = json_result_object['id']
@formatted_phone_number = json_result_object['formatted_phone_number']
@international_phone_number = json_result_object['international_phone_number']
@formatted_address = json_result_object['formatted_address']
# Raw components kept; convenience accessors extracted below.
@address_components = json_result_object['address_components']
@street_number = address_component(:street_number, 'short_name')
@street = address_component(:route, 'long_name')
@city = address_component(:locality, 'long_name')
@region = address_component(:administrative_area_level_1, 'long_name')
@postal_code = address_component(:postal_code, 'long_name')
@country = address_component(:country, 'long_name')
@rating = json_result_object['rating']
@price_level = json_result_object['price_level']
@opening_hours = json_result_object['opening_hours']
@url = json_result_object['url']
# NOTE(review): String#to_i on a Maps URL yields 0 unless it begins with
# digits — confirm this is the intended CID extraction.
@cid = json_result_object['url'].to_i
@website = json_result_object['website']
@zagat_reviewed = json_result_object['zagat_reviewed']
@zagat_selected = json_result_object['zagat_selected']
@aspects = aspects_component(json_result_object['aspects'])
@review_summary = json_result_object['review_summary']
# api_key is forwarded so each Photo can later build its fetch URL.
@photos = photos_component(json_result_object['photos'], api_key)
@reviews = reviews_component(json_result_object['reviews'])
@nextpagetoken = json_result_object['nextpagetoken']
@events = events_component(json_result_object['events'])
@utc_offset = json_result_object['utc_offset']
@permanently_closed = json_result_object['permanently_closed']
end
# Hash-style attribute access: spot[:name] delegates to the accessor.
# Uses +send+, so private/protected methods are reachable too.
def [] (key)
send(key)
end
# Extracts one field (e.g. 'long_name') from the first address component
# of the given type; nil when the type is absent or components are missing.
def address_component(address_component_type, address_component_length)
  components = address_components_of_type(address_component_type)
  return if components.nil?
  first_match = components.first
  first_match[address_component_length] unless first_match.nil?
end
# All address components whose 'types' array contains +type+;
# nil (not []) when no components were present on the result.
def address_components_of_type(type)
  return if @address_components.nil?
  wanted = type.to_s
  @address_components.select { |component| component['types'].include?(wanted) }
end
# Builds Review objects from the raw 'reviews' array; [] when absent.
def reviews_component(json_reviews)
  return [] unless json_reviews
  json_reviews.map do |review|
    Review.new(
      review['rating'],
      review['type'],
      review['author_name'],
      review['author_url'],
      review['text'],
      review['time'].to_i
    )
  end
end
# Maps raw aspect hashes to {:type, :rating} pairs; [] when absent.
def aspects_component(json_aspects)
  Array(json_aspects).map do |aspect|
    { :type => aspect['type'], :rating => aspect['rating'] }
  end
end
# Builds Photo objects from the raw 'photos' array; [] when absent.
# api_key is threaded through so each Photo can construct its fetch URL.
def photos_component(json_photos, api_key)
  return [] unless json_photos
  json_photos.map do |photo|
    Photo.new(
      photo['width'],
      photo['height'],
      photo['photo_reference'],
      photo['html_attributions'],
      api_key
    )
  end
end
# Maps raw event hashes to symbol-keyed hashes; [] when absent.
def events_component(json_events)
  Array(json_events).map do |event|
    {
      :event_id => event['event_id'],
      :summary => event['summary'],
      :url => event['url'],
      :start_time => event['start_time']
    }
  end
end
end
end
Don't send an empty `region` parameter when the caller did not supply one.
require 'google_places/review'
module GooglePlaces
class Spot
attr_accessor :lat, :lng, :viewport, :name, :icon, :reference, :vicinity, :types, :id, :formatted_phone_number, :international_phone_number, :formatted_address, :address_components, :street_number, :street, :city, :region, :postal_code, :country, :rating, :url, :cid, :website, :reviews, :aspects, :zagat_selected, :zagat_reviewed, :photos, :review_summary, :nextpagetoken, :price_level, :opening_hours, :events, :utc_offset, :place_id, :permanently_closed
# Search for Spots at the provided location
#
# @return [Array<Spot>]
# @param [String,Integer] lat the latitude for the search
# @param [String,Integer] lng the longitude for the search
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# Note that radius must not be included if :rankby => 'distance' (described below) is specified.
# <b>Note that this is a mandatory parameter</b>
# @option options [String] :rankby
# Specifies the order in which results are listed. Possible values are:
# - prominence (default). This option sorts results based on their importance.
# Ranking will favor prominent places within the specified area.
# Prominence can be affected by a Place's ranking in Google's index,
# the number of check-ins from your application, global popularity, and other factors.
# - distance. This option sorts results in ascending order by their distance from the specified location.
# Ranking results by distance will set a fixed search radius of 50km.
# One or more of keyword, name, or types is required.
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :name
# A term to be matched against the names of Places.
# Results will be restricted to those containing the passed name value.
# @option options [String] :keyword
# A term to be matched against all content that Google has indexed for this Spot,
# including but not limited to name, type, and address,
# as well as customer reviews and other third-party content.
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see http://spreadsheets.google.com/pub?key=p9pdwsai2hDMsLkXsoM05KQ&gid=1 List of supported languages
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Executes a Places "Nearby Search" around (lat, lng).
def self.list(lat, lng, api_key, options = {})
location = Location.new(lat, lng)
multipage_request = !!options.delete(:multipage)
rankby = options.delete(:rankby)
# Radius defaults to 1000m, but must be omitted for rankby=distance;
# NOTE(review): in that case :radius is still sent as nil below — confirm
# the Request layer drops nil parameters.
radius = options.delete(:radius) || 1000 if rankby.nil? || rankby =~ /prominence/
types = options.delete(:types)
name = options.delete(:name)
keyword = options.delete(:keyword)
language = options.delete(:language)
exclude = options.delete(:exclude) || []
retry_options = options.delete(:retry_options) || {}
zagat_selected = options.delete(:zagat_selected) || false
# Normalize a single excluded type into an array.
exclude = [exclude] unless exclude.is_a?(Array)
options = {
:location => location.format,
:radius => radius,
:rankby => rankby,
:key => api_key,
:name => name,
:language => language,
:keyword => keyword,
:retry_options => retry_options
}
options[:zagatselected] = zagat_selected if zagat_selected
# Accept Types as a string or array
if types
types = (types.is_a?(Array) ? types.join('|') : types)
options.merge!(:types => types)
end
request(:spots, multipage_request, exclude, options)
end
# Search for Spots within a give SW|NE bounds with query
#
# @return [Array<Spot>]
# @param [Hash] bounds
# @param [String] api_key the provided api key
# @param [Hash] options
# @option bounds [String, Array] :start_point
# An array that contains the lat/lng pair for the first
# point in the bounds (rectangle)
# @option bounds [:start_point][String, Integer] :lat
# The starting point coordinates latitude value
# @option bounds [:start_point][String, Integer] :lng
# The starting point coordinates longitude value
# @option bounds [String, Array] :end_point
# An array that contains the lat/lng pair for the end
# point in the bounds (rectangle)
# @option bounds [:end_point][String, Integer] :lat
# The end point coordinates latitude value
# @option bounds [:end_point][String, Integer] :lng
# The end point coordinates longitude value
# @option options [String,Array] :query
# Restricts the results to Spots matching term(s) in the specified query
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Executes a bounded text search within the SW/NE rectangle in +bounds+.
def self.list_by_bounds(bounds, api_key, options = {})
start_lat = bounds[:start_point][:lat]
start_lng = bounds[:start_point][:lng]
end_lat = bounds[:end_point][:lat]
end_lng = bounds[:end_point][:lng]
rect = Rectangle.new(start_lat, start_lng, end_lat, end_lng)
multipage_request = !!options.delete(:multipage)
# NOTE(review): rankby and name are extracted but never used afterwards;
# the deletes only strip them from the caller's options hash.
rankby = options.delete(:rankby)
query = options.delete(:query)
name = options.delete(:name)
language = options.delete(:language)
exclude = options.delete(:exclude) || []
retry_options = options.delete(:retry_options) || {}
zagat_selected = options.delete(:zagat_selected) || false
exclude = [exclude] unless exclude.is_a?(Array)
options = {
:bounds => rect.format,
:key => api_key,
:language => language,
:retry_options => retry_options
}
options[:zagatselected] = zagat_selected if zagat_selected
# Accept Types as a string or array
if query
query = (query.is_a?(Array) ? query.join('|') : query)
options.merge!(:query => query)
end
request(:spots_by_bounds, multipage_request, exclude, options)
end
# Search for Spots using Radar Search. Spots will only include reference and lat/lng information. You can send a Place Details request for more information about any of them.
#
# @return [Array<Spot>]
# @param [String,Integer] lat the latitude for the search
# @param [String,Integer] lng the longitude for the search
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# <b>Note that this is a mandatory parameter</b>
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :name
# A term to be matched against the names of Places.
# Results will be restricted to those containing the passed name value.
# @option options [String] :keyword
# A term to be matched against all content that Google has indexed for this Spot,
# including but not limited to name, type, and address,
# as well as customer reviews and other third-party content.
# @option options [Integer] :minprice
# Restricts results to only those places within the specified price range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive.
# @option options [Integer] :maxprice
# Restricts results to only those places within the specified price range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive.
# @option options [Boolean] :opennow
# Retricts results to those Places that are open for business at the time the query is sent.
# Places that do not specify opening hours in the Google Places database will not be returned if you include this parameter in your query.
# Setting openNow to false has no effect.
# @option options [Boolean] :zagatselected
# Restrict your search to only those locations that are Zagat selected businesses.
# This parameter does not require a true or false value, simply including the parameter in the request is sufficient to restrict your search.
# The zagatselected parameter is experimental, and only available to Places API enterprise customers.
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see https://developers.google.com/places/documentation/search#RadarSearchRequests Radar Search
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Executes a Places "Radar Search"; results carry only reference/location.
def self.list_by_radar(lat, lng, api_key, options = {})
location = Location.new(lat, lng)
multipage_request = !!options.delete(:multipage)
radius = options.delete(:radius) || 1000
types = options.delete(:types)
name = options.delete(:name)
keyword = options.delete(:keyword)
retry_options = options.delete(:retry_options) || {}
# Boolean-ish flags: only included in the request when truthy below.
zagat_selected = options.delete(:zagat_selected) || false
opennow = options.delete(:opennow) || false
minprice = options.delete(:minprice) || false
maxprice = options.delete(:maxprice) || false
# Radar search performs no type-based exclusion.
exclude = []
options = {
:location => location.format,
:radius => radius,
:key => api_key,
:name => name,
:keyword => keyword,
:retry_options => retry_options
}
options[:zagatselected] = zagat_selected if zagat_selected
options[:opennow] = opennow if opennow
options[:minprice] = minprice if minprice
options[:maxprice] = maxprice if maxprice
# Accept Types as a string or array
if types
types = (types.is_a?(Array) ? types.join('|') : types)
options.merge!(:types => types)
end
request(:spots_by_radar, multipage_request, exclude, options)
end
# Search for a Spot with a reference key
#
# @return [Spot]
# @param [String] place_id the place_id of the spot
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String] :region
# The region code, specified as a ccTLD (country code top-level domain) two-character value. Most ccTLD
# codes are identical to ISO 3166-1 codes, with some exceptions. This parameter will only influence, not
# fully restrict, search results. If more relevant results exist outside of the specified region, they may
# be included. When this parameter is used, the country name is omitted from the resulting formatted_address
# for results in the specified region.
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
# Fetches full details for a single place by +place_id+.
# @return [Spot]
def self.find(place_id, api_key, options = {})
language = options.delete(:language)
region = options.delete(:region)
retry_options = options.delete(:retry_options) || {}
# Opt-in premium extension: review summaries.
extensions = options.delete(:review_summary) ? 'review_summary' : nil
request_options = {
:placeid => place_id,
:key => api_key,
:language => language,
:extensions => extensions,
:retry_options => retry_options
}
# Region is only sent when explicitly provided.
request_options[:region] = region unless region.nil?
response = Request.spot(request_options)
self.new(response['result'], api_key)
end
# Search for Spots with a pagetoken
#
# @return [Array<Spot>]
# @param [String] pagetoken the token to find next results
# @param [String] api_key the provided api key
# @param [Hash] options
# Fetches the next page of a previous search using its +pagetoken+.
# Never auto-paginates further (multipage is forced off).
#
# @return [Array<Spot>]
def self.list_by_pagetoken(pagetoken, api_key, options = {})
  excluded = options.delete(:exclude) || []
  excluded = [excluded] unless excluded.is_a?(Array)
  request_options = {
    :pagetoken => pagetoken,
    :key => api_key
  }
  request(:spots_by_pagetoken, false, excluded, request_options)
end
# Search for Spots with a query
#
# @return [Array<Spot>]
# @param [String] query the query to search for
# @param [String] api_key the provided api key
# @param [Hash] options
# @option options [String,Integer] :lat
# the latitude for the search
# @option options [String,Integer] :lng
# the longitude for the search
# @option options [Integer] :radius (1000)
# Defines the distance (in meters) within which to return Place results.
# The maximum allowed radius is 50,000 meters.
# Note that radius must not be included if :rankby => 'distance' (described below) is specified.
# <b>Note that this is a mandatory parameter</b>
# @option options [String] :rankby
# Specifies the order in which results are listed. Possible values are:
# - prominence (default). This option sorts results based on their importance.
# Ranking will favor prominent places within the specified area.
# Prominence can be affected by a Place's ranking in Google's index,
# the number of check-ins from your application, global popularity, and other factors.
# - distance. This option sorts results in ascending order by their distance from the specified location.
# Ranking results by distance will set a fixed search radius of 50km.
# One or more of keyword, name, or types is required.
# @option options [String,Array] :types
# Restricts the results to Spots matching at least one of the specified types
# @option options [String] :language
# The language code, indicating in which language the results should be returned, if possible.
# @option options [String] :region
# The region code, specified as a ccTLD (country code top-level domain) two-character value. Most ccTLD
# codes are identical to ISO 3166-1 codes, with some exceptions. This parameter will only influence, not
# fully restrict, search results. If more relevant results exist outside of the specified region, they may
# be included. When this parameter is used, the country name is omitted from the resulting formatted_address
# for results in the specified region.
# @option options [String,Array<String>] :exclude ([])
# A String or an Array of <b>types</b> to exclude from results
#
# @option options [Hash] :retry_options ({})
# A Hash containing parameters for search retries
# @option options [Object] :retry_options[:status] ([])
# @option options [Integer] :retry_options[:max] (0) the maximum retries
# @option options [Integer] :retry_options[:delay] (5) the delay between each retry in seconds
#
# @see http://spreadsheets.google.com/pub?key=p9pdwsai2hDMsLkXsoM05KQ&gid=1 List of supported languages
# @see https://developers.google.com/maps/documentation/places/supported_types List of supported types
# Executes a Places "Text Search" request for +query+.
#
# @return [Array<Spot>] spots matching the query, minus excluded types
# @param [String] query free-text search term
# @param [String] api_key the provided Google API key
# @param [Hash] options see the option documentation above
def self.list_by_query(query, api_key, options = {})
  # Location and radius are optional for text search; remember whether the
  # caller supplied them before the keys are deleted below.
  # (Cleanup: collapsed the verbose if/else flag assignments and removed the
  # dead `query = query` self-assignment.)
  with_location = options.has_key?(:lat) && options.has_key?(:lng)
  with_radius = options.has_key?(:radius)
  multipage_request = !!options.delete(:multipage)
  location = Location.new(options.delete(:lat), options.delete(:lng)) if with_location
  radius = options.delete(:radius) if with_radius
  rankby = options.delete(:rankby)
  language = options.delete(:language)
  region = options.delete(:region)
  types = options.delete(:types)
  exclude = options.delete(:exclude) || []
  retry_options = options.delete(:retry_options) || {}
  exclude = [exclude] unless exclude.is_a?(Array)
  options = {
    :query => query,
    :key => api_key,
    :rankby => rankby,
    :language => language,
    :retry_options => retry_options
  }
  options[:location] = location.format if with_location
  options[:radius] = radius if with_radius
  # Only send :region when the caller actually supplied one.
  options[:region] = region unless region.nil?
  # Accept types as a String or an Array ('|' means OR for the Places API).
  if types
    types = (types.is_a?(Array) ? types.join('|') : types)
    options.merge!(:types => types)
  end
  request(:spots_by_query, multipage_request, exclude, options)
end
# Runs +method+ against the API and wraps every kept raw result in a Spot.
# @return [Array<Spot>]
def self.request(method, multipage_request, exclude, options)
results = []
self.multi_pages_request(method, multipage_request, options) do |result|
# Some places returned by Google do not have a 'types' property. If the user specified 'types', then
# this is a non-issue because those places will not be returned. However, if the user did not specify
# 'types', then we do not want to filter out places with a missing 'types' property from the results set.
results << self.new(result, options[:key]) if result['types'].nil? || (result['types'] & exclude) == []
end
results
end
# Performs the HTTP request and yields each raw result hash; follows
# "next_page_token" pagination when +multipage_request+ is true, otherwise
# attaches the token to the last yielded result as "nextpagetoken".
# Note: begin/end-while is a deliberate do-while — the first request always runs.
def self.multi_pages_request(method, multipage_request, options)
begin
response = Request.send(method, options)
response['results'].each do |result|
if !multipage_request && !response["next_page_token"].nil? && result == response['results'].last
# add next page token on the last result
result.merge!("nextpagetoken" => response["next_page_token"])
end
yield(result)
end
# request the next page if presence of a "next_page" token
next_page = false
if multipage_request && !response["next_page_token"].nil?
# Replace the request options wholesale: pagetoken + key is all the API needs.
options = {
:pagetoken => response["next_page_token"],
:key => options[:key]
}
# There is a short delay between when a next_page_token is issued, and when it will become valid.
# If requested too early, it will result in InvalidRequestError.
# See: https://developers.google.com/places/documentation/search#PlaceSearchPaging
sleep(2)
next_page = true
end
end while (next_page)
end
# @param [JSON] json_result_object a JSON object to create a Spot from
# @return [Spot] a newly created spot
# Populates the Spot's attributes from a raw Places API result hash.
# Raises NoMethodError if 'geometry'/'location' are absent (accessed unguarded).
def initialize(json_result_object, api_key)
@reference = json_result_object['reference']
@place_id = json_result_object['place_id']
@vicinity = json_result_object['vicinity']
@lat = json_result_object['geometry']['location']['lat']
@lng = json_result_object['geometry']['location']['lng']
@viewport = json_result_object['geometry']['viewport']
@name = json_result_object['name']
@icon = json_result_object['icon']
@types = json_result_object['types']
@id = json_result_object['id']
@formatted_phone_number = json_result_object['formatted_phone_number']
@international_phone_number = json_result_object['international_phone_number']
@formatted_address = json_result_object['formatted_address']
# Raw components kept; convenience accessors extracted below.
@address_components = json_result_object['address_components']
@street_number = address_component(:street_number, 'short_name')
@street = address_component(:route, 'long_name')
@city = address_component(:locality, 'long_name')
@region = address_component(:administrative_area_level_1, 'long_name')
@postal_code = address_component(:postal_code, 'long_name')
@country = address_component(:country, 'long_name')
@rating = json_result_object['rating']
@price_level = json_result_object['price_level']
@opening_hours = json_result_object['opening_hours']
@url = json_result_object['url']
# NOTE(review): String#to_i on a Maps URL yields 0 unless it begins with
# digits — confirm this is the intended CID extraction.
@cid = json_result_object['url'].to_i
@website = json_result_object['website']
@zagat_reviewed = json_result_object['zagat_reviewed']
@zagat_selected = json_result_object['zagat_selected']
@aspects = aspects_component(json_result_object['aspects'])
@review_summary = json_result_object['review_summary']
# api_key is forwarded so each Photo can later build its fetch URL.
@photos = photos_component(json_result_object['photos'], api_key)
@reviews = reviews_component(json_result_object['reviews'])
@nextpagetoken = json_result_object['nextpagetoken']
@events = events_component(json_result_object['events'])
@utc_offset = json_result_object['utc_offset']
@permanently_closed = json_result_object['permanently_closed']
end
# Hash-style attribute access: spot[:name] delegates to the accessor.
# Uses +send+, so private/protected methods are reachable too.
def [] (key)
send(key)
end
# Extracts one field (e.g. 'long_name') from the first address component
# of the given type; nil when the type is absent or components are missing.
def address_component(address_component_type, address_component_length)
if component = address_components_of_type(address_component_type)
component.first[address_component_length] unless component.first.nil?
end
end
# All address components whose 'types' array contains +type+;
# nil (not []) when no components were present on the result.
def address_components_of_type(type)
  return if @address_components.nil?
  type_name = type.to_s
  @address_components.select { |entry| entry['types'].include?(type_name) }
end
# Builds Review objects from the raw 'reviews' array; [] when absent.
def reviews_component(json_reviews)
if json_reviews
json_reviews.map { |r|
Review.new(
r['rating'],
r['type'],
r['author_name'],
r['author_url'],
r['text'],
r['time'].to_i
)
}
else []
end
end
# Maps raw aspect hashes to {:type, :rating} pairs; [] when absent.
def aspects_component(json_aspects)
  Array(json_aspects).map { |entry| { :type => entry['type'], :rating => entry['rating'] } }
end
# Builds Photo objects from the raw 'photos' array; [] when absent.
# api_key is threaded through so each Photo can construct its fetch URL.
def photos_component(json_photos, api_key)
if json_photos
json_photos.map{ |p|
Photo.new(
p['width'],
p['height'],
p['photo_reference'],
p['html_attributions'],
api_key
)
}
else []
end
end
# Maps raw event hashes to symbol-keyed hashes; [] when absent.
def events_component(json_events)
  Array(json_events).map do |entry|
    { :event_id => entry['event_id'], :summary => entry['summary'], :url => entry['url'], :start_time => entry['start_time'] }
  end
end
end
end
|
module Spree
  StoreController.class_eval do
    before_action :set_ransack

    private

    # Builds the Ransack search object for every storefront request.
    # BUGFIX: search directly on Spree::Product instead of materialising the
    # full product list via build_searcher/retrieve_products on each request
    # only to run a Ransack search over it. Method renamed to snake_case.
    def set_ransack
      @search = Spree::Product.search(params[:q])
    end
  end
end
Use the Spree::Product model directly for the Ransack search instead of going through build_searcher.
module Spree
StoreController.class_eval do
# Runs before every storefront action to expose @search to the views.
before_action :setRansack
private
# Builds a Ransack search object directly from the product model.
def setRansack
@search = Spree::Product.search(params[:q])
end
end
end
|
# Author: Hiroshi Ichikawa <http://gimite.net/>
# The license of this source is "New BSD Licence"
require "enumerator"
require "set"
require "net/https"
require "open-uri"
require "cgi"
require "uri"
require "rubygems"
require "hpricot"
require "oauth"
Net::HTTP.version_1_2
module GoogleSpreadsheet
# Authenticates with given +mail+ and +password+, and returns GoogleSpreadsheet::Session
# if succeeds. Raises GoogleSpreadsheet::AuthenticationError if fails.
# Google Apps account is supported.
# Authenticates with +mail+ and +password+; delegates to Session.login.
# @return [Session]
# @raise [GoogleSpreadsheet::AuthenticationError] when authentication fails
def self.login(mail, password)
  Session.login(mail, password)
end
# Authenticates with given OAuth token.
#
# For generating oauth_token, you can proceed as follow:
#
# 1) First generate OAuth consumer object with key and secret for your site by registering site with google
# @consumer = OAuth::Consumer.new( "key","secret", {:site=>"https://agree2"})
# 2) Request token with OAuth
# @request_token = @consumer.get_request_token
# session[:request_token] = @request_token
# redirect_to @request_token.authorize_url
# 3) Create an oauth access token
# @oauth_access_token = @request_token.get_access_token
# @access_token = OAuth::AccessToken.new(@consumer, @oauth_access_token.token, @oauth_access_token.secret)
#
# See these documents for details:
#
# - http://oauth.rubyforge.org/
# - http://code.google.com/apis/accounts/docs/OAuth.html
# Builds a Session from an already-authorized OAuth access token;
# delegates to Session.login_with_oauth.
def self.login_with_oauth(oauth_token)
return Session.login_with_oauth(oauth_token)
end
# Restores GoogleSpreadsheet::Session from +path+ and returns it.
# If +path+ doesn't exist or authentication has failed, prompts mail and password on console,
# authenticates with them, stores the session to +path+ and returns it.
#
# This method requires Highline library: http://rubyforge.org/projects/highline/
# Restores a Session from the token file at +path+, prompting for
# credentials via Highline (and re-saving tokens) when missing or stale.
def self.saved_session(path = ENV["HOME"] + "/.ruby_google_spreadsheet.token")
tokens = {}
if File.exist?(path)
# Token file layout: line 1 = :wise token, line 2 = :writely token.
open(path) do |f|
for auth in [:wise, :writely]
line = f.gets()
tokens[auth] = line && line.chomp()
end
end
end
session = Session.new(tokens)
# Installed handler: prompt on console, re-login, persist fresh tokens.
session.on_auth_fail = proc() do
begin
# Highline is loaded lazily so the library has no hard dependency on it.
require "highline"
rescue LoadError
raise(LoadError,
"GoogleSpreadsheet.saved_session requires Highline library.\n" +
"Run\n" +
" \$ sudo gem install highline\n" +
"to install it.")
end
highline = HighLine.new()
mail = highline.ask("Mail: ")
password = highline.ask("Password: "){ |q| q.echo = false }
session.login(mail, password)
# 0600: the token file grants account access, keep it owner-only.
open(path, "w", 0600) do |f|
f.puts(session.auth_token(:wise))
f.puts(session.auth_token(:writely))
end
true
end
# No stored token at all: authenticate interactively right away.
if !session.auth_token
session.on_auth_fail.call()
end
return session
end
module Util #:nodoc:

  module_function

  # URL-encodes a params hash into a "k=v&k2=v2" query string.
  def encode_query(params)
    params.map { |key, value| "#{CGI.escape(key)}=#{CGI.escape(value)}" }.join("&")
  end

  # HTML-escapes +str+ (coerced to a String first).
  def h(str)
    CGI.escapeHTML(str.to_s)
  end

  # Tags +str+ as UTF-8 where String#force_encoding exists (Ruby 1.9+);
  # returns it untouched otherwise.
  def as_utf8(str)
    str.respond_to?(:force_encoding) ? str.force_encoding("UTF-8") : str
  end

end
# Raised when spreadsheets.google.com has returned error.
# Base error type for every failure reported by this library.
class Error < RuntimeError
end
# Raised when GoogleSpreadsheet.login has failed.
# Raised when login credentials are rejected by Google.
class AuthenticationError < GoogleSpreadsheet::Error
end
# Use GoogleSpreadsheet.login or GoogleSpreadsheet.saved_session to get
# GoogleSpreadsheet::Session object.
class Session
include(Util)
extend(Util)
# The same as GoogleSpreadsheet.login.
# The same as GoogleSpreadsheet.login: builds a Session and authenticates
# it with +mail+ and +password+ before returning it.
def self.login(mail, password)
  session = Session.new()
  session.login(mail, password)
  session
end
# The same as GoogleSpreadsheet.login_with_oauth.
# The same as GoogleSpreadsheet.login_with_oauth: wraps an authorized
# OAuth access token in a new Session.
# @return [Session]
def self.login_with_oauth(oauth_token)
  # The original assigned the session to a dead local and relied on the
  # assignment's value; return it explicitly (matching the file's style).
  return Session.new(nil, oauth_token)
end
# Restores session using return value of auth_tokens method of previous session.
# Restores a session either from ClientLogin +auth_tokens+ (a Hash keyed
# by :wise/:writely) or from an OAuth access token — mutually exclusive;
# the OAuth token wins when both are given.
def initialize(auth_tokens = nil, oauth_token = nil)
if oauth_token
@oauth_token = oauth_token
else
@auth_tokens = auth_tokens
end
end
# Authenticates with given +mail+ and +password+, and updates current session object
# if succeeds. Raises GoogleSpreadsheet::AuthenticationError if fails.
# Google Apps account is supported.
# Authenticates against both the spreadsheets (:wise) and docs (:writely)
# services, resetting any previously stored tokens first.
# On failure: returns true if the on_auth_fail handler recovers,
# otherwise raises AuthenticationError.
def login(mail, password)
begin
@auth_tokens = {}
authenticate(mail, password, :wise)
authenticate(mail, password, :writely)
rescue GoogleSpreadsheet::Error => ex
# Give the caller-installed handler one chance to recover (e.g. re-prompt).
return true if @on_auth_fail && @on_auth_fail.call()
raise(AuthenticationError, "authentication failed for #{mail}: #{ex.message}")
end
end
# Authentication tokens.
attr_reader(:auth_tokens)
# Authentication token.
# Looks up the ClientLogin token for the given service (:wise by default).
def auth_token(auth = :wise)
return @auth_tokens[auth]
end
# Proc or Method called when authentication has failed.
# When this function returns +true+, it tries again.
attr_accessor :on_auth_fail
# Builds the Authorization header hash for +auth+; empty for :none
# (used by requests that must go out unauthenticated).
def auth_header(auth) #:nodoc:
if auth == :none
return {}
else
return {"Authorization" => "GoogleLogin auth=#{@auth_tokens[auth]}"}
end
end
# Returns list of spreadsheets for the user as array of GoogleSpreadsheet::Spreadsheet.
# You can specify query parameters described at
# http://code.google.com/apis/spreadsheets/docs/2.0/reference.html#Parameters
#
# e.g.
# session.spreadsheets
# session.spreadsheets("title" => "hoge")
def spreadsheets(params = {})
query = encode_query(params)
doc = request(:get, "https://spreadsheets.google.com/feeds/spreadsheets/private/full?#{query}")
result = []
for entry in doc.search("entry")
title = as_utf8(entry.search("title").text)
url = as_utf8(entry.search(
"link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
result.push(Spreadsheet.new(self, url, title))
end
return result
end
# Returns GoogleSpreadsheet::Spreadsheet with given +key+.
#
# e.g.
# # http://spreadsheets.google.com/ccc?key=pz7XtlQC-PYx-jrVMJErTcg&hl=ja
# session.spreadsheet_by_key("pz7XtlQC-PYx-jrVMJErTcg")
def spreadsheet_by_key(key)
url = "https://spreadsheets.google.com/feeds/worksheets/#{key}/private/full"
return Spreadsheet.new(self, url)
end
# Returns GoogleSpreadsheet::Spreadsheet with given +url+. You must specify either of:
# - URL of the page you open to access the spreadsheet in your browser
# - URL of worksheet-based feed of the spreadseet
#
# e.g.
# session.spreadsheet_by_url(
# "http://spreadsheets.google.com/ccc?key=pz7XtlQC-PYx-jrVMJErTcg&hl=en")
# session.spreadsheet_by_url(
# "https://spreadsheets.google.com/feeds/worksheets/pz7XtlQC-PYx-jrVMJErTcg/private/full")
def spreadsheet_by_url(url)
# Tries to parse it as URL of human-readable spreadsheet.
uri = URI.parse(url)
if uri.host == "spreadsheets.google.com" && uri.path =~ /\/ccc$/
if (uri.query || "").split(/&/).find(){ |s| s=~ /^key=(.*)$/ }
return spreadsheet_by_key($1)
end
end
# Assumes the URL is worksheets feed URL.
return Spreadsheet.new(self, url)
end
# Returns GoogleSpreadsheet::Worksheet with given +url+.
# You must specify URL of cell-based feed of the worksheet.
#
# e.g.
# session.worksheet_by_url(
# "http://spreadsheets.google.com/feeds/cells/pz7XtlQC-PYxNmbBVgyiNWg/od6/private/full")
def worksheet_by_url(url)
return Worksheet.new(self, nil, url)
end
# Creates new spreadsheet and returns the new GoogleSpreadsheet::Spreadsheet.
#
# e.g.
# session.create_spreadsheet("My new sheet")
def create_spreadsheet(
title = "Untitled",
feed_url = "https://docs.google.com/feeds/documents/private/full")
xml = <<-"EOS"
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom" xmlns:docs="http://schemas.google.com/docs/2007">
<atom:category scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet" label="spreadsheet"/>
<atom:title>#{h(title)}</atom:title>
</atom:entry>
EOS
doc = request(:post, feed_url, :data => xml, :auth => :writely)
ss_url = as_utf8(doc.search(
"link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
return Spreadsheet.new(self, ss_url, title)
end
def request(method, url, params = {}) #:nodoc:
# Always uses HTTPS.
uri = URI.parse(url.gsub(%r{^http://}, "https://"))
data = params[:data]
auth = params[:auth] || :wise
if params[:header]
add_header = params[:header]
else
add_header = data ? {"Content-Type" => "application/atom+xml"} : {}
end
response_type = params[:response_type] || :xml
if @oauth_token
if method == :delete || method == :get
response = @oauth_token.__send__(method, url, add_header)
else
response = @oauth_token.__send__(method, url, data, add_header)
end
return convert_response(response, response_type)
else
http = Net::HTTP.new(uri.host, uri.port)
http.use_ssl = true
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
http.start() do
while true
path = uri.path + (uri.query ? "?#{uri.query}" : "")
header = auth_header(auth).merge(add_header)
if method == :delete || method == :get
response = http.__send__(method, path, header)
else
response = http.__send__(method, path, data, header)
end
if response.code == "401" && @on_auth_fail && @on_auth_fail.call()
next
end
if !(response.code =~ /^2/)
raise(
response.code == "401" ? AuthenticationError : GoogleSpreadsheet::Error,
"Response code #{response.code} for #{method} #{url}: " +
CGI.unescapeHTML(response.body))
end
return convert_response(response, response_type)
end
end
end
end
private
def convert_response(response, response_type)
case response_type
when :xml
return Hpricot.XML(response.body)
when :raw
return response.body
else
raise("unknown params[:response_type]: %s" % response_type)
end
end
def authenticate(mail, password, auth)
params = {
"accountType" => "HOSTED_OR_GOOGLE",
"Email" => mail,
"Passwd" => password,
"service" => auth.to_s(),
"source" => "Gimite-RubyGoogleSpreadsheet-1.00",
}
response = request(:post,
"https://www.google.com/accounts/ClientLogin",
:data => encode_query(params), :auth => :none, :header => {}, :response_type => :raw)
@auth_tokens[auth] = response.slice(/^Auth=(.*)$/, 1)
end
end
# Use methods in GoogleSpreadsheet::Session to get GoogleSpreadsheet::Spreadsheet object.
class Spreadsheet

  include(Util)

  def initialize(session, worksheets_feed_url, title = nil) #:nodoc:
    @session = session
    @worksheets_feed_url = worksheets_feed_url
    @title = title
  end

  # URL of worksheet-based feed of the spreadsheet.
  attr_reader(:worksheets_feed_url)

  # Title of the spreadsheet. So far only available if you get this object by
  # GoogleSpreadsheet::Session#spreadsheets.
  attr_reader(:title)

  # Key of the spreadsheet, extracted from the worksheets feed URL.
  def key
    if !(@worksheets_feed_url =~
        %r{^https?://spreadsheets.google.com/feeds/worksheets/(.*)/private/full$})
      raise(GoogleSpreadsheet::Error,
        "worksheets feed URL is in unknown format: #{@worksheets_feed_url}")
    end
    return $1
  end

  # Tables feed URL of the spreadsheet.
  def tables_feed_url
    return "https://spreadsheets.google.com/feeds/#{self.key}/tables"
  end

  # URL of feed used in document list feed API.
  def document_feed_url
    return "https://docs.google.com/feeds/documents/private/full/spreadsheet%3A#{self.key}"
  end

  # Creates copy of this spreadsheet with the given name.
  # Returns a GoogleSpreadsheet::Spreadsheet for the new copy.
  def duplicate(new_name = nil)
    new_name ||= (@title ? "Copy of " + @title : "Untitled")
    get_url = "https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=#{key}&exportFormat=ods"
    ods = @session.request(:get, get_url, :response_type => :raw)
    url = "https://docs.google.com/feeds/documents/private/full"
    header = {
      "Content-Type" => "application/x-vnd.oasis.opendocument.spreadsheet",
      "Slug" => URI.encode(new_name),
    }
    doc = @session.request(:post, url, :data => ods, :auth => :writely, :header => header)
    ss_url = as_utf8(doc.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
    # BUGFIX: the returned object used to carry the ORIGINAL spreadsheet's
    # title; the copy is created under +new_name+ (sent via the Slug header).
    return Spreadsheet.new(@session, ss_url, new_name)
  end

  # If +permanent+ is +false+, moves the spreadsheet to the trash.
  # If +permanent+ is +true+, deletes the spreadsheet permanently.
  def delete(permanent = false)
    @session.request(:delete,
      self.document_feed_url + (permanent ? "?delete=true" : ""),
      :auth => :writely, :header => {"If-Match" => "*"})
  end

  # Renames title of the spreadsheet.
  def rename(title)
    doc = @session.request(:get, self.document_feed_url)
    edit_url = doc.search("link[@rel='edit']")[0]["href"]
    xml = <<-"EOS"
<atom:entry
xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:docs="http://schemas.google.com/docs/2007">
<atom:category
scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet" label="spreadsheet"/>
<atom:title>#{h(title)}</atom:title>
</atom:entry>
    EOS
    @session.request(:put, edit_url, :data => xml)
  end

  # Returns worksheets of the spreadsheet as array of GoogleSpreadsheet::Worksheet.
  def worksheets
    doc = @session.request(:get, @worksheets_feed_url)
    result = []
    for entry in doc.search("entry")
      title = as_utf8(entry.search("title").text)
      url = as_utf8(entry.search(
        "link[@rel='http://schemas.google.com/spreadsheets/2006#cellsfeed']")[0]["href"])
      result.push(Worksheet.new(@session, self, url, title))
    end
    return result.freeze()
  end

  # Adds a new worksheet to the spreadsheet. Returns added GoogleSpreadsheet::Worksheet.
  def add_worksheet(title, max_rows = 100, max_cols = 20)
    xml = <<-"EOS"
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:gs='http://schemas.google.com/spreadsheets/2006'>
<title>#{h(title)}</title>
<gs:rowCount>#{h(max_rows)}</gs:rowCount>
<gs:colCount>#{h(max_cols)}</gs:colCount>
</entry>
    EOS
    doc = @session.request(:post, @worksheets_feed_url, :data => xml)
    url = as_utf8(doc.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#cellsfeed']")[0]["href"])
    return Worksheet.new(@session, self, url, title)
  end

  # Returns list of tables in the spreadsheet.
  def tables
    doc = @session.request(:get, self.tables_feed_url)
    return doc.search("entry").map(){ |e| Table.new(@session, e) }.freeze()
  end

end
# Use GoogleSpreadsheet::Worksheet#add_table to create table.
# Use GoogleSpreadsheet::Worksheet#tables to get GoogleSpreadsheet::Table objects.
class Table
  include(Util)

  # +entry+ is the Hpricot element of the table's Atom entry; caches the
  # owning worksheet's name and the records feed URL from it.
  def initialize(session, entry) #:nodoc:
    @columns = {}
    @worksheet_title = as_utf8(entry.search("gs:worksheet")[0]["name"])
    @records_url = as_utf8(entry.search("content")[0]["src"])
    @session = session
  end

  # Title of the worksheet the table belongs to.
  attr_reader(:worksheet_title)

  # Adds a record.
  # +values+ maps field (column) names to cell values. The record is posted
  # to the server immediately (there is no separate save() step).
  def add_record(values)
    fields = ""
    values.each do |name, value|
      fields += "<gs:field name='#{h(name)}'>#{h(value)}</gs:field>"
    end
    xml =<<-EOS
<entry
xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
#{fields}
</entry>
    EOS
    @session.request(:post, @records_url, :data => xml)
  end

  # Returns records in the table as an Array of GoogleSpreadsheet::Record.
  def records
    doc = @session.request(:get, @records_url)
    return doc.search("entry").map(){ |e| Record.new(@session, e) }
  end

end
# Use GoogleSpreadsheet::Table#records to get GoogleSpreadsheet::Record objects.
class Record < Hash

  # Provides as_utf8(), which #initialize below calls. Without this include
  # (present in every other class of this file) the method was undefined and
  # constructing a Record raised NoMethodError.
  include(Util)

  # Builds a {field name => value} Hash from the <gs:field> elements of the
  # record's Atom +entry+.
  def initialize(session, entry) #:nodoc:
    @session = session
    entry.search("gs:field").each do |field|
      self[as_utf8(field["name"])] = as_utf8(field.inner_text)
    end
  end

  def inspect #:nodoc:
    content = self.map(){ |k, v| "%p => %p" % [k, v] }.join(", ")
    return "\#<%p:{%s}>" % [self.class, content]
  end

end
# Use GoogleSpreadsheet::Spreadsheet#worksheets to get GoogleSpreadsheet::Worksheet object.
class Worksheet
  include(Util)

  def initialize(session, spreadsheet, cells_feed_url, title = nil) #:nodoc:
    @session = session
    @spreadsheet = spreadsheet
    @cells_feed_url = cells_feed_url
    @title = title
    # Cell data is loaded lazily: nil means "not fetched yet" and accessors
    # call reload() first. @cells/@input_values are keyed by [row, col]
    # pairs (1-origin).
    @cells = nil
    @input_values = nil
    # Set of [row, col] pairs changed via []= and not yet sent by save().
    @modified = Set.new()
  end

  # URL of cell-based feed of the worksheet.
  attr_reader(:cells_feed_url)

  # URL of worksheet feed URL of the worksheet.
  def worksheet_feed_url
    # I don't know good way to get worksheet feed URL from cells feed URL.
    # Probably it would be cleaner to keep worksheet feed URL and get cells feed URL
    # from it.
    if !(@cells_feed_url =~
        %r{^https?://spreadsheets.google.com/feeds/cells/(.*)/(.*)/private/full$})
      raise(GoogleSpreadsheet::Error,
        "cells feed URL is in unknown format: #{@cells_feed_url}")
    end
    # $1 = spreadsheet key, $2 = worksheet id from the regexp above.
    return "https://spreadsheets.google.com/feeds/worksheets/#{$1}/private/full/#{$2}"
  end

  # GoogleSpreadsheet::Spreadsheet which this worksheet belongs to.
  # Fetched lazily (by key) when the worksheet was created from a bare URL.
  def spreadsheet
    if !@spreadsheet
      if !(@cells_feed_url =~
          %r{^https?://spreadsheets.google.com/feeds/cells/(.*)/(.*)/private/full$})
        raise(GoogleSpreadsheet::Error,
          "cells feed URL is in unknown format: #{@cells_feed_url}")
      end
      @spreadsheet = @session.spreadsheet_by_key($1)
    end
    return @spreadsheet
  end

  # Returns content of the cell as String. Top-left cell is [1, 1].
  def [](row, col)
    return self.cells[[row, col]] || ""
  end

  # Updates content of the cell.
  # Note that update is not sent to the server until you call save().
  # Top-left cell is [1, 1].
  #
  # e.g.
  #   worksheet[2, 1] = "hoge"
  #   worksheet[1, 3] = "=A1+B1"
  def []=(row, col, value)
    reload() if !@cells
    @cells[[row, col]] = value
    @input_values[[row, col]] = value
    @modified.add([row, col])
    # Grow the sheet when the write falls outside the current bounds.
    self.max_rows = row if row > @max_rows
    self.max_cols = col if col > @max_cols
  end

  # Returns the value or the formula of the cell. Top-left cell is [1, 1].
  #
  # If user input "=A1+B1" to cell [1, 3], worksheet[1, 3] is "3" for example and
  # worksheet.input_value(1, 3) is "=RC[-2]+RC[-1]".
  def input_value(row, col)
    reload() if !@cells
    return @input_values[[row, col]] || ""
  end

  # Row number of the bottom-most non-empty row.
  def num_rows
    reload() if !@cells
    return @cells.keys.map(){ |r, c| r }.max || 0
  end

  # Column number of the right-most non-empty column.
  def num_cols
    reload() if !@cells
    return @cells.keys.map(){ |r, c| c }.max || 0
  end

  # Number of rows including empty rows.
  def max_rows
    reload() if !@cells
    return @max_rows
  end

  # Updates number of rows.
  # Note that update is not sent to the server until you call save().
  def max_rows=(rows)
    reload() if !@cells
    @max_rows = rows
    @meta_modified = true
  end

  # Number of columns including empty columns.
  def max_cols
    reload() if !@cells
    return @max_cols
  end

  # Updates number of columns.
  # Note that update is not sent to the server until you call save().
  def max_cols=(cols)
    reload() if !@cells
    @max_cols = cols
    @meta_modified = true
  end

  # Title of the worksheet (shown as tab label in Web interface).
  def title
    reload() if !@title
    return @title
  end

  # Updates title of the worksheet.
  # Note that update is not sent to the server until you call save().
  def title=(title)
    reload() if !@cells
    @title = title
    @meta_modified = true
  end

  def cells #:nodoc:
    reload() if !@cells
    return @cells
  end

  # An array of spreadsheet rows. Each row contains an array of
  # columns. Note that resulting array is 0-origin so
  # worksheet.rows[0][0] == worksheet[1, 1].
  def rows(skip = 0)
    nc = self.num_cols
    result = ((1 + skip)..self.num_rows).map() do |row|
      (1..nc).map(){ |col| self[row, col] }.freeze()
    end
    return result.freeze()
  end

  # Reloads content of the worksheets from the server.
  # Note that changes you made by []= are discarded if you haven't called save().
  def reload()
    doc = @session.request(:get, @cells_feed_url)
    @max_rows = doc.search("gs:rowCount").text.to_i()
    @max_cols = doc.search("gs:colCount").text.to_i()
    @title = as_utf8(doc.search("/feed/title").text)
    @cells = {}
    @input_values = {}
    for entry in doc.search("entry")
      cell = entry.search("gs:cell")[0]
      row = cell["row"].to_i()
      col = cell["col"].to_i()
      @cells[[row, col]] = as_utf8(cell.inner_text)
      @input_values[[row, col]] = as_utf8(cell["inputValue"])
    end
    # A fresh load means nothing is pending to be saved.
    @modified.clear()
    @meta_modified = false
    return true
  end

  # Saves your changes made by []=, etc. to the server.
  # Returns true if anything was actually sent.
  def save()
    sent = false
    if @meta_modified
      ws_doc = @session.request(:get, self.worksheet_feed_url)
      edit_url = ws_doc.search("link[@rel='edit']")[0]["href"]
      xml = <<-"EOS"
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:gs='http://schemas.google.com/spreadsheets/2006'>
<title>#{h(self.title)}</title>
<gs:rowCount>#{h(self.max_rows)}</gs:rowCount>
<gs:colCount>#{h(self.max_cols)}</gs:colCount>
</entry>
      EOS
      @session.request(:put, edit_url, :data => xml)
      @meta_modified = false
      sent = true
    end
    if !@modified.empty?
      # Gets id and edit URL for each cell.
      # Note that return-empty=true is required to get those info for empty cells.
      cell_entries = {}
      rows = @modified.map(){ |r, c| r }
      cols = @modified.map(){ |r, c| c }
      url = "#{@cells_feed_url}?return-empty=true&min-row=#{rows.min}&max-row=#{rows.max}" +
        "&min-col=#{cols.min}&max-col=#{cols.max}"
      doc = @session.request(:get, url)
      for entry in doc.search("entry")
        row = entry.search("gs:cell")[0]["row"].to_i()
        col = entry.search("gs:cell")[0]["col"].to_i()
        cell_entries[[row, col]] = entry
      end
      # Updates cell values using batch operation.
      # If the data is large, we split it into multiple operations, otherwise batch may fail.
      @modified.each_slice(250) do |chunk|
        xml = <<-EOS
<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:batch="http://schemas.google.com/gdata/batch"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>#{h(@cells_feed_url)}</id>
        EOS
        for row, col in chunk
          value = @cells[[row, col]]
          entry = cell_entries[[row, col]]
          id = entry.search("id").text
          edit_url = entry.search("link[@rel='edit']")[0]["href"]
          xml << <<-EOS
<entry>
<batch:id>#{h(row)},#{h(col)}</batch:id>
<batch:operation type="update"/>
<id>#{h(id)}</id>
<link rel="edit" type="application/atom+xml"
href="#{h(edit_url)}"/>
<gs:cell row="#{h(row)}" col="#{h(col)}" inputValue="#{h(value)}"/>
</entry>
          EOS
        end
        xml << <<-"EOS"
</feed>
        EOS
        result = @session.request(:post, "#{@cells_feed_url}/batch", :data => xml)
        # Fail loudly if the batch was interrupted or any single update failed.
        for entry in result.search("atom:entry")
          interrupted = entry.search("batch:interrupted")[0]
          if interrupted
            raise(GoogleSpreadsheet::Error, "Update has failed: %s" %
              interrupted["reason"])
          end
          if !(entry.search("batch:status")[0]["code"] =~ /^2/)
            raise(GoogleSpreadsheet::Error, "Updating cell %s has failed: %s" %
              [entry.search("atom:id").text, entry.search("batch:status")[0]["reason"]])
          end
        end
      end
      @modified.clear()
      sent = true
    end
    return sent
  end

  # Calls save() and reload().
  def synchronize()
    save()
    reload()
  end

  # Deletes this worksheet. Deletion takes effect right away without calling save().
  def delete()
    ws_doc = @session.request(:get, self.worksheet_feed_url)
    edit_url = ws_doc.search("link[@rel='edit']")[0]["href"]
    @session.request(:delete, edit_url)
  end

  # Returns true if you have changes made by []= which haven't been saved.
  def dirty?
    return !@modified.empty?
  end

  # Creates table for the worksheet and returns GoogleSpreadsheet::Table.
  # +columns+ maps column index to column name.
  # See this document for details:
  # http://code.google.com/intl/en/apis/spreadsheets/docs/3.0/developers_guide_protocol.html#TableFeeds
  def add_table(table_title, summary, columns)
    column_xml = ""
    columns.each do |index, name|
      column_xml += "<gs:column index='#{h(index)}' name='#{h(name)}'/>\n"
    end
    xml = <<-"EOS"
<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<title type='text'>#{h(table_title)}</title>
<summary type='text'>#{h(summary)}</summary>
<gs:worksheet name='#{h(self.title)}' />
<gs:header row='1' />
<gs:data numRows='0' startRow='2'>
#{column_xml}
</gs:data>
</entry>
    EOS
    result = @session.request(:post, self.spreadsheet.tables_feed_url, :data => xml)
    return Table.new(@session, result)
  end

  # Returns list of tables for the worksheet.
  def tables
    return self.spreadsheet.tables.select(){ |t| t.worksheet_title == self.title }
  end

  # List feed URL of the worksheet.
  def list_feed_url
    # Gets the worksheets metafeed.
    entry = @session.request(:get, self.worksheet_feed_url)
    # Gets the URL of list-based feed for the given spreadsheet.
    return as_utf8(entry.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#listfeed']")[0]["href"])
  end

end
end
Changelog note (boundary between two revisions of this file): small modification to the @auth_tokens handling — Session#initialize now always stores a Hash (an empty one by default), so token lookups no longer fail on nil.
# Author: Hiroshi Ichikawa <http://gimite.net/>
# The license of this source is "New BSD Licence"
require "enumerator"
require "set"
require "net/https"
require "open-uri"
require "cgi"
require "uri"
require "rubygems"
require "hpricot"
require "oauth"
Net::HTTP.version_1_2
module GoogleSpreadsheet
# Authenticates with given +mail+ and +password+, and returns GoogleSpreadsheet::Session
# if succeeds. Raises GoogleSpreadsheet::AuthenticationError if fails.
# Google Apps account is supported.
def self.login(mail, password)
  # Thin module-level wrapper: Session.login authenticates with the given
  # credentials and hands back the new session.
  Session.login(mail, password)
end
# Authenticates with given OAuth token.
#
# For generating oauth_token, you can proceed as follow:
#
# 1) First generate OAuth consumer object with key and secret for your site by registering site with google
# @consumer = OAuth::Consumer.new( "key","secret", {:site=>"https://agree2"})
# 2) Request token with OAuth
# @request_token = @consumer.get_request_token
# session[:request_token] = @request_token
# redirect_to @request_token.authorize_url
# 3) Create an oauth access token
# @oauth_access_token = @request_token.get_access_token
# @access_token = OAuth::AccessToken.new(@consumer, @oauth_access_token.token, @oauth_access_token.secret)
#
# See these documents for details:
#
# - http://oauth.rubyforge.org/
# - http://code.google.com/apis/accounts/docs/OAuth.html
def self.login_with_oauth(oauth_token)
  # Thin module-level wrapper over Session.login_with_oauth.
  Session.login_with_oauth(oauth_token)
end
# Restores GoogleSpreadsheet::Session from +path+ and returns it.
# If +path+ doesn't exist or authentication has failed, prompts mail and password on console,
# authenticates with them, stores the session to +path+ and returns it.
#
# This method requires Highline library: http://rubyforge.org/projects/highline/
def self.saved_session(path = ENV["HOME"] + "/.ruby_google_spreadsheet.token")
  # Token file layout: first line is the :wise token, second the :writely token.
  tokens = {}
  if File.exist?(path)
    # File.open instead of Kernel#open: Kernel#open would execute a command
    # if the path ever began with "|".
    File.open(path) do |f|
      [:wise, :writely].each do |auth|
        line = f.gets()
        tokens[auth] = line && line.chomp()
      end
    end
  end
  session = Session.new(tokens)
  # On authentication failure, interactively ask for credentials, re-login
  # and persist the fresh tokens (file readable only by the owner: 0600).
  session.on_auth_fail = proc() do
    begin
      require "highline"
    rescue LoadError
      raise(LoadError,
        "GoogleSpreadsheet.saved_session requires Highline library.\n" +
        "Run\n" +
        " \$ sudo gem install highline\n" +
        "to install it.")
    end
    highline = HighLine.new()
    mail = highline.ask("Mail: ")
    password = highline.ask("Password: "){ |q| q.echo = false }
    session.login(mail, password)
    File.open(path, "w", 0600) do |f|
      f.puts(session.auth_token(:wise))
      f.puts(session.auth_token(:writely))
    end
    true
  end
  # No stored token at all: force the interactive prompt right away.
  if !session.auth_token
    session.on_auth_fail.call()
  end
  return session
end
module Util #:nodoc:

  module_function

  # Builds an application/x-www-form-urlencoded query string from a Hash of
  # String keys/values.
  def encode_query(params)
    params.map { |key, value| "#{CGI.escape(key)}=#{CGI.escape(value)}" }.join("&")
  end

  # HTML-escapes +str+ (after to_s) for embedding in XML/HTML payloads.
  def h(str)
    CGI.escapeHTML(str.to_s)
  end

  # Tags +str+ as UTF-8 when the String class supports encodings (Ruby 1.9+);
  # returns it unchanged otherwise. Does not transcode the bytes.
  def as_utf8(str)
    str.respond_to?(:force_encoding) ? str.force_encoding("UTF-8") : str
  end

end
# Raised when spreadsheets.google.com has returned error.
class Error < RuntimeError
  # Base error for this library: raised by Session#request whenever
  # spreadsheets.google.com returns a non-2xx response (other than the
  # 401 case, which raises the AuthenticationError subclass).
end
# Raised when GoogleSpreadsheet.login has failed.
class AuthenticationError < GoogleSpreadsheet::Error
  # Raised when GoogleSpreadsheet.login / Session#login fails, or when a
  # request gets a 401 response from the server.
end
# Use GoogleSpreadsheet.login or GoogleSpreadsheet.saved_session to get
# GoogleSpreadsheet::Session object.
class Session

  include(Util)
  extend(Util)

  # The same as GoogleSpreadsheet.login.
  def self.login(mail, password)
    session = Session.new()
    session.login(mail, password)
    return session
  end

  # The same as GoogleSpreadsheet.login_with_oauth.
  def self.login_with_oauth(oauth_token)
    # Return the session explicitly; the old code left it in an unused local
    # and relied on the assignment being the method's last expression.
    return Session.new(nil, oauth_token)
  end

  # Restores session using return value of auth_tokens method of previous session.
  def initialize(auth_tokens = nil, oauth_token = nil)
    @oauth_token = oauth_token
    # @auth_tokens is always a Hash (empty by default) so that token lookups
    # in auth_token()/auth_header() never hit nil.
    @auth_tokens = auth_tokens || {}
  end

  # Authenticates with given +mail+ and +password+, and updates current session object
  # if succeeds. Raises GoogleSpreadsheet::AuthenticationError if fails.
  # Google Apps account is supported.
  def login(mail, password)
    begin
      @auth_tokens = {}
      authenticate(mail, password, :wise)
      authenticate(mail, password, :writely)
    rescue GoogleSpreadsheet::Error => ex
      # Give the on_auth_fail hook a chance to recover before giving up.
      return true if @on_auth_fail && @on_auth_fail.call()
      raise(AuthenticationError, "authentication failed for #{mail}: #{ex.message}")
    end
  end

  # Authentication tokens.
  attr_reader(:auth_tokens)

  # Authentication token.
  def auth_token(auth = :wise)
    return @auth_tokens[auth]
  end

  # Proc or Method called when authentication has failed.
  # When this function returns +true+, it tries again.
  attr_accessor :on_auth_fail

  def auth_header(auth) #:nodoc:
    # Returns an empty header both for :none and when no token has been
    # obtained yet, instead of sending "GoogleLogin auth=" with a nil token.
    token = auth == :none ? nil : @auth_tokens[auth]
    if token
      return {"Authorization" => "GoogleLogin auth=#{token}"}
    else
      return {}
    end
  end

  # Returns list of spreadsheets for the user as array of GoogleSpreadsheet::Spreadsheet.
  # You can specify query parameters described at
  # http://code.google.com/apis/spreadsheets/docs/2.0/reference.html#Parameters
  #
  # e.g.
  #   session.spreadsheets
  #   session.spreadsheets("title" => "hoge")
  def spreadsheets(params = {})
    query = encode_query(params)
    doc = request(:get, "https://spreadsheets.google.com/feeds/spreadsheets/private/full?#{query}")
    result = []
    for entry in doc.search("entry")
      title = as_utf8(entry.search("title").text)
      url = as_utf8(entry.search(
        "link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
      result.push(Spreadsheet.new(self, url, title))
    end
    return result
  end

  # Returns GoogleSpreadsheet::Spreadsheet with given +key+.
  #
  # e.g.
  #   # http://spreadsheets.google.com/ccc?key=pz7XtlQC-PYx-jrVMJErTcg&hl=ja
  #   session.spreadsheet_by_key("pz7XtlQC-PYx-jrVMJErTcg")
  def spreadsheet_by_key(key)
    url = "https://spreadsheets.google.com/feeds/worksheets/#{key}/private/full"
    return Spreadsheet.new(self, url)
  end

  # Returns GoogleSpreadsheet::Spreadsheet with given +url+. You must specify either of:
  # - URL of the page you open to access the spreadsheet in your browser
  # - URL of worksheet-based feed of the spreadseet
  #
  # e.g.
  #   session.spreadsheet_by_url(
  #     "http://spreadsheets.google.com/ccc?key=pz7XtlQC-PYx-jrVMJErTcg&hl=en")
  #   session.spreadsheet_by_url(
  #     "https://spreadsheets.google.com/feeds/worksheets/pz7XtlQC-PYx-jrVMJErTcg/private/full")
  def spreadsheet_by_url(url)
    # Tries to parse it as URL of human-readable spreadsheet.
    uri = URI.parse(url)
    if uri.host == "spreadsheets.google.com" && uri.path =~ /\/ccc$/
      # $1 is set by the regexp match inside the find() block.
      if (uri.query || "").split(/&/).find(){ |s| s=~ /^key=(.*)$/ }
        return spreadsheet_by_key($1)
      end
    end
    # Assumes the URL is worksheets feed URL.
    return Spreadsheet.new(self, url)
  end

  # Returns GoogleSpreadsheet::Worksheet with given +url+.
  # You must specify URL of cell-based feed of the worksheet.
  #
  # e.g.
  #   session.worksheet_by_url(
  #     "http://spreadsheets.google.com/feeds/cells/pz7XtlQC-PYxNmbBVgyiNWg/od6/private/full")
  def worksheet_by_url(url)
    return Worksheet.new(self, nil, url)
  end

  # Creates new spreadsheet and returns the new GoogleSpreadsheet::Spreadsheet.
  #
  # e.g.
  #   session.create_spreadsheet("My new sheet")
  def create_spreadsheet(
      title = "Untitled",
      feed_url = "https://docs.google.com/feeds/documents/private/full")
    xml = <<-"EOS"
<atom:entry xmlns:atom="http://www.w3.org/2005/Atom" xmlns:docs="http://schemas.google.com/docs/2007">
<atom:category scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet" label="spreadsheet"/>
<atom:title>#{h(title)}</atom:title>
</atom:entry>
    EOS
    doc = request(:post, feed_url, :data => xml, :auth => :writely)
    ss_url = as_utf8(doc.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
    return Spreadsheet.new(self, ss_url, title)
  end

  # Issues an HTTP request and returns the converted response.
  # params:
  # - :data ... request body; its presence selects the POST/PUT call arity.
  # - :auth ... which token to send (:wise, :writely or :none). Default :wise.
  # - :header ... extra request headers (overrides the default Content-Type).
  # - :response_type ... :xml (Hpricot document, default) or :raw (body String).
  def request(method, url, params = {}) #:nodoc:
    # Always uses HTTPS.
    uri = URI.parse(url.gsub(%r{^http://}, "https://"))
    data = params[:data]
    auth = params[:auth] || :wise
    if params[:header]
      add_header = params[:header]
    else
      add_header = data ? {"Content-Type" => "application/atom+xml"} : {}
    end
    response_type = params[:response_type] || :xml
    if @oauth_token
      if method == :delete || method == :get
        response = @oauth_token.__send__(method, url, add_header)
      else
        response = @oauth_token.__send__(method, url, data, add_header)
      end
      return convert_response(response, response_type)
    else
      http = Net::HTTP.new(uri.host, uri.port)
      http.use_ssl = true
      # SECURITY NOTE: certificate verification is disabled, which permits
      # man-in-the-middle attacks. Kept as-is for compatibility; consider
      # OpenSSL::SSL::VERIFY_PEER.
      http.verify_mode = OpenSSL::SSL::VERIFY_NONE
      http.start() do
        while true
          path = uri.path + (uri.query ? "?#{uri.query}" : "")
          header = auth_header(auth).merge(add_header)
          if method == :delete || method == :get
            response = http.__send__(method, path, header)
          else
            response = http.__send__(method, path, data, header)
          end
          # On 401, let the on_auth_fail hook re-authenticate and retry.
          if response.code == "401" && @on_auth_fail && @on_auth_fail.call()
            next
          end
          if !(response.code =~ /^2/)
            raise(
              response.code == "401" ? AuthenticationError : GoogleSpreadsheet::Error,
              "Response code #{response.code} for #{method} #{url}: " +
              CGI.unescapeHTML(response.body))
          end
          return convert_response(response, response_type)
        end
      end
    end
  end

  private

  # Converts a response object per +response_type+ (:xml or :raw).
  def convert_response(response, response_type)
    case response_type
    when :xml
      return Hpricot.XML(response.body)
    when :raw
      return response.body
    else
      raise("unknown params[:response_type]: %s" % response_type)
    end
  end

  # Performs ClientLogin for the given service (:wise = spreadsheets,
  # :writely = docs) and stores the obtained token in @auth_tokens.
  def authenticate(mail, password, auth)
    params = {
      "accountType" => "HOSTED_OR_GOOGLE",
      "Email" => mail,
      "Passwd" => password,
      "service" => auth.to_s(),
      "source" => "Gimite-RubyGoogleSpreadsheet-1.00",
    }
    response = request(:post,
      "https://www.google.com/accounts/ClientLogin",
      :data => encode_query(params), :auth => :none, :header => {}, :response_type => :raw)
    @auth_tokens[auth] = response.slice(/^Auth=(.*)$/, 1)
  end

end
# Use methods in GoogleSpreadsheet::Session to get GoogleSpreadsheet::Spreadsheet object.
class Spreadsheet

  include(Util)

  def initialize(session, worksheets_feed_url, title = nil) #:nodoc:
    @session = session
    @worksheets_feed_url = worksheets_feed_url
    @title = title
  end

  # URL of worksheet-based feed of the spreadsheet.
  attr_reader(:worksheets_feed_url)

  # Title of the spreadsheet. So far only available if you get this object by
  # GoogleSpreadsheet::Session#spreadsheets.
  attr_reader(:title)

  # Key of the spreadsheet, extracted from the worksheets feed URL.
  def key
    if !(@worksheets_feed_url =~
        %r{^https?://spreadsheets.google.com/feeds/worksheets/(.*)/private/full$})
      raise(GoogleSpreadsheet::Error,
        "worksheets feed URL is in unknown format: #{@worksheets_feed_url}")
    end
    return $1
  end

  # Tables feed URL of the spreadsheet.
  def tables_feed_url
    return "https://spreadsheets.google.com/feeds/#{self.key}/tables"
  end

  # URL of feed used in document list feed API.
  def document_feed_url
    return "https://docs.google.com/feeds/documents/private/full/spreadsheet%3A#{self.key}"
  end

  # Creates copy of this spreadsheet with the given name.
  # Returns a GoogleSpreadsheet::Spreadsheet for the new copy.
  def duplicate(new_name = nil)
    new_name ||= (@title ? "Copy of " + @title : "Untitled")
    get_url = "https://spreadsheets.google.com/feeds/download/spreadsheets/Export?key=#{key}&exportFormat=ods"
    ods = @session.request(:get, get_url, :response_type => :raw)
    url = "https://docs.google.com/feeds/documents/private/full"
    header = {
      "Content-Type" => "application/x-vnd.oasis.opendocument.spreadsheet",
      "Slug" => URI.encode(new_name),
    }
    doc = @session.request(:post, url, :data => ods, :auth => :writely, :header => header)
    ss_url = as_utf8(doc.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#worksheetsfeed']")[0]["href"])
    # BUGFIX: the returned object used to carry the ORIGINAL spreadsheet's
    # title; the copy is created under +new_name+ (sent via the Slug header).
    return Spreadsheet.new(@session, ss_url, new_name)
  end

  # If +permanent+ is +false+, moves the spreadsheet to the trash.
  # If +permanent+ is +true+, deletes the spreadsheet permanently.
  def delete(permanent = false)
    @session.request(:delete,
      self.document_feed_url + (permanent ? "?delete=true" : ""),
      :auth => :writely, :header => {"If-Match" => "*"})
  end

  # Renames title of the spreadsheet.
  def rename(title)
    doc = @session.request(:get, self.document_feed_url)
    edit_url = doc.search("link[@rel='edit']")[0]["href"]
    xml = <<-"EOS"
<atom:entry
xmlns:atom="http://www.w3.org/2005/Atom"
xmlns:docs="http://schemas.google.com/docs/2007">
<atom:category
scheme="http://schemas.google.com/g/2005#kind"
term="http://schemas.google.com/docs/2007#spreadsheet" label="spreadsheet"/>
<atom:title>#{h(title)}</atom:title>
</atom:entry>
    EOS
    @session.request(:put, edit_url, :data => xml)
  end

  # Returns worksheets of the spreadsheet as array of GoogleSpreadsheet::Worksheet.
  def worksheets
    doc = @session.request(:get, @worksheets_feed_url)
    result = []
    for entry in doc.search("entry")
      title = as_utf8(entry.search("title").text)
      url = as_utf8(entry.search(
        "link[@rel='http://schemas.google.com/spreadsheets/2006#cellsfeed']")[0]["href"])
      result.push(Worksheet.new(@session, self, url, title))
    end
    return result.freeze()
  end

  # Adds a new worksheet to the spreadsheet. Returns added GoogleSpreadsheet::Worksheet.
  def add_worksheet(title, max_rows = 100, max_cols = 20)
    xml = <<-"EOS"
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:gs='http://schemas.google.com/spreadsheets/2006'>
<title>#{h(title)}</title>
<gs:rowCount>#{h(max_rows)}</gs:rowCount>
<gs:colCount>#{h(max_cols)}</gs:colCount>
</entry>
    EOS
    doc = @session.request(:post, @worksheets_feed_url, :data => xml)
    url = as_utf8(doc.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#cellsfeed']")[0]["href"])
    return Worksheet.new(@session, self, url, title)
  end

  # Returns list of tables in the spreadsheet.
  def tables
    doc = @session.request(:get, self.tables_feed_url)
    return doc.search("entry").map(){ |e| Table.new(@session, e) }.freeze()
  end

end
# Use GoogleSpreadsheet::Worksheet#add_table to create table.
# Use GoogleSpreadsheet::Worksheet#tables to get GoogleSpreadsheet::Table objects.
class Table
  include(Util)

  # +entry+ is the Hpricot element of the table's Atom entry; caches the
  # owning worksheet's name and the records feed URL from it.
  def initialize(session, entry) #:nodoc:
    @columns = {}
    @worksheet_title = as_utf8(entry.search("gs:worksheet")[0]["name"])
    @records_url = as_utf8(entry.search("content")[0]["src"])
    @session = session
  end

  # Title of the worksheet the table belongs to.
  attr_reader(:worksheet_title)

  # Adds a record.
  # +values+ maps field (column) names to cell values. The record is posted
  # to the server immediately (there is no separate save() step).
  def add_record(values)
    fields = ""
    values.each do |name, value|
      fields += "<gs:field name='#{h(name)}'>#{h(value)}</gs:field>"
    end
    xml =<<-EOS
<entry
xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
#{fields}
</entry>
    EOS
    @session.request(:post, @records_url, :data => xml)
  end

  # Returns records in the table as an Array of GoogleSpreadsheet::Record.
  def records
    doc = @session.request(:get, @records_url)
    return doc.search("entry").map(){ |e| Record.new(@session, e) }
  end

end
# Use GoogleSpreadsheet::Table#records to get GoogleSpreadsheet::Record objects.
# A Record is a Hash of field name => field value.
class Record < Hash
  def initialize(session, entry) #:nodoc:
    @session = session
    entry.search("gs:field").each do |field|
      self[as_utf8(field["name"])] = as_utf8(field.inner_text)
    end
  end

  def inspect #:nodoc:
    pairs = self.map { |key, value| "%p => %p" % [key, value] }
    return "\#<%p:{%s}>" % [self.class, pairs.join(", ")]
  end
end
# Use GoogleSpreadsheet::Spreadsheet#worksheets to get GoogleSpreadsheet::Worksheet object.
#
# Cell contents are loaded lazily (first access triggers reload()) and edits
# are buffered locally until save() is called.
class Worksheet
  include(Util)

  def initialize(session, spreadsheet, cells_feed_url, title = nil) #:nodoc:
    @session = session
    @spreadsheet = spreadsheet
    @cells_feed_url = cells_feed_url
    @title = title
    # nil means "not fetched yet"; populated by reload().
    @cells = nil
    @input_values = nil
    # Set of [row, col] pairs edited locally and not yet pushed by save().
    @modified = Set.new()
  end

  # URL of cell-based feed of the worksheet.
  attr_reader(:cells_feed_url)

  # URL of worksheet feed URL of the worksheet.
  def worksheet_feed_url
    # I don't know good way to get worksheet feed URL from cells feed URL.
    # Probably it would be cleaner to keep worksheet feed URL and get cells feed URL
    # from it.
    if !(@cells_feed_url =~
        %r{^https?://spreadsheets.google.com/feeds/cells/(.*)/(.*)/private/full$})
      raise(GoogleSpreadsheet::Error,
        "cells feed URL is in unknown format: #{@cells_feed_url}")
    end
    return "https://spreadsheets.google.com/feeds/worksheets/#{$1}/private/full/#{$2}"
  end

  # GoogleSpreadsheet::Spreadsheet which this worksheet belongs to.
  def spreadsheet
    if !@spreadsheet
      # Derive the spreadsheet key from the cells feed URL when the worksheet
      # was constructed without an explicit spreadsheet.
      if !(@cells_feed_url =~
          %r{^https?://spreadsheets.google.com/feeds/cells/(.*)/(.*)/private/full$})
        raise(GoogleSpreadsheet::Error,
          "cells feed URL is in unknown format: #{@cells_feed_url}")
      end
      @spreadsheet = @session.spreadsheet_by_key($1)
    end
    return @spreadsheet
  end

  # Returns content of the cell as String. Top-left cell is [1, 1].
  def [](row, col)
    return self.cells[[row, col]] || ""
  end

  # Updates content of the cell.
  # Note that update is not sent to the server until you call save().
  # Top-left cell is [1, 1].
  #
  # e.g.
  #   worksheet[2, 1] = "hoge"
  #   worksheet[1, 3] = "=A1+B1"
  def []=(row, col, value)
    reload() if !@cells
    @cells[[row, col]] = value
    @input_values[[row, col]] = value
    @modified.add([row, col])
    # Grow the sheet automatically when writing beyond the current bounds.
    self.max_rows = row if row > @max_rows
    self.max_cols = col if col > @max_cols
  end

  # Returns the value or the formula of the cell. Top-left cell is [1, 1].
  #
  # If user input "=A1+B1" to cell [1, 3], worksheet[1, 3] is "3" for example and
  # worksheet.input_value(1, 3) is "=RC[-2]+RC[-1]".
  def input_value(row, col)
    reload() if !@cells
    return @input_values[[row, col]] || ""
  end

  # Row number of the bottom-most non-empty row.
  def num_rows
    reload() if !@cells
    return @cells.keys.map(){ |r, c| r }.max || 0
  end

  # Column number of the right-most non-empty column.
  def num_cols
    reload() if !@cells
    return @cells.keys.map(){ |r, c| c }.max || 0
  end

  # Number of rows including empty rows.
  def max_rows
    reload() if !@cells
    return @max_rows
  end

  # Updates number of rows.
  # Note that update is not sent to the server until you call save().
  def max_rows=(rows)
    reload() if !@cells
    @max_rows = rows
    @meta_modified = true
  end

  # Number of columns including empty columns.
  def max_cols
    reload() if !@cells
    return @max_cols
  end

  # Updates number of columns.
  # Note that update is not sent to the server until you call save().
  def max_cols=(cols)
    reload() if !@cells
    @max_cols = cols
    @meta_modified = true
  end

  # Title of the worksheet (shown as tab label in Web interface).
  def title
    reload() if !@title
    return @title
  end

  # Updates title of the worksheet.
  # Note that update is not sent to the server until you call save().
  def title=(title)
    reload() if !@cells
    @title = title
    @meta_modified = true
  end

  def cells #:nodoc:
    reload() if !@cells
    return @cells
  end

  # An array of spreadsheet rows. Each row contains an array of
  # columns. Note that resulting array is 0-origin so
  # worksheet.rows[0][0] == worksheet[1, 1].
  def rows(skip = 0)
    nc = self.num_cols
    result = ((1 + skip)..self.num_rows).map() do |row|
      (1..nc).map(){ |col| self[row, col] }.freeze()
    end
    return result.freeze()
  end

  # Reloads content of the worksheets from the server.
  # Note that changes you made by []= is discarded if you haven't called save().
  def reload()
    doc = @session.request(:get, @cells_feed_url)
    @max_rows = doc.search("gs:rowCount").text.to_i()
    @max_cols = doc.search("gs:colCount").text.to_i()
    @title = as_utf8(doc.search("/feed/title").text)
    @cells = {}
    @input_values = {}
    for entry in doc.search("entry")
      cell = entry.search("gs:cell")[0]
      row = cell["row"].to_i()
      col = cell["col"].to_i()
      @cells[[row, col]] = as_utf8(cell.inner_text)
      @input_values[[row, col]] = as_utf8(cell["inputValue"])
    end
    # Local pending edits are intentionally dropped on reload.
    @modified.clear()
    @meta_modified = false
    return true
  end

  # Saves your changes made by []=, etc. to the server.
  # Two phases: worksheet metadata (title/size) first, then cell contents via
  # the batch protocol. Returns true if anything was sent.
  def save()
    sent = false
    if @meta_modified
      ws_doc = @session.request(:get, self.worksheet_feed_url)
      edit_url = ws_doc.search("link[@rel='edit']")[0]["href"]
      xml = <<-"EOS"
<entry xmlns='http://www.w3.org/2005/Atom'
xmlns:gs='http://schemas.google.com/spreadsheets/2006'>
<title>#{h(self.title)}</title>
<gs:rowCount>#{h(self.max_rows)}</gs:rowCount>
<gs:colCount>#{h(self.max_cols)}</gs:colCount>
</entry>
      EOS
      @session.request(:put, edit_url, :data => xml)
      @meta_modified = false
      sent = true
    end
    if !@modified.empty?
      # Gets id and edit URL for each cell.
      # Note that return-empty=true is required to get those info for empty cells.
      cell_entries = {}
      rows = @modified.map(){ |r, c| r }
      cols = @modified.map(){ |r, c| c }
      url = "#{@cells_feed_url}?return-empty=true&min-row=#{rows.min}&max-row=#{rows.max}" +
        "&min-col=#{cols.min}&max-col=#{cols.max}"
      doc = @session.request(:get, url)
      for entry in doc.search("entry")
        row = entry.search("gs:cell")[0]["row"].to_i()
        col = entry.search("gs:cell")[0]["col"].to_i()
        cell_entries[[row, col]] = entry
      end
      # Updates cell values using batch operation.
      # If the data is large, we split it into multiple operations, otherwise batch may fail.
      @modified.each_slice(250) do |chunk|
        xml = <<-EOS
<feed xmlns="http://www.w3.org/2005/Atom"
xmlns:batch="http://schemas.google.com/gdata/batch"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<id>#{h(@cells_feed_url)}</id>
        EOS
        for row, col in chunk
          value = @cells[[row, col]]
          entry = cell_entries[[row, col]]
          id = entry.search("id").text
          edit_url = entry.search("link[@rel='edit']")[0]["href"]
          xml << <<-EOS
<entry>
<batch:id>#{h(row)},#{h(col)}</batch:id>
<batch:operation type="update"/>
<id>#{h(id)}</id>
<link rel="edit" type="application/atom+xml"
href="#{h(edit_url)}"/>
<gs:cell row="#{h(row)}" col="#{h(col)}" inputValue="#{h(value)}"/>
</entry>
          EOS
        end
        xml << <<-"EOS"
</feed>
        EOS
        result = @session.request(:post, "#{@cells_feed_url}/batch", :data => xml)
        # The batch response reports per-cell status; any non-2xx code aborts.
        for entry in result.search("atom:entry")
          interrupted = entry.search("batch:interrupted")[0]
          if interrupted
            raise(GoogleSpreadsheet::Error, "Update has failed: %s" %
              interrupted["reason"])
          end
          if !(entry.search("batch:status")[0]["code"] =~ /^2/)
            raise(GoogleSpreadsheet::Error, "Updating cell %s has failed: %s" %
              [entry.search("atom:id").text, entry.search("batch:status")[0]["reason"]])
          end
        end
      end
      @modified.clear()
      sent = true
    end
    return sent
  end

  # Calls save() and reload().
  def synchronize()
    save()
    reload()
  end

  # Deletes this worksheet. Deletion takes effect right away without calling save().
  def delete()
    ws_doc = @session.request(:get, self.worksheet_feed_url)
    edit_url = ws_doc.search("link[@rel='edit']")[0]["href"]
    @session.request(:delete, edit_url)
  end

  # Returns true if you have changes made by []= which haven't been saved.
  def dirty?
    return !@modified.empty?
  end

  # Creates table for the worksheet and returns GoogleSpreadsheet::Table.
  # See this document for details:
  # http://code.google.com/intl/en/apis/spreadsheets/docs/3.0/developers_guide_protocol.html#TableFeeds
  def add_table(table_title, summary, columns)
    column_xml = ""
    columns.each do |index, name|
      column_xml += "<gs:column index='#{h(index)}' name='#{h(name)}'/>\n"
    end
    xml = <<-"EOS"
<entry xmlns="http://www.w3.org/2005/Atom"
xmlns:gs="http://schemas.google.com/spreadsheets/2006">
<title type='text'>#{h(table_title)}</title>
<summary type='text'>#{h(summary)}</summary>
<gs:worksheet name='#{h(self.title)}' />
<gs:header row='1' />
<gs:data numRows='0' startRow='2'>
#{column_xml}
</gs:data>
</entry>
    EOS
    result = @session.request(:post, self.spreadsheet.tables_feed_url, :data => xml)
    return Table.new(@session, result)
  end

  # Returns list of tables for the workwheet.
  def tables
    return self.spreadsheet.tables.select(){ |t| t.worksheet_title == self.title }
  end

  # List feed URL of the worksheet.
  def list_feed_url
    # Gets the worksheets metafeed.
    entry = @session.request(:get, self.worksheet_feed_url)
    # Gets the URL of list-based feed for the given spreadsheet.
    return as_utf8(entry.search(
      "link[@rel='http://schemas.google.com/spreadsheets/2006#listfeed']")[0]["href"])
  end
end
end
|
module Spree
  # Callback endpoints Worldpay redirects the shopper to after an iframe
  # payment attempt: success, cancel, failure, pending and error.
  class WorldpayIframeController < StoreController
    before_action :setup_order

    # Legacy implementation kept for reference:
    # def success
    #   payment = Spree::Payment.new
    #   payment_method = Spree::PaymentMethod.find_by_id(params[:payment_method])
    #   payment_method.create_payment
    #   unless @order.reload.next
    #     flash[:error] = @order.errors.full_messages.join("\n")
    #     redirect_to checkout_state_path(@order.state) and return
    #   end
    #   if @order.completed?
    #     @current_order = nil
    #     flash.notice = Spree.t(:order_processed_successfully)
    #     flash['order_completed'] = true
    #     redirect_to spree.order_path(@order)
    #   else
    #     redirect_to checkout_state_path(@order.state)
    #   end
    # end

    # Records a payment against the order and advances checkout when
    # Worldpay reports the payment as AUTHORISED.
    def success
      unless authorized?
        flash[:notice] = Spree.t(:payment_processing_failed)
        redirect_to checkout_state_path(@order.state) and return
      end
      payment_method = Spree::PaymentMethod.find_by_id(params[:payment_method])
      # Keep Worldpay's order code as the payment response code for reconciliation.
      @order.payments.create!(
        :amount => @order.total,
        :payment_method => payment_method,
        :response_code => params[:order_code]
      )
      @order.next
      # NOTE(review): `complete?` checks the state machine state; verify whether
      # `completed?` (completed_at set) was intended here.
      if @order.complete?
        flash[:notice] = Spree.t(:order_processed_successfully)
        redirect_to order_path(@order, :token => @order.guest_token)
      else
        redirect_to checkout_state_path(@order.state)
      end
    end

    # Shopper cancelled the payment in the Worldpay iframe.
    def cancel
      redirect_to checkout_state_path(@order.state)
    end

    def failure
      # Bug fix: FlashHash has no `error=` writer (only notice/alert), so the
      # original `flash.error = ...` raised NoMethodError; use the hash key.
      flash[:error] = "Order has not completed"
      redirect_to checkout_state_path(@order.state)
    end

    def pending
      flash[:notice] = "Order is in process"
      redirect_to checkout_state_path(@order.state)
    end

    def error
      flash[:notice] = "Order is not completed"
      redirect_to checkout_state_path(@order.state)
    end

    private

    # Worldpay signals the payment result in the paymentStatus query param.
    def authorized?
      params[:paymentStatus] == 'AUTHORISED'
    end

    # Loads the order referenced by the callback for every action.
    def setup_order
      @order = Spree::Order.find_by_number(params[:order_number])
    end
  end
end
Updated Worldpay payment routes to handle flash messages
module Spree
  # Callback endpoints Worldpay redirects the shopper to after an iframe
  # payment attempt: success, cancel, failure, pending and error.
  class WorldpayIframeController < StoreController
    before_action :setup_order

    # Legacy implementation kept for reference:
    # def success
    #   payment = Spree::Payment.new
    #   payment_method = Spree::PaymentMethod.find_by_id(params[:payment_method])
    #   payment_method.create_payment
    #   unless @order.reload.next
    #     flash[:error] = @order.errors.full_messages.join("\n")
    #     redirect_to checkout_state_path(@order.state) and return
    #   end
    #   if @order.completed?
    #     @current_order = nil
    #     flash[:notice] = Spree.t(:order_processed_successfully)
    #     flash['order_completed'] = true
    #     redirect_to spree.order_path(@order)
    #   else
    #     redirect_to checkout_state_path(@order.state)
    #   end
    # end

    # Records a payment against the order and advances checkout when
    # Worldpay reports the payment as AUTHORISED.
    def success
      unless authorized?
        flash[:notice] = Spree.t(:payment_processing_failed)
        redirect_to checkout_state_path(@order.state) and return
      end
      payment_method = Spree::PaymentMethod.find_by_id(params[:payment_method])
      # Keep Worldpay's order code as the payment response code for reconciliation.
      @order.payments.create!(
        :amount => @order.total,
        :payment_method => payment_method,
        :response_code => params[:order_code]
      )
      @order.next
      if @order.complete?
        flash[:notice] = Spree.t(:order_processed_successfully)
        redirect_to order_path(@order, :token => @order.guest_token)
      else
        redirect_to checkout_state_path(@order.state)
      end
    end

    # Shopper cancelled the payment in the Worldpay iframe.
    def cancel
      # Route callback params through the logger (not `puts`): they may carry
      # sensitive payment data and should respect normal log filtering/levels.
      Rails.logger.debug { "Worldpay cancel callback: #{params.inspect}" }
      flash[:error] = "Payment has been canceled"
      redirect_to checkout_state_path(@order.state)
    end

    def failure
      Rails.logger.debug { "Worldpay failure callback: #{params.inspect}" }
      flash[:error] = "Order has not completed due to payment failure"
      redirect_to checkout_state_path(@order.state)
    end

    def pending
      Rails.logger.debug { "Worldpay pending callback: #{params.inspect}" }
      flash[:notice] = "Order is in process"
      redirect_to checkout_state_path(@order.state)
    end

    def error
      Rails.logger.debug { "Worldpay error callback: #{params.inspect}" }
      flash[:error] = "Order is not completed due to payment error"
      redirect_to checkout_state_path(@order.state)
    end

    private

    # Worldpay signals the payment result in the paymentStatus query param.
    def authorized?
      params[:paymentStatus] == 'AUTHORISED'
    end

    # Loads the order referenced by the callback for every action.
    def setup_order
      @order = Spree::Order.find_by_number(params[:order_number])
    end
  end
end
|
# JetCollins monkeypatches to add Collins integration
module Jetpants
  class Pool
    ##### JETCOLLINS MIX-IN ####################################################

    include Plugin::JetCollins

    # Used at startup time, to keep track of parent/child shard relationships
    attr_accessor :has_parent

    # Collins accessors for configuration asset metadata
    collins_attr_accessor :slave_pool_name, :aliases, :master_read_weight, :config_sort_order

    # Returns a Collins::Asset for this pool. Can optionally create one if not found.
    def collins_asset(create_if_missing=false)
      selector = {
        operation: 'and',
        details: true,
        type: 'CONFIGURATION',
        primary_role: 'MYSQL_POOL',
        pool: "^#{@name.upcase}$",
        status: 'Allocated',
      }
      selector[:remoteLookup] = true if Jetpants.plugins['jetpants_collins']['remote_lookup']
      results = Plugin::JetCollins.find selector, !create_if_missing
      # If we got back multiple results, try ignoring the remote datacenter ones
      if results.count > 1
        filtered_results = results.select {|a| a.location.nil? || a.location.upcase == Plugin::JetCollins.datacenter}
        results = filtered_results if filtered_results.count > 0
      end
      if results.count > 1
        raise "Multiple configuration assets found for pool #{name}"
      elsif results.count == 0 && create_if_missing
        output "Could not find configuration asset for pool; creating now"
        new_tag = 'mysql-' + @name
        asset = Collins::Asset.new type: 'CONFIGURATION', tag: new_tag, status: 'Allocated'
        begin
          Plugin::JetCollins.create!(asset)
        rescue
          # Creation can fail (e.g. asset already exists); fall back to forcing
          # its status. NOTE(review): this bare rescue also swallows unrelated
          # StandardErrors from create!.
          collins_set asset: asset,
                      status: 'Allocated'
        end
        collins_set asset: asset,
                    primary_role: 'MYSQL_POOL',
                    pool: @name.upcase
        Plugin::JetCollins.get new_tag
      elsif results.count == 0 && !create_if_missing
        raise "Could not find configuration asset for pool #{name}"
      else
        results.first
      end
    end

    ##### METHOD OVERRIDES #####################################################

    # Examines the current state of the pool (as known to Jetpants) and updates
    # Collins to reflect this, in terms of the pool's configuration asset as
    # well as the individual hosts.
    def sync_configuration
      asset = collins_asset(true)
      # NOTE(review): `aliases.join(',') || ''` — the `|| ''` is dead code since
      # Array#join never returns nil; if `aliases` itself can be nil, this still
      # raises. Possibly `(aliases || []).join(',')` was intended.
      collins_set asset: asset,
                  slave_pool_name: slave_name || '',
                  aliases: aliases.join(',') || '',
                  master_read_weight: master_read_weight
      [@master, slaves].flatten.each do |db|
        current_status = (db.collins_status || '').downcase
        # Never overwrite a host deliberately parked in Maintenance.
        db.collins_status = 'Allocated:RUNNING' unless current_status == 'maintenance'
        db.collins_pool = @name
      end
      @master.collins_secondary_role = 'MASTER'
      slaves(:active).each do |db|
        db.collins_secondary_role = 'ACTIVE_SLAVE'
        weight = @active_slave_weights[db]
        # 100 is the implicit default weight, so store it as blank.
        db.collins_slave_weight = (weight == 100 ? '' : weight)
      end
      slaves(:standby).each {|db| db.collins_secondary_role = 'STANDBY_SLAVE'}
      slaves(:backup).each {|db| db.collins_secondary_role = 'BACKUP_SLAVE'}
      true
    end

    # Return the count of Allocated:RUNNING slaves
    # NOTE(review): despite the comment above, this returns the matching slave
    # objects (Array#select), not a count.
    def running_slaves(secondary_role=false)
      slaves.select { |slave|
        collins_secondary_role = Jetpants.topology.normalize_roles(slave.collins_secondary_role).first rescue false
        (slave.collins_status == 'Allocated:RUNNING') && (secondary_role ? collins_secondary_role == secondary_role : true)
      }
    end

    # If the pool's master hasn't been probed yet, return active_slaves list
    # based strictly on what we found in Collins. This is a major speed-up at
    # start-up time, especially for tasks that need to iterate over all pools.
    alias :active_slaves_from_probe :active_slaves
    def active_slaves
      if @master.probed?
        active_slaves_from_probe
      else
        @active_slave_weights.keys
      end
    end

    ##### CALLBACKS ############################################################

    # Pushes slave removal to Collins. (Normally this type of logic is handled by
    # Pool#sync_configuration, but that won't handle this case, since
    # sync_configuration only updates hosts still in the pool.)
    def after_remove_slave!(slave_db)
      slave_db.collins_pool = slave_db.collins_secondary_role = slave_db.collins_slave_weight = ''
      current_status = (slave_db.collins_status || '').downcase
      slave_db.collins_status = 'Unallocated' unless current_status == 'maintenance'
    end

    # If the demoted master was offline, record some info in Collins, otherwise
    # there will be 2 masters listed
    def after_master_promotion!(promoted, enslave_old_master=true)
      Jetpants.topology.clear_asset_cache
      # Find the master asset(s) for this pool, filtering down to only current datacenter
      assets = Jetpants.topology.server_node_assets(@name, :master)
      assets.reject! {|a| a.location && a.location.upcase != Plugin::JetCollins.datacenter}
      assets.map(&:to_db).each do |db|
        if db != @master || !db.running?
          db.collins_pool = ''
          db.collins_secondary_role = ''
          if enslave_old_master
            db.output 'REMINDER: you must manually put this host into Maintenance status in Collins' unless db.collins_status.downcase == 'maintenance'
          else
            db.collins_status = 'Unallocated'
          end
        end
      end
      # Clean up any slaves that are no longer slaving (again only looking at current datacenter)
      assets = Jetpants.topology.server_node_assets(@name, :slave)
      assets.reject! {|a| a.location && a.location.upcase != Plugin::JetCollins.datacenter}
      assets.map(&:to_db).each do |db|
        if !db.running? || db.pool != self
          db.output "Not replicating from new master, removing from pool #{self}"
          db.collins_pool = ''
          db.collins_secondary_role = ''
          db.collins_status = 'Unallocated'
        end
      end
    end

    ##### NEW METHODS ##########################################################

    # Returns the pool's creation time (as a unix timestamp) according to Collins.
    # (note: may be off by a few hours until https://github.com/tumblr/collins/issues/80
    # is resolved)
    # Not called from anything in jetpants_collins, but available to your own
    # custom automation if useful
    def collins_creation_timestamp
      collins_asset.created.to_time.to_i
    end

    # Called from DB#after_probe_master and DB#after_probe_slave for machines
    # that are unreachable via SSH, or reachable but MySQL isn't running.
    def slaves_according_to_collins
      results = []
      Jetpants.topology.server_node_assets(@name, :slave).each do |asset|
        slave = asset.to_db
        output "Collins found slave #{slave.ip} (#{slave.hostname})"
        results << slave
      end
      results
    end

    # Builds a nested Hash grouping the pool's DBs by the configured physical
    # location hierarchy (leaf level holds arrays of DBs).
    def db_layout
      raise "Database physical location hierarchy not set!" if Jetpants.plugins['jetpants_collins']['location_hierarchy'].nil?
      location_hierarchy = Jetpants.plugins['jetpants_collins']['location_hierarchy'].map(&:to_sym)
      dbs = [ master, slaves ].flatten
      location_map = {}
      db_locs = dbs.map{|db| [ db, db.location_hash ]}.flatten
      locations = Hash[*db_locs]
      locations.each do |db,db_loc|
        # Walk down the hierarchy, creating nested hashes until the last level,
        # which collects the DBs into an array.
        location_hierarchy.reduce(location_map) do |map,hierarchy_val|
          if hierarchy_val == location_hierarchy.last
            map[db_loc[hierarchy_val]] ||= []
            map[db_loc[hierarchy_val]] << db
          else
            map[db_loc[hierarchy_val]] ||= {}
          end
          map[db_loc[hierarchy_val]]
        end
      end
      location_map
    end
  end
end
Process the Collins status_state attribute
# JetCollins monkeypatches to add Collins integration
module Jetpants
  class Pool
    ##### JETCOLLINS MIX-IN ####################################################

    include Plugin::JetCollins

    # Used at startup time, to keep track of parent/child shard relationships
    attr_accessor :has_parent

    # Collins accessors for configuration asset metadata
    collins_attr_accessor :slave_pool_name, :aliases, :master_read_weight, :config_sort_order

    # Returns a Collins::Asset for this pool. Can optionally create one if not found.
    def collins_asset(create_if_missing=false)
      selector = {
        operation: 'and',
        details: true,
        type: 'CONFIGURATION',
        primary_role: 'MYSQL_POOL',
        pool: "^#{@name.upcase}$",
        status: 'Allocated',
      }
      selector[:remoteLookup] = true if Jetpants.plugins['jetpants_collins']['remote_lookup']
      results = Plugin::JetCollins.find selector, !create_if_missing
      # If we got back multiple results, try ignoring the remote datacenter ones
      if results.count > 1
        filtered_results = results.select {|a| a.location.nil? || a.location.upcase == Plugin::JetCollins.datacenter}
        results = filtered_results if filtered_results.count > 0
      end
      if results.count > 1
        raise "Multiple configuration assets found for pool #{name}"
      elsif results.count == 0 && create_if_missing
        output "Could not find configuration asset for pool; creating now"
        new_tag = 'mysql-' + @name
        asset = Collins::Asset.new type: 'CONFIGURATION', tag: new_tag, status: 'Allocated'
        begin
          Plugin::JetCollins.create!(asset)
        rescue
          # Creation can fail (e.g. asset already exists); fall back to forcing
          # its status. NOTE(review): this bare rescue also swallows unrelated
          # StandardErrors from create!.
          collins_set asset: asset,
                      status: 'Allocated'
        end
        collins_set asset: asset,
                    primary_role: 'MYSQL_POOL',
                    pool: @name.upcase
        Plugin::JetCollins.get new_tag
      elsif results.count == 0 && !create_if_missing
        raise "Could not find configuration asset for pool #{name}"
      else
        results.first
      end
    end

    ##### METHOD OVERRIDES #####################################################

    # Examines the current state of the pool (as known to Jetpants) and updates
    # Collins to reflect this, in terms of the pool's configuration asset as
    # well as the individual hosts.
    def sync_configuration
      asset = collins_asset(true)
      # NOTE(review): `aliases.join(',') || ''` — the `|| ''` is dead code since
      # Array#join never returns nil; if `aliases` itself can be nil, this still
      # raises. Possibly `(aliases || []).join(',')` was intended.
      collins_set asset: asset,
                  slave_pool_name: slave_name || '',
                  aliases: aliases.join(',') || '',
                  master_read_weight: master_read_weight
      [@master, slaves].flatten.each do |db|
        current_status = (db.collins_status || '').downcase
        # Never overwrite a host deliberately parked in Maintenance.
        db.collins_status = 'Allocated:RUNNING' unless current_status == 'maintenance'
        db.collins_pool = @name
      end
      @master.collins_secondary_role = 'MASTER'
      slaves(:active).each do |db|
        db.collins_secondary_role = 'ACTIVE_SLAVE'
        weight = @active_slave_weights[db]
        # 100 is the implicit default weight, so store it as blank.
        db.collins_slave_weight = (weight == 100 ? '' : weight)
      end
      slaves(:standby).each {|db| db.collins_secondary_role = 'STANDBY_SLAVE'}
      slaves(:backup).each {|db| db.collins_secondary_role = 'BACKUP_SLAVE'}
      true
    end

    # Return the count of Allocated:RUNNING slaves
    # NOTE(review): despite the comment above, this returns the matching slave
    # objects (Array#select), not a count. `collins_status_state` presumably
    # combines the asset status and state into one string — confirm against the
    # JetCollins mixin.
    def running_slaves(secondary_role=false)
      slaves.select { |slave|
        collins_secondary_role = Jetpants.topology.normalize_roles(slave.collins_secondary_role).first rescue false
        (slave.collins_status_state.downcase == 'allocated:running') && (secondary_role ? collins_secondary_role == secondary_role : true)
      }
    end

    # If the pool's master hasn't been probed yet, return active_slaves list
    # based strictly on what we found in Collins. This is a major speed-up at
    # start-up time, especially for tasks that need to iterate over all pools.
    alias :active_slaves_from_probe :active_slaves
    def active_slaves
      if @master.probed?
        active_slaves_from_probe
      else
        @active_slave_weights.keys
      end
    end

    ##### CALLBACKS ############################################################

    # Pushes slave removal to Collins. (Normally this type of logic is handled by
    # Pool#sync_configuration, but that won't handle this case, since
    # sync_configuration only updates hosts still in the pool.)
    def after_remove_slave!(slave_db)
      slave_db.collins_pool = slave_db.collins_secondary_role = slave_db.collins_slave_weight = ''
      current_status = (slave_db.collins_status || '').downcase
      slave_db.collins_status = 'Unallocated' unless current_status == 'maintenance'
    end

    # If the demoted master was offline, record some info in Collins, otherwise
    # there will be 2 masters listed
    def after_master_promotion!(promoted, enslave_old_master=true)
      Jetpants.topology.clear_asset_cache
      # Find the master asset(s) for this pool, filtering down to only current datacenter
      assets = Jetpants.topology.server_node_assets(@name, :master)
      assets.reject! {|a| a.location && a.location.upcase != Plugin::JetCollins.datacenter}
      assets.map(&:to_db).each do |db|
        if db != @master || !db.running?
          db.collins_pool = ''
          db.collins_secondary_role = ''
          if enslave_old_master
            db.output 'REMINDER: you must manually put this host into Maintenance status in Collins' unless db.collins_status.downcase == 'maintenance'
          else
            db.collins_status = 'Unallocated'
          end
        end
      end
      # Clean up any slaves that are no longer slaving (again only looking at current datacenter)
      assets = Jetpants.topology.server_node_assets(@name, :slave)
      assets.reject! {|a| a.location && a.location.upcase != Plugin::JetCollins.datacenter}
      assets.map(&:to_db).each do |db|
        if !db.running? || db.pool != self
          db.output "Not replicating from new master, removing from pool #{self}"
          db.collins_pool = ''
          db.collins_secondary_role = ''
          db.collins_status = 'Unallocated'
        end
      end
    end

    ##### NEW METHODS ##########################################################

    # Returns the pool's creation time (as a unix timestamp) according to Collins.
    # (note: may be off by a few hours until https://github.com/tumblr/collins/issues/80
    # is resolved)
    # Not called from anything in jetpants_collins, but available to your own
    # custom automation if useful
    def collins_creation_timestamp
      collins_asset.created.to_time.to_i
    end

    # Called from DB#after_probe_master and DB#after_probe_slave for machines
    # that are unreachable via SSH, or reachable but MySQL isn't running.
    def slaves_according_to_collins
      results = []
      Jetpants.topology.server_node_assets(@name, :slave).each do |asset|
        slave = asset.to_db
        output "Collins found slave #{slave.ip} (#{slave.hostname})"
        results << slave
      end
      results
    end

    # Builds a nested Hash grouping the pool's DBs by the configured physical
    # location hierarchy (leaf level holds arrays of DBs).
    def db_layout
      raise "Database physical location hierarchy not set!" if Jetpants.plugins['jetpants_collins']['location_hierarchy'].nil?
      location_hierarchy = Jetpants.plugins['jetpants_collins']['location_hierarchy'].map(&:to_sym)
      dbs = [ master, slaves ].flatten
      location_map = {}
      db_locs = dbs.map{|db| [ db, db.location_hash ]}.flatten
      locations = Hash[*db_locs]
      locations.each do |db,db_loc|
        # Walk down the hierarchy, creating nested hashes until the last level,
        # which collects the DBs into an array.
        location_hierarchy.reduce(location_map) do |map,hierarchy_val|
          if hierarchy_val == location_hierarchy.last
            map[db_loc[hierarchy_val]] ||= []
            map[db_loc[hierarchy_val]] << db
          else
            map[db_loc[hierarchy_val]] ||= {}
          end
          map[db_loc[hierarchy_val]]
        end
      end
      location_map
    end
  end
end
|
module Tentacles
  # Base controller for the Tentacles engine; inherits from the host
  # application's ApplicationController and applies the engine layout.
  class ApplicationController < ::ApplicationController
    protect_from_forgery with: :exception
    layout 'tentacles/application'
  end
end
Remove protect_from_forgery; it's the responsibility of the application, not the engine
module Tentacles
  # Base controller for the Tentacles engine; inherits from the host
  # application's ApplicationController (which is expected to configure CSRF
  # protection) and applies the engine layout.
  class ApplicationController < ::ApplicationController
    layout 'tentacles/application'
  end
end
|
# -*- encoding: utf-8 -*-
require 'thor'

# Thor-based command-line client for a GrowthForecast server. Each task takes
# a GrowthForecast list/view_graph URL and derives the service/section/graph
# names from the URL path.
class GrowthForecast::CLI < Thor
  desc 'delete <url>', 'delete a graph or graphs under a url'
  long_desc <<-LONGDESC
Delete a graph or graphs under a <url> where <url> is the one obtained from the GrowthForecast URI, e.g.,
http://{hostname}:{port}/list/{service_name}/{section_name}?t=sh
or
http://{hostname}:{port}/view_graph/{service_name}/{section_name}/{graph_name}?t=sh
ex) growthforecast-client delete 'http://{hostname}:{port}/list/{service_name}/{section_name}'
  LONGDESC
  def delete(url)
    uri = URI.parse(url)
    client = client(uri)
    service_name, section_name, graph_name = split_path(uri.path)
    # Delete matching plain graphs first...
    graphs = client.list_graph(service_name, section_name, graph_name)
    graphs.each do |graph|
      begin
        client.delete_graph(graph['service_name'], graph['section_name'], graph['graph_name'])
        puts "Deleted #{e graph['service_name']}/#{e graph['section_name']}/#{e graph['graph_name']}"
      rescue => e
        # NOTE(review): the rescue variable `e` shadows the `e` escaping helper
        # inside this branch.
        puts "\tclass:#{e.class}\t#{e.message}"
      end
    end
    # ...then complex graphs under the same path. Failures are reported but do
    # not abort the remaining deletions.
    graphs = client.list_complex(service_name, section_name, graph_name)
    graphs.each do |graph|
      begin
        client.delete_complex(graph['service_name'], graph['section_name'], graph['graph_name'])
        puts "Deleted #{e graph['service_name']}/#{e graph['section_name']}/#{e graph['graph_name']}"
      rescue => e
        puts "\tclass:#{e.class}\t#{e.message}"
      end
    end
  end

  desc 'color <url>', 'change the color of graphs'
  long_desc <<-LONGDESC
Change the color of graphs
ex) growthforecast-client color 'http://{hostname}:{port}/list/{service_name}/{section_name}' -c '2xx_count:#1111cc' '3xx_count:#11cc11'
  LONGDESC
  option :colors, :type => :hash, :aliases => '-c', :required => true, :banner => 'GRAPH_NAME:COLOR ...'
  def color(url)
    colors = options[:colors]
    uri = URI.parse(url)
    client = client(uri)
    service_name, section_name, graph_name = split_path(uri.path)
    graphs = client.list_graph(service_name, section_name, graph_name)
    graphs.each do |graph|
      service_name, section_name, graph_name = graph['service_name'], graph['section_name'], graph['graph_name']
      # Only graphs explicitly named in the -c option are touched.
      next unless colors[graph_name]
      params = {
        'color' => colors[graph_name],
        'unit' => 'count',
        'sort' => 1, # order to display, 19 is the top
        'adjust' => '/',
        'adjustval' => '1',
      }
      begin
        puts "Setup #{service_name}/#{section_name}/#{graph_name} with #{colors[graph_name]}"
        client.edit_graph(service_name, section_name, graph_name, params)
      rescue GrowthForecast::NotFound => e
        $stderr.puts "\tclass:#{e.class}\t#{e.message}"
      end
    end
  end

  desc 'create_complex <url>', 'create complex graphs'
  long_desc <<-LONGDESC
Create complex graphs under a url
ex) growthforecast-client create_complex 'http://{hostname}:{port}/list/{service_name}' -f 2xx_count 3xx_count -t status_count
  LONGDESC
  option :from_graphs, :type => :array, :aliases => '-f', :required => true, :banner => 'GRAPH_NAMES ...'
  option :to_complex, :type => :string, :aliases => '-t', :required => true
  def create_complex(url)
    from_graphs, to_complex = options[:from_graphs], options[:to_complex]
    uri = URI.parse(url)
    client = client(uri)
    service_name, section_name, graph_name = split_path(uri.path)
    sections = client.list_section(service_name, section_name, graph_name)
    # NOTE(review): the block params below shadow the outer `service_name` and
    # `sections` variables; works, but renaming would aid readability.
    sections.each do |service_name, sections|
      sections.each do |section_name|
        base = { "service_name" => service_name, "section_name" => section_name, "gmode" => 'gauge', "stack" => true, "type" => 'AREA' }
        from_graphs_params = from_graphs.map {|graph_name| base.merge('graph_name' => graph_name) }
        to_complex_params = { "service_name" => service_name, "section_name" => section_name, "graph_name" => to_complex, "sort" => 1 }
        begin
          puts "Setup /#{service_name}/#{section_name}/#{to_complex} with #{from_graphs}"
          client.create_complex(from_graphs_params, to_complex_params)
        rescue GrowthForecast::AlreadyExists => e
          $stderr.puts "\tclass:#{e.class}\t#{e.message}"
        rescue GrowthForecast::NotFound => e
          $stderr.puts "\tclass:#{e.class}\t#{e.message}"
        end
      end
    end
  end

  no_tasks do
    # Escapes a path component for display, using %20 rather than '+'.
    # Returns nil for nil input.
    def e(str)
      CGI.escape(str).gsub('+', '%20') if str
    end

    # Strips everything up to (and including) "list/" or "view_graph/" and
    # splits the remainder into unescaped path components.
    def split_path(path)
      path = path.gsub(/.*list\/?/, '').gsub(/.*view_graph\/?/, '')
      path.split('/').map {|p| CGI.unescape(p.gsub('%20', '+')) }
    end

    # Builds a GrowthForecast::Client pointed at the URL's scheme://host:port.
    def client(uri)
      GrowthForecast::Client.new("#{uri.scheme}://#{uri.host}:#{uri.port}")
    end
  end
end
Refactor the CLI
# -*- encoding: utf-8 -*-
require 'thor'
class GrowthForecast::CLI < Thor
class_option :silent, :aliases => ["-S"], :type => :boolean
# Pass-through to Thor's constructor.
# NOTE(review): adds no behavior over the inherited initialize; kept,
# presumably, as a hook point.
def initialize(args = [], opts = [], config = {})
  super(args, opts, config)
end
desc 'delete <url>', 'delete a graph or graphs under a url'
long_desc <<-LONGDESC
Delete a graph or graphs under a <url> where <url> is the one obtained from the GrowthForecast URI, e.g.,
http://{hostname}:{port}/list/{service_name}/{section_name}?t=sh
or
http://{hostname}:{port}/view_graph/{service_name}/{section_name}/{graph_name}?t=sh
ex) growthforecast-client delete 'http://{hostname}:{port}/list/{service_name}/{section_name}'
LONGDESC
# Deletes every plain graph and every complex graph matching the
# service/section/graph components derived from +url+.
def delete(url)
  base_uri, service_name, section_name, graph_name = split_url(url)
  @client = client(base_uri)
  graphs = @client.list_graph(service_name, section_name, graph_name)
  delete_graphs(graphs)
  complexes = @client.list_complex(service_name, section_name, graph_name)
  delete_complexes(complexes)
end
desc 'color <url>', 'change the color of graphs'
long_desc <<-LONGDESC
Change the color of graphs
ex) growthforecast-client color 'http://{hostname}:{port}/list/{service_name}/{section_name}' -c '2xx_count:#1111cc' '3xx_count:#11cc11'
LONGDESC
option :colors, :type => :hash, :aliases => '-c', :required => true, :banner => 'GRAPH_NAME:COLOR ...'
def color(url)
colors = options[:colors]
base_uri, service_name, section_name, graph_name = split_url(url)
@client = client(base_uri)
graphs = @client.list_graph(service_name, section_name, graph_name)
setup_colors(colors, graphs)
end
desc 'create_complex <url>', 'create complex graphs'
long_desc <<-LONGDESC
Create complex graphs under a url
ex) growthforecast-client create_complex 'http://{hostname}:{port}/list/{service_name}' -f 2xx_count 3xx_count -t status_count
LONGDESC
option :from_graphs, :type => :array, :aliases => '-f', :required => true, :banner => 'GRAPH_NAMES ...'
option :to_complex, :type => :string, :aliases => '-t', :required => true
def create_complex(url)
from_graphs, to_complex = options[:from_graphs], options[:to_complex]
base_uri, service_name, section_name, graph_name = split_url(url)
@client = client(base_uri)
graphs = @client.list_graph(service_name, section_name, graph_name)
setup_complex(from_graphs, to_complex, graphs)
end
no_tasks do
def delete_graphs(graphs)
graphs.each do |graph|
puts "Delete #{e graph['service_name']}/#{e graph['section_name']}/#{e graph['graph_name']}" unless @options[:silent]
exec { @client.delete_graph(graph['service_name'], graph['section_name'], graph['graph_name']) }
end
end
def delete_complexes(complexes)
complexes.each do |graph|
puts "Delete #{e graph['service_name']}/#{e graph['section_name']}/#{e graph['graph_name']}" unless @options[:silent]
exec { @client.delete_complex(graph['service_name'], graph['section_name'], graph['graph_name']) }
end
end
def setup_colors(colors, graphs)
graphs.each do |graph|
service_name, section_name, graph_name = graph['service_name'], graph['section_name'], graph['graph_name']
next unless color = colors[graph_name]
params = {
'color' => color,
'unit' => 'count',
'sort' => 1, # order to display, 19 is the top
'adjust' => '/',
'adjustval' => '1',
}
puts "Setup #{service_name}/#{section_name}/#{graph_name} with #{color}" unless @options[:silent]
exec { @client.edit_graph(service_name, section_name, graph_name, params) }
end
end
def setup_complex(from_graphs, to_complex, graphs)
from_graph_first = from_graphs.first
graphs.each do |graph|
service_name, section_name, graph_name = graph['service_name'], graph['section_name'], graph['graph_name']
next unless graph_name == from_graph_first
base = { "service_name" => service_name, "section_name" => section_name, "gmode" => 'gauge', "stack" => true, "type" => 'AREA' }
from_graphs_params = from_graphs.map {|graph_name| base.merge('graph_name' => graph_name) }
to_complex_params = { "service_name" => service_name, "section_name" => section_name, "graph_name" => to_complex, "sort" => 1 }
puts "Setup /#{service_name}/#{section_name}/#{to_complex} with #{from_graphs}" unless @options[:silent]
exec { @client.create_complex(from_graphs_params, to_complex_params) }
end
end
def exec(&blk)
begin
yield
rescue => e
$stderr.puts "\tclass:#{e.class}\t#{e.message}"
end
end
def e(str)
CGI.escape(str).gsub('+', '%20') if str
end
def client(base_uri)
GrowthForecast::Client.new(base_uri)
end
def split_url(url)
uri = URI.parse(url)
base_uri = "#{uri.scheme}://#{uri.host}:#{uri.port}"
[base_uri] + split_path(uri.path)
end
def split_path(path)
path = path.gsub(/.*list\/?/, '').gsub(/.*view_graph\/?/, '')
path.split('/').map {|p| CGI.unescape(p.gsub('%20', '+')) }
end
end
end
|
module Voluntary
  module Api
    module V1
      # Base class for all v1 API controllers. Currently a full
      # ActionController::Base; the commented includes document the minimal
      # modules required if this is ever slimmed down to ActionController::Metal.
      class BaseController < ActionController::Base #ActionController::Metal
        #include ActionController::Rendering # enables rendering
        #include ActionController::MimeResponds # enables serving different content types like :xml or :json
        #include AbstractController::Callbacks # callbacks for your authentication logic
      end
    end
  end
end
refs #72 CORS: Access-Control-(Allow-Origin|Request-Method)=* (API)
module Voluntary
  module Api
    module V1
      # Base class for all v1 API controllers; opens the API up to
      # cross-origin (CORS) requests from any origin.
      class BaseController < ActionController::Base #ActionController::Metal
        #include ActionController::Rendering # enables rendering
        #include ActionController::MimeResponds # enables serving different content types like :xml or :json
        #include AbstractController::Callbacks # callbacks for your authentication logic
        after_filter :set_access_control_headers

        # Sets permissive CORS headers on every response.
        # Fix: the previous code set 'Access-Control-Request-Method', which is a
        # *request* header browsers send during preflight and is ignored on a
        # response; the header the browser honours is 'Access-Control-Allow-Methods'.
        def set_access_control_headers
          headers['Access-Control-Allow-Origin'] = '*'
          headers['Access-Control-Allow-Methods'] = '*'
        end
      end
    end
  end
end
require 'drb/drb'
module Guard
  class RSpec
    # Builds and executes the RSpec command line (or a DRb call to a running
    # spork/drb server) for a set of spec paths, adapting to the project's
    # RSpec version, bundler, binstubs, rvm and environment configuration.
    class Runner
      # Exit code we ask RSpec to use for ordinary spec failures so they can
      # be distinguished from crashes (see rspec_command_exited_with_an_exception?).
      FAILURE_EXIT_CODE = 2

      def initialize(options = {})
        @options = {
          :bundler => true,
          :binstubs => false,
          :rvm => nil,
          :cli => nil,
          :env => nil,
          :notification => true,
          :turnip => false
        }.merge(options)
        deprecations_warnings
      end

      # Runs the given spec paths; returns false for an empty path list,
      # otherwise delegates to the DRb or shell strategy.
      def run(paths, options = {})
        return false if paths.empty?
        message = options[:message] || "Running: #{paths.join(' ')}"
        UI.info(message, :reset => true)
        options = @options.merge(options)
        if drb_used?
          run_via_drb(paths, options)
        else
          run_via_shell(paths, options)
        end
      end

      # Detected (or configured) major RSpec version: 1 or 2.
      def rspec_version
        @rspec_version ||= @options[:version] || determine_rspec_version
      end

      # Executable name, optionally prefixed with the binstubs directory.
      def rspec_executable
        @rspec_executable ||= begin
          exec = rspec_class.downcase
          binstubs? ? "#{binstubs}/#{exec}" : exec
        end
      end

      # Probes `rspec --help` once to see whether --failure-exit-code exists.
      # Fix: the probe no longer prepends the env-var export — the environment
      # is irrelevant to the --help output and the export string broke the
      # backtick command when combined with the other prefixes.
      def failure_exit_code_supported?
        @failure_exit_code_supported ||= begin
          cmd_parts = []
          cmd_parts << "bundle exec" if bundle_exec?
          cmd_parts << rspec_executable
          cmd_parts << "--help"
          `#{cmd_parts.join(' ')}`.include? "--failure-exit-code"
        end
      end

      # "Spec" for RSpec 1, "RSpec" for RSpec 2.
      def rspec_class
        @rspec_class ||= case rspec_version
        when 1
          "Spec"
        when 2
          "RSpec"
        end
      end

      # Formatter flags parsed from the project's .rspec file, falling back
      # to the progress formatter when none are configured.
      def parsed_or_default_formatter
        @parsed_or_default_formatter ||= begin
          file_name = "#{Dir.pwd}/.rspec"
          parsed_formatter = if File.exist?(file_name)
            formatters = File.read(file_name).scan(formatter_regex).flatten
            formatters.map { |formatter| "-f #{formatter}" }.join(' ')
          end
          parsed_formatter.nil? || parsed_formatter.empty? ? '-f progress' : parsed_formatter
        end
      end

      private

      # Shell prefix exporting the configured :env variables, or nil.
      def environment_variables
        return if @options[:env].nil?
        "export " + @options[:env].map {|key, value| "#{key}=#{value}"}.join(' ') + ';'
      end

      # Assembles the rspec argument string (cli flags, formatters,
      # failure-exit-code, turnip, then the spec paths).
      def rspec_arguments(paths, options)
        arg_parts = []
        arg_parts << options[:cli]
        if @options[:notification]
          arg_parts << parsed_or_default_formatter unless options[:cli] =~ formatter_regex
          arg_parts << "-r #{File.dirname(__FILE__)}/formatters/notification_#{rspec_class.downcase}.rb"
          arg_parts << "-f Guard::RSpec::Formatter::Notification#{rspec_class}#{rspec_version == 1 ? ":" : " --out "}/dev/null"
        end
        arg_parts << "--failure-exit-code #{FAILURE_EXIT_CODE}" if failure_exit_code_supported?
        arg_parts << "-r turnip/rspec" if @options[:turnip]
        arg_parts << paths.join(' ')
        arg_parts.compact.join(' ')
      end

      # Full shell command.
      # Fix: the env-var export now comes BEFORE the `rvm ... exec` prefix.
      # Previously the "export K=V;" string was appended after it, so the
      # export (not rspec) became the command handed to `rvm exec` and the
      # real rspec invocation was mangled.
      def rspec_command(paths, options)
        cmd_parts = []
        cmd_parts << environment_variables
        cmd_parts << "rvm #{@options[:rvm].join(',')} exec" if @options[:rvm].respond_to?(:join)
        cmd_parts << "bundle exec" if bundle_exec?
        cmd_parts << rspec_executable
        cmd_parts << rspec_arguments(paths, options)
        cmd_parts.compact.join(' ')
      end

      # Runs the command in a subshell, notifying on abnormal termination.
      def run_via_shell(paths, options)
        success = system(rspec_command(paths, options))
        if @options[:notification] && !drb_used? && !success && rspec_command_exited_with_an_exception?
          Notifier.notify("Failed", :title => "RSpec results", :image => :failed, :priority => 2)
        end
        success
      end

      # True when the last run exited with a code other than our dedicated
      # spec-failure code, i.e. rspec itself blew up.
      def rspec_command_exited_with_an_exception?
        failure_exit_code_supported? && $?.exitstatus != FAILURE_EXIT_CODE
      end

      # We can optimize this path by hitting up the drb server directly, circumventing the overhead
      # of the user's shell, bundler and ruby environment.
      def run_via_drb(paths, options)
        require "shellwords"
        argv = rspec_arguments(paths, options).shellsplit
        # The user can specify --drb-port for rspec, we need to honor it.
        if idx = argv.index("--drb-port")
          port = argv[idx + 1].to_i
        end
        port = ENV["RSPEC_DRB"] || 8989 unless port && port > 0
        ret = drb_service(port.to_i).run(argv, $stderr, $stdout)
        [0, true].include?(ret)
      rescue DRb::DRbConnError
        # Fall back to the shell runner; we don't want to mangle the environment!
        run_via_shell(paths, options)
      end

      # Memoized: true when the :cli option includes --drb.
      def drb_used?
        if @drb_used.nil?
          @drb_used = @options[:cli] && @options[:cli].include?('--drb')
        else
          @drb_used
        end
      end

      # RSpec 1 & 2 use the same DRb call signature, and we can avoid loading a large chunk of rspec
      # just to let DRb know what to do.
      #
      # For reference:
      #
      # * RSpec 1: https://github.com/myronmarston/rspec-1/blob/master/lib/spec/runner/drb_command_line.rb
      # * RSpec 2: https://github.com/rspec/rspec-core/blob/master/lib/rspec/core/drb_command_line.rb
      def drb_service(port)
        require "drb/drb"
        # Make sure we have a listener running
        unless @drb_listener_running
          begin
            DRb.start_service("druby://localhost:0")
          rescue SocketError, Errno::EADDRNOTAVAIL
            DRb.start_service("druby://:0")
          end
          @drb_listener_running = true
        end
        @drb_services ||= {}
        @drb_services[port.to_i] ||= DRbObject.new_with_uri("druby://127.0.0.1:#{port}")
      end

      # Memoized: true when a Gemfile exists in the working directory.
      def bundler_allowed?
        if @bundler_allowed.nil?
          @bundler_allowed = File.exist?("#{Dir.pwd}/Gemfile")
        else
          @bundler_allowed
        end
      end

      # Memoized: bundler requested AND usable.
      def bundler?
        if @bundler.nil?
          @bundler = bundler_allowed? && @options[:bundler]
        else
          @bundler
        end
      end

      # Memoized: whether binstubs are configured (true or a path).
      def binstubs?
        if @binstubs.nil?
          @binstubs = !!@options[:binstubs]
        else
          @binstubs
        end
      end

      # Binstubs directory: "bin" when :binstubs is literally true, otherwise
      # the configured path.
      def binstubs
        if @options[:binstubs] == true
          "bin"
        else
          @options[:binstubs]
        end
      end

      # Only prefix "bundle exec" when bundler is on and binstubs are not.
      def bundle_exec?
        bundler? && !binstubs?
      end

      # Sniffs the RSpec major version from spec_helper.rb or `bundle show`.
      def determine_rspec_version
        if File.exist?("#{Dir.pwd}/spec/spec_helper.rb")
          File.new("#{Dir.pwd}/spec/spec_helper.rb").read.include?("Spec::Runner") ? 1 : 2
        elsif bundler_allowed?
          ENV['BUNDLE_GEMFILE'] = "#{Dir.pwd}/Gemfile"
          `bundle show rspec`.include?("/rspec-1.") ? 1 : 2
        else
          2
        end
      end

      # Strips deprecated options, telling the user to pass them via :cli.
      def deprecations_warnings
        [:color, :drb, [:fail_fast, "fail-fast"], [:formatter, "format"]].each do |option|
          key, value = option.is_a?(Array) ? option : [option, option.to_s]
          if @options.key?(key)
            @options.delete(key)
            UI.info %{DEPRECATION WARNING: The :#{key} option is deprecated. Pass standard command line argument "--#{value}" to RSpec with the :cli option.}
          end
        end
      end

      # Matches "-f NAME" / "--format NAME" / "--format=NAME".
      def formatter_regex
        @formatter_regex ||= /(?:^|\s)(?:-f\s*|--format(?:=|\s+))([\w:]+)/
      end
    end
  end
end
Export environment variables before the `rvm exec` prefix
require 'drb/drb'
module Guard
  class RSpec
    # Builds and executes the RSpec command line (or a DRb call to a running
    # spork/drb server) for a set of spec paths, adapting to the project's
    # RSpec version, bundler, binstubs, rvm and environment configuration.
    class Runner
      # NOTE(review): this attr_reader is shadowed by the explicit
      # rspec_version method defined below, so it has no effect.
      attr_reader :rspec_version

      # Exit code we ask RSpec to use for ordinary spec failures so they can
      # be distinguished from crashes (see rspec_command_exited_with_an_exception?).
      FAILURE_EXIT_CODE = 2

      def initialize(options = {})
        @options = {
          :bundler => true,
          :binstubs => false,
          :rvm => nil,
          :cli => nil,
          :env => nil,
          :notification => true,
          :turnip => false
        }.merge(options)
        deprecations_warnings
      end

      # Runs the given spec paths; returns false for an empty path list,
      # otherwise delegates to the DRb or shell strategy.
      def run(paths, options = {})
        return false if paths.empty?
        message = options[:message] || "Running: #{paths.join(' ')}"
        UI.info(message, :reset => true)
        options = @options.merge(options)
        if drb_used?
          run_via_drb(paths, options)
        else
          run_via_shell(paths, options)
        end
      end

      # Detected (or configured) major RSpec version: 1 or 2.
      def rspec_version
        @rspec_version ||= @options[:version] || determine_rspec_version
      end

      # Executable name, optionally prefixed with the binstubs directory.
      def rspec_executable
        @rspec_executable ||= begin
          exec = rspec_class.downcase
          binstubs? ? "#{binstubs}/#{exec}" : exec
        end
      end

      # Probes `rspec --help` once to see whether --failure-exit-code exists.
      def failure_exit_code_supported?
        @failure_exit_code_supported ||= begin
          cmd_parts = []
          cmd_parts << "bundle exec" if bundle_exec?
          cmd_parts << rspec_executable
          cmd_parts << "--help"
          `#{cmd_parts.join(' ')}`.include? "--failure-exit-code"
        end
      end

      # "Spec" for RSpec 1, "RSpec" for RSpec 2.
      def rspec_class
        @rspec_class ||= case rspec_version
        when 1
          "Spec"
        when 2
          "RSpec"
        end
      end

      # Formatter flags parsed from the project's .rspec file, falling back
      # to the progress formatter when none are configured.
      def parsed_or_default_formatter
        @parsed_or_default_formatter ||= begin
          file_name = "#{Dir.pwd}/.rspec"
          parsed_formatter = if File.exist?(file_name)
            formatters = File.read(file_name).scan(formatter_regex).flatten
            formatters.map { |formatter| "-f #{formatter}" }.join(' ')
          end
          parsed_formatter.nil? || parsed_formatter.empty? ? '-f progress' : parsed_formatter
        end
      end

      private

      # Shell prefix exporting the configured :env variables, or nil.
      def environment_variables
        return if @options[:env].nil?
        "export " + @options[:env].map {|key, value| "#{key}=#{value}"}.join(' ') + ';'
      end

      # Assembles the rspec argument string (cli flags, formatters,
      # failure-exit-code, turnip, then the spec paths).
      def rspec_arguments(paths, options)
        arg_parts = []
        arg_parts << options[:cli]
        if @options[:notification]
          arg_parts << parsed_or_default_formatter unless options[:cli] =~ formatter_regex
          arg_parts << "-r #{File.dirname(__FILE__)}/formatters/notification_#{rspec_class.downcase}.rb"
          arg_parts << "-f Guard::RSpec::Formatter::Notification#{rspec_class}#{rspec_version == 1 ? ":" : " --out "}/dev/null"
        end
        arg_parts << "--failure-exit-code #{FAILURE_EXIT_CODE}" if failure_exit_code_supported?
        arg_parts << "-r turnip/rspec" if @options[:turnip]
        arg_parts << paths.join(' ')
        arg_parts.compact.join(' ')
      end

      # Full shell command. The env-var export must come first — before the
      # `rvm ... exec` prefix — so the export applies to the whole command
      # instead of being handed to `rvm exec` as its command.
      def rspec_command(paths, options)
        cmd_parts = []
        cmd_parts << environment_variables
        cmd_parts << "rvm #{@options[:rvm].join(',')} exec" if @options[:rvm].respond_to?(:join)
        cmd_parts << "bundle exec" if bundle_exec?
        cmd_parts << rspec_executable
        cmd_parts << rspec_arguments(paths, options)
        cmd_parts.compact.join(' ')
      end

      # Runs the command in a subshell, notifying on abnormal termination.
      def run_via_shell(paths, options)
        success = system(rspec_command(paths, options))
        if @options[:notification] && !drb_used? && !success && rspec_command_exited_with_an_exception?
          Notifier.notify("Failed", :title => "RSpec results", :image => :failed, :priority => 2)
        end
        success
      end

      # True when the last run exited with a code other than our dedicated
      # spec-failure code, i.e. rspec itself blew up.
      def rspec_command_exited_with_an_exception?
        failure_exit_code_supported? && $?.exitstatus != FAILURE_EXIT_CODE
      end

      # We can optimize this path by hitting up the drb server directly, circumventing the overhead
      # of the user's shell, bundler and ruby environment.
      def run_via_drb(paths, options)
        require "shellwords"
        argv = rspec_arguments(paths, options).shellsplit
        # The user can specify --drb-port for rspec, we need to honor it.
        if idx = argv.index("--drb-port")
          port = argv[idx + 1].to_i
        end
        port = ENV["RSPEC_DRB"] || 8989 unless port && port > 0
        ret = drb_service(port.to_i).run(argv, $stderr, $stdout)
        [0, true].include?(ret)
      rescue DRb::DRbConnError
        # Fall back to the shell runner; we don't want to mangle the environment!
        run_via_shell(paths, options)
      end

      # Memoized: true when the :cli option includes --drb.
      def drb_used?
        if @drb_used.nil?
          @drb_used = @options[:cli] && @options[:cli].include?('--drb')
        else
          @drb_used
        end
      end

      # RSpec 1 & 2 use the same DRb call signature, and we can avoid loading a large chunk of rspec
      # just to let DRb know what to do.
      #
      # For reference:
      #
      # * RSpec 1: https://github.com/myronmarston/rspec-1/blob/master/lib/spec/runner/drb_command_line.rb
      # * RSpec 2: https://github.com/rspec/rspec-core/blob/master/lib/rspec/core/drb_command_line.rb
      def drb_service(port)
        require "drb/drb"
        # Make sure we have a listener running
        unless @drb_listener_running
          begin
            DRb.start_service("druby://localhost:0")
          rescue SocketError, Errno::EADDRNOTAVAIL
            DRb.start_service("druby://:0")
          end
          @drb_listener_running = true
        end
        @drb_services ||= {}
        @drb_services[port.to_i] ||= DRbObject.new_with_uri("druby://127.0.0.1:#{port}")
      end

      # Memoized: true when a Gemfile exists in the working directory.
      def bundler_allowed?
        if @bundler_allowed.nil?
          @bundler_allowed = File.exist?("#{Dir.pwd}/Gemfile")
        else
          @bundler_allowed
        end
      end

      # Memoized: bundler requested AND usable.
      def bundler?
        if @bundler.nil?
          @bundler = bundler_allowed? && @options[:bundler]
        else
          @bundler
        end
      end

      # Memoized: whether binstubs are configured (true or a path).
      def binstubs?
        if @binstubs.nil?
          @binstubs = !!@options[:binstubs]
        else
          @binstubs
        end
      end

      # Binstubs directory: "bin" when :binstubs is literally true, otherwise
      # the configured path.
      def binstubs
        if @options[:binstubs] == true
          "bin"
        else
          @options[:binstubs]
        end
      end

      # Only prefix "bundle exec" when bundler is on and binstubs are not.
      def bundle_exec?
        bundler? && !binstubs?
      end

      # Sniffs the RSpec major version from spec_helper.rb or `bundle show`.
      def determine_rspec_version
        if File.exist?("#{Dir.pwd}/spec/spec_helper.rb")
          File.new("#{Dir.pwd}/spec/spec_helper.rb").read.include?("Spec::Runner") ? 1 : 2
        elsif bundler_allowed?
          ENV['BUNDLE_GEMFILE'] = "#{Dir.pwd}/Gemfile"
          `bundle show rspec`.include?("/rspec-1.") ? 1 : 2
        else
          2
        end
      end

      # Strips deprecated options, telling the user to pass them via :cli.
      def deprecations_warnings
        [:color, :drb, [:fail_fast, "fail-fast"], [:formatter, "format"]].each do |option|
          key, value = option.is_a?(Array) ? option : [option, option.to_s]
          if @options.key?(key)
            @options.delete(key)
            UI.info %{DEPRECATION WARNING: The :#{key} option is deprecated. Pass standard command line argument "--#{value}" to RSpec with the :cli option.}
          end
        end
      end

      # Matches "-f NAME" / "--format NAME" / "--format=NAME".
      def formatter_regex
        @formatter_regex ||= /(?:^|\s)(?:-f\s*|--format(?:=|\s+))([\w:]+)/
      end
    end
  end
end
|
require "redis_directory"
if RUBY_PLATFORM =~ /java/
require "java"
else
require "uuid"
end
# Redis-backed cache store for Harbor. Items are YAML-serialized under their
# key with a Redis TTL, and every live key is additionally tracked in the
# "cache-keys" set so regex-based lookup/deletion is possible.
class Harbor::Cache::Redis
  TRACKER_KEY_NAME="cache-keys"

  # connection: either an existing Redis / Redis::Distributed client, or a
  # directory configuration used to look up the "cache" service by name.
  def initialize(connection, name = nil)
    if connection.is_a?(Redis) || connection.is_a?(Redis::Distributed)
      @redis = connection
    else
      @redis = Redis::Directory.new(connection).get("cache", name)
    end
  end

  # Fetches the item for key; untracks expired items and returns nil for
  # them, otherwise refreshes the Redis TTL and returns the item.
  def get(key)
    if (value = @redis.get(key))
      item = load(key, value)
      if item.expired?
        @redis.srem(TRACKER_KEY_NAME, key)
        nil
      else
        @redis.expire(key, item.ttl)
        item
      end
    else
      nil
    end
  end
  alias [] get

  # Stores a new cache item, sets its Redis TTL and tracks the key.
  # Returns the created Harbor::Cache::Item.
  def put(key, ttl, maximum_age, content, cached_at)
    item = Harbor::Cache::Item.new(key, ttl, maximum_age, content, cached_at)
    data = { "ttl" => item.ttl, "maximum_age" => item.maximum_age, "content" => item.content, "cached_at" => item.cached_at, "expires_at" => item.expires_at }
    @redis.set(key, YAML::dump(data))
    @redis.expire(key, ttl)
    @redis.sadd(TRACKER_KEY_NAME, key)
    item
  end

  # Removes the value and untracks the key.
  def delete(key)
    @redis.del(key)
    @redis.srem(TRACKER_KEY_NAME, key)
  end

  # Deletes every tracked key matching key_regex; returns nil if none match.
  # NOTE(review): relies on srem/del accepting splatted member lists —
  # confirm the redis client version in use supports variadic SREM.
  def delete_matching(key_regex)
    if (matches = keys_matching(key_regex)).empty?
      nil
    else
      @redis.srem(TRACKER_KEY_NAME, *matches)
      @redis.del(*matches)
    end
  end

  # All tracked keys whose names match key_regex.
  def keys_matching(key_regex)
    @redis.smembers(TRACKER_KEY_NAME).select { |key| key =~ key_regex }
  end

  # Extends the lifetime of an existing item by re-storing its bumped copy.
  def bump(key)
    if item = get(key)
      delete(key)
      item.bump
      put(key, item.ttl, item.maximum_age, item.content, item.cached_at)
    end
  end

  # Deserializes a stored YAML payload back into a Harbor::Cache::Item.
  def load(key, data)
    value = YAML::load(data)
    Harbor::Cache::Item.new(key, value["ttl"], value["maximum_age"], value["content"], value["cached_at"], value["expires_at"])
  end
end
Fix Redis cache bug: remove matched keys from the tracker set one at a time,
since @redis.srem doesn't support splatted member arrays yet
require "redis_directory"
if RUBY_PLATFORM =~ /java/
require "java"
else
require "uuid"
end
# Redis-backed cache store for Harbor. Items are YAML-serialized under their
# key with a Redis TTL, and every live key is additionally tracked in the
# "cache-keys" set so regex-based lookup/deletion is possible.
class Harbor::Cache::Redis
  TRACKER_KEY_NAME="cache-keys"

  # connection: either an existing Redis / Redis::Distributed client, or a
  # directory configuration used to look up the "cache" service by name.
  def initialize(connection, name = nil)
    if connection.is_a?(Redis) || connection.is_a?(Redis::Distributed)
      @redis = connection
    else
      @redis = Redis::Directory.new(connection).get("cache", name)
    end
  end

  # Fetches the item for key; untracks expired items and returns nil for
  # them, otherwise refreshes the Redis TTL and returns the item.
  def get(key)
    if (value = @redis.get(key))
      item = load(key, value)
      if item.expired?
        @redis.srem(TRACKER_KEY_NAME, key)
        nil
      else
        @redis.expire(key, item.ttl)
        item
      end
    else
      nil
    end
  end
  alias [] get

  # Stores a new cache item, sets its Redis TTL and tracks the key.
  # Returns the created Harbor::Cache::Item.
  def put(key, ttl, maximum_age, content, cached_at)
    item = Harbor::Cache::Item.new(key, ttl, maximum_age, content, cached_at)
    data = { "ttl" => item.ttl, "maximum_age" => item.maximum_age, "content" => item.content, "cached_at" => item.cached_at, "expires_at" => item.expires_at }
    @redis.set(key, YAML::dump(data))
    @redis.expire(key, ttl)
    @redis.sadd(TRACKER_KEY_NAME, key)
    item
  end

  # Removes the value and untracks the key.
  def delete(key)
    @redis.del(key)
    @redis.srem(TRACKER_KEY_NAME, key)
  end

  # Deletes every tracked key matching key_regex; returns nil if none match.
  # Untracks keys one at a time because the redis client's srem does not
  # accept splatted member lists. Fix: the leftover splatted
  # `@redis.srem(TRACKER_KEY_NAME, *matches)` call has been removed — it
  # duplicated the loop above it and re-introduced the very wrong-arity
  # failure the loop was added to avoid.
  def delete_matching(key_regex)
    if (matches = keys_matching(key_regex)).empty?
      nil
    else
      matches.each do |match|
        @redis.srem(TRACKER_KEY_NAME, match)
      end
      @redis.del(*matches)
    end
  end

  # All tracked keys whose names match key_regex.
  def keys_matching(key_regex)
    @redis.smembers(TRACKER_KEY_NAME).select { |key| key =~ key_regex }
  end

  # Extends the lifetime of an existing item by re-storing its bumped copy.
  def bump(key)
    if item = get(key)
      delete(key)
      item.bump
      put(key, item.ttl, item.maximum_age, item.content, item.cached_at)
    end
  end

  # Deserializes a stored YAML payload back into a Harbor::Cache::Item.
  def load(key, data)
    value = YAML::load(data)
    Harbor::Cache::Item.new(key, value["ttl"], value["maximum_age"], value["content"], value["cached_at"], value["expires_at"])
  end
end
|
# Mixed into controllers under test. Appends a <script id="test_bot_payload">
# element (response code, serializable view assigns, flash) just before
# </body> in HTML responses so the test bot can inspect controller state
# from the rendered page.
module EffectiveTestBotControllerHelper
  BODY_TAG = '</body>'

  # This is included as an after_action in the controller
  def assign_test_bot_payload(payload = {})
    # Only instrument HTML responses that actually contain a closing body tag.
    return unless response.content_type == 'text/html'.freeze
    return unless !!(response.body[BODY_TAG])
    payload.merge!({ response_code: response.code, assigns: test_bot_view_assigns, flash: flash.to_hash })
    # Rebind `payload` to the rendered <script> tag exposing the values on
    # window.effective_test_bot.
    payload = view_context.content_tag(:script, id: 'test_bot_payload') do
      [
        '',
        'window.effective_test_bot = {};',
        payload.map { |k, v| "window.effective_test_bot.#{k} = #{v.respond_to?(:to_json) ? v.to_json : ("'" + v + "'")};" },
        '',
      ].join("\n").html_safe
    end
    # Splice the script in directly before </body>.
    split = response.body.split(BODY_TAG)
    response.body = "#{split.first}#{payload}#{BODY_TAG}#{split.last if split.size > 1}"
  end

  # This is called in an ActionController rescue_from.
  def assign_test_bot_access_denied_exception(exception)
    assign_test_bot_payload(test_bot_access_denied(exception))
  end

  private

  # Serializes a CanCan-style access-denied exception (action plus a
  # printable representation of its subject).
  def test_bot_access_denied(exception)
    {
      access_denied: exception,
      action: exception.action,
      subject: (
        if exception.subject.kind_of?(Symbol)
          ":#{exception.subject}"
        elsif exception.subject.class == Class
          exception.subject.name
        else
          exception.subject.class.name
        end
      )
    }
  end

  # Converts view assigns into a JSON-friendly hash: model attributes (with
  # errors), plain scalars as-is, everything else flagged as present only.
  def test_bot_view_assigns
    assigns = {}
    view_assigns.each do |key, object|
      case object
      when ActiveRecord::Base
        assigns[key] = object.attributes
        assigns[key][:errors] = object.errors.messages.delete_if { |_, v| v.blank? } if object.errors.present?
      when (ActiveModel::Model rescue nil)
        assigns[key] = object.respond_to?(:attributes) ? object.attributes : { present_but_not_serialized: true }
        assigns[key][:errors] = object.errors.messages.delete_if { |_, v| v.blank? } if object.errors.present?
      when TrueClass, FalseClass, NilClass, String, Symbol, Numeric
        assigns[key] = object
      else
        # We don't want to serialize them, but they should be present
        assigns[key] = :present_but_not_serialized
      end
    end
    assigns
  end
end
Assign the test-bot payload to text/javascript responses too
# Mixed into controllers under test. Exposes controller state (response
# code, serializable view assigns, flash) on window.effective_test_bot:
# injected as a <script> before </body> for HTML responses, or appended
# directly for text/javascript responses.
module EffectiveTestBotControllerHelper
  BODY_TAG = '</body>'

  # This is included as an after_action in the controller
  def assign_test_bot_payload(payload = {})
    payload.merge!({ response_code: response.code, assigns: test_bot_view_assigns, flash: flash.to_hash })
    if response.content_type == 'text/html' && response.body[BODY_TAG].present?
      # Splice a <script id="test_bot_payload"> in directly before </body>.
      payload = view_context.content_tag(:script, build_payload_javascript(payload), id: 'test_bot_payload')
      split = response.body.split(BODY_TAG)
      response.body = "#{split.first}#{payload}#{BODY_TAG}#{split.last if split.size > 1}"
    elsif response.content_type == 'text/javascript' && response.body.present?
      # JS responses get the raw payload appended after the existing script.
      payload = build_payload_javascript(payload)
      response.body = "#{response.body};#{payload}"
    end
  end

  # This is called in an ActionController rescue_from.
  def assign_test_bot_access_denied_exception(exception)
    assign_test_bot_payload(test_bot_access_denied(exception))
  end

  private

  # Renders the payload hash as JavaScript assignments onto
  # window.effective_test_bot.
  def build_payload_javascript(payload)
    [
      '',
      'window.effective_test_bot = {};',
      payload.map { |k, v| "window.effective_test_bot.#{k} = #{v.respond_to?(:to_json) ? v.to_json : ("'" + v + "'")};" },
      '',
    ].join("\n").html_safe
  end

  # Serializes a CanCan-style access-denied exception (action plus a
  # printable representation of its subject).
  def test_bot_access_denied(exception)
    {
      access_denied: exception,
      action: exception.action,
      subject: (
        if exception.subject.kind_of?(Symbol)
          ":#{exception.subject}"
        elsif exception.subject.class == Class
          exception.subject.name
        else
          exception.subject.class.name
        end
      )
    }
  end

  # Converts view assigns into a JSON-friendly hash: model attributes (with
  # errors), plain scalars as-is, everything else flagged as present only.
  def test_bot_view_assigns
    assigns = {}
    view_assigns.each do |key, object|
      case object
      when ActiveRecord::Base
        assigns[key] = object.attributes
        assigns[key][:errors] = object.errors.messages.delete_if { |_, v| v.blank? } if object.errors.present?
      when (ActiveModel::Model rescue nil)
        assigns[key] = object.respond_to?(:attributes) ? object.attributes : { present_but_not_serialized: true }
        assigns[key][:errors] = object.errors.messages.delete_if { |_, v| v.blank? } if object.errors.present?
      when TrueClass, FalseClass, NilClass, String, Symbol, Numeric
        assigns[key] = object
      else
        # We don't want to serialize them, but they should be present
        assigns[key] = :present_but_not_serialized
      end
    end
    assigns
  end
end
|
#
# The MIT License (MIT)
#
# Copyright (C) 2014 hellosign.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
module HelloSign
  # Released version of the HelloSign gem.
  VERSION = '3.2.14'
end
Bump version to 3.2.15
#
# The MIT License (MIT)
#
# Copyright (C) 2014 hellosign.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
module HelloSign
  # Released version of the HelloSign gem.
  VERSION = '3.2.15'
end
|
module HelloSign
  # Released version of the HelloSign gem.
  VERSION = '0.5.0'
end
Release 0.6.0
module HelloSign
  # Released version of the HelloSign gem.
  VERSION = '0.6.0'
end
|
require 'securerandom'
require 'fileutils'
require 'net/ssh'
require 'net/scp'
require 'grid_schedulers/glite_facade'
require 'grid_schedulers/pbs_facade'
require_relative 'infrastructure_facade'
class PLGridFacade < InfrastructureFacade
def initialize
@ui_grid_host = 'ui.grid.cyfronet.pl'
end
def current_state(user)
jobs = PlGridJob.find_all_by_user_id(user.id)
jobs_count = if jobs.nil?
0
else
jobs.size
end
"Currently #{jobs_count} jobs are scheduled or running."
end
# for each job check
# 1. if the experiment is still running - destroy the job otherwise
# 2. if the job is started correctly and is not stuck in a queue - restart if yes
# 3. if the job is running more then 24 hours - restart if yes
def start_monitoring
while true do
lock = MongoLock.new('PlGridJob')
sleep(1) until lock.acquire
begin
Rails.logger.info("[plgrid] #{Time.now} - monitoring thread is working")
# group jobs by the user_id - for each group - login to the ui using the user credentials
PlGridJob.all.group_by(&:user_id).each do |user_id, job_list|
credentials = GridCredentials.find_by_user_id(user_id)
Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
job_list.each do |job|
scheduler = create_scheduler_facade(job.scheduler_type)
ssh.exec!('voms-proxy-init --voms vo.plgrid.pl') if job.scheduler_type == 'glite' # generate new proxy if glite
experiment = Experiment.find_by_id(job.experiment_id)
all, sent, done = experiment.get_statistics unless experiment.nil?
Rails.logger.info("Experiment: #{job.experiment_id} --- nil?: #{experiment.nil?}")
if experiment.nil? or (not experiment.is_running) or (experiment.experiment_size == done)
Rails.logger.info("Experiment '#{job.experiment_id}' is no longer running => destroy the job and temp password")
destroy_and_clean_after(job, scheduler, ssh)
# if the job is not running although it should (create_at + 10.minutes > Time.now) - restart = cancel + start
elsif scheduler.is_job_queued(ssh, job) and (job.created_at + 10.seconds < Time.now)
Rails.logger.info("#{Time.now} - the job will be restarted due to not been run")
scheduler.restart(ssh, job)
elsif job.created_at + 24.hours < Time.now
# if the job is running more than 24 h then restart
Rails.logger.info("#{Time.now} - the job will be restarted due to being run for 24 hours")
scheduler.restart(ssh, job)
elsif scheduler.is_done(ssh, job) or (job.created_at + job.time_limit.minutes < Time.now)
Rails.logger.info("#{Time.now} - the job is done or should be already done - so we will destroy it")
scheduler.cancel(ssh, job)
destroy_and_clean_after(job, scheduler, ssh)
end
end
end
end
rescue Exception => e
Rails.logger.error("[plgrid] An exception occured in the monitoring thread --- #{e}")
end
lock.release
sleep(10)
end
end
def destroy_and_clean_after(job, scheduler, ssh)
Rails.logger.info("Destroying temp pass for #{job.sm_uuid}")
temp_pass = SimulationManagerTempPassword.find_by_sm_uuid(job.sm_uuid)
Rails.logger.info("It is nil ? --- #{temp_pass.nil?}")
temp_pass.destroy unless temp_pass.nil? || temp_pass.longlife
job.destroy
scheduler.clean_after_job(ssh, job)
end
def start_simulation_managers(user, instances_count, experiment_id, additional_params = {})
sm_uuid = SecureRandom.uuid
scheduler = create_scheduler_facade(additional_params['scheduler'])
# prepare locally code of a simulation manager to upload with a configuration file
prepare_configuration_for_simulation_manager(sm_uuid, user.id, experiment_id, additional_params['start_at'])
if credentials = GridCredentials.find_by_user_id(user.id)
# prepare job executable and descriptor
scheduler.prepare_job_files(sm_uuid)
# upload the code to the Grid user interface machine
begin
Net::SCP.start(credentials.host, credentials.login, password: credentials.password) do |scp|
scheduler.send_job_files(sm_uuid, scp)
end
Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
1.upto(instances_count).each do
# retrieve job id and store it in the database for future usage
job = PlGridJob.new({ 'user_id' => user.id, 'experiment_id' => experiment_id, 'created_at' => Time.now,
'scheduler_type' => additional_params['scheduler'], 'sm_uuid' => sm_uuid,
'time_limit' => additional_params['time_limit'].to_i })
job.grant_id = additional_params['grant_id'] unless additional_params['grant_id'].blank?
if scheduler.submit_job(ssh, job)
job.save
else
return 'error', 'Could not submit job'
end
end
end
rescue Net::SSH::AuthenticationFailed => auth_exception
return 'error', I18n.t('plgrid.job_submission.authentication_failed', ex: auth_exception)
rescue Exception => ex
return 'error', I18n.t('plgrid.job_submission.error', ex: ex)
end
return 'ok', I18n.t('plgrid.job_submission.ok', instances_count: instances_count)
else
return 'error', I18n.t('plgrid.job_submission.no_credentials')
end
end
def stop_simulation_managers(user, instances_count, experiment = nil)
raise 'not implemented'
end
def get_running_simulation_managers(user, experiment = nil)
PlGridJob.find_all_by_user_id(user.id)
end
def add_credentials(user, params, session)
credentials = GridCredentials.find_by_user_id(user.id)
if credentials
credentials.login = params[:username]
credentials.password = params[:password]
credentials.host = params[:host]
else
credentials = GridCredentials.new({ 'user_id' => user.id, 'host' => params[:host], 'login' => params[:username] })
credentials.password = params[:password]
end
if params[:save_settings] == 'false'
session[:tmp_plgrid_credentials] = true
else
session.delete(:tmp_plgrid_credentials)
end
credentials.save
'ok'
end
def clean_tmp_credentials(user_id, session)
if session.include?(:tmp_plgrid_credentials)
GridCredentials.find_by_user_id(user_id).destroy
end
end
def create_scheduler_facade(type)
if type == 'qsub'
PBSFacade.new
elsif type == 'glite'
GliteFacade.new
end
end
def default_additional_params
{ 'scheduler' => 'qsub', 'time_limit' => 300 }
end
# Fetches the PL-Grid computing grant identifiers available to the owner of
# +credentials+ by running `plg-show-grants` over SSH and parsing its
# ASCII-table output (rows start with '|').
#
# Returns an Array of grant id strings; empty when credentials are missing
# or the remote call fails.
def retrieve_grants(credentials)
  return [] if credentials.nil?
  grants, grant_output = [], []
  begin
    Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
      # keep only the table rows; .to_s guards against exec! returning nil
      grant_output = ssh.exec!('plg-show-grants').to_s.split("\n").select { |line| line.start_with?('|') }
    end
    grant_output.each do |line|
      grant_id = line.split('|')[1].strip
      # skip the 'GrantID' header row; drop the '(*)' marker suffix
      grants << grant_id.split('(*)').first.strip unless grant_id.include?('GrantID')
    end
  rescue StandardError => e
    # was `rescue Exception`, which would also swallow signals and SystemExit
    Rails.logger.error("Could not read user's grants - #{e}")
  end
  grants
end
end
Reverting grid_facade testing code changes
require 'securerandom'
require 'fileutils'
require 'net/ssh'
require 'net/scp'
require 'grid_schedulers/glite_facade'
require 'grid_schedulers/pbs_facade'
require_relative 'infrastructure_facade'
# Infrastructure facade for running Simulation Managers on the PL-Grid
# infrastructure. Jobs are submitted over SSH/SCP using per-user
# GridCredentials and tracked as PlGridJob records; a background monitoring
# loop reconciles job state with experiment state.
class PLGridFacade < InfrastructureFacade
def initialize
# Well-known PL-Grid user-interface host; actual connections use the host
# stored in each user's GridCredentials.
@ui_grid_host = 'ui.grid.cyfronet.pl'
end
# Human-readable summary of how many PL-Grid jobs the user currently has.
def current_state(user)
jobs = PlGridJob.find_all_by_user_id(user.id)
jobs_count = if jobs.nil?
0
else
jobs.size
end
"Currently #{jobs_count} jobs are scheduled or running."
end
# for each job check
# 1. if the experiment is still running - destroy the job otherwise
# 2. if the job is started correctly and is not stuck in a queue - restart if yes
# 3. if the job is running more then 24 hours - restart if yes
#
# Endless loop intended for a background thread: each iteration acquires the
# 'PlGridJob' Mongo lock, opens one SSH session per user, reconciles that
# user's jobs, then releases the lock and sleeps 60 seconds.
# NOTE(review): `rescue Exception` below also swallows SignalException and
# SystemExit; StandardError would be safer — confirm before changing.
def start_monitoring
while true do
lock = MongoLock.new('PlGridJob')
sleep(1) until lock.acquire
begin
Rails.logger.info("[plgrid] #{Time.now} - monitoring thread is working")
# group jobs by the user_id - for each group - login to the ui using the user credentials
PlGridJob.all.group_by(&:user_id).each do |user_id, job_list|
credentials = GridCredentials.find_by_user_id(user_id)
Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
job_list.each do |job|
scheduler = create_scheduler_facade(job.scheduler_type)
ssh.exec!('voms-proxy-init --voms vo.plgrid.pl') if job.scheduler_type == 'glite' # generate new proxy if glite
experiment = Experiment.find_by_id(job.experiment_id)
all, sent, done = experiment.get_statistics unless experiment.nil?
Rails.logger.info("Experiment: #{job.experiment_id} --- nil?: #{experiment.nil?}")
if experiment.nil? or (not experiment.is_running) or (experiment.experiment_size == done)
Rails.logger.info("Experiment '#{job.experiment_id}' is no longer running => destroy the job and temp password")
destroy_and_clean_after(job, scheduler, ssh)
# if the job is not running although it should (create_at + 10.minutes > Time.now) - restart = cancel + start
elsif scheduler.is_job_queued(ssh, job) and (job.created_at + 10.minutes < Time.now)
Rails.logger.info("#{Time.now} - the job will be restarted due to not been run")
scheduler.restart(ssh, job)
elsif job.created_at + 24.hours < Time.now
# if the job is running more than 24 h then restart
Rails.logger.info("#{Time.now} - the job will be restarted due to being run for 24 hours")
scheduler.restart(ssh, job)
elsif scheduler.is_done(ssh, job) or (job.created_at + job.time_limit.minutes < Time.now)
Rails.logger.info("#{Time.now} - the job is done or should be already done - so we will destroy it")
scheduler.cancel(ssh, job)
destroy_and_clean_after(job, scheduler, ssh)
end
end
end
end
rescue Exception => e
Rails.logger.error("[plgrid] An exception occured in the monitoring thread --- #{e}")
end
lock.release
sleep(60)
end
end
# Tears down a finished or stale job: removes its temporary Simulation
# Manager password (unless flagged longlife), deletes the job record and
# lets the scheduler clean its remote files.
def destroy_and_clean_after(job, scheduler, ssh)
Rails.logger.info("Destroying temp pass for #{job.sm_uuid}")
temp_pass = SimulationManagerTempPassword.find_by_sm_uuid(job.sm_uuid)
Rails.logger.info("It is nil ? --- #{temp_pass.nil?}")
temp_pass.destroy unless temp_pass.nil? || temp_pass.longlife
job.destroy
scheduler.clean_after_job(ssh, job)
end
# Submits +instances_count+ scheduler jobs running Simulation Managers for
# the given experiment. Returns a two-element array: 'ok' or 'error' plus a
# (mostly localized) status message.
def start_simulation_managers(user, instances_count, experiment_id, additional_params = {})
sm_uuid = SecureRandom.uuid
scheduler = create_scheduler_facade(additional_params['scheduler'])
# prepare locally code of a simulation manager to upload with a configuration file
prepare_configuration_for_simulation_manager(sm_uuid, user.id, experiment_id, additional_params['start_at'])
if credentials = GridCredentials.find_by_user_id(user.id)
# prepare job executable and descriptor
scheduler.prepare_job_files(sm_uuid)
# upload the code to the Grid user interface machine
begin
Net::SCP.start(credentials.host, credentials.login, password: credentials.password) do |scp|
scheduler.send_job_files(sm_uuid, scp)
end
Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
1.upto(instances_count).each do
# retrieve job id and store it in the database for future usage
job = PlGridJob.new({ 'user_id' => user.id, 'experiment_id' => experiment_id, 'created_at' => Time.now,
'scheduler_type' => additional_params['scheduler'], 'sm_uuid' => sm_uuid,
'time_limit' => additional_params['time_limit'].to_i })
job.grant_id = additional_params['grant_id'] unless additional_params['grant_id'].blank?
if scheduler.submit_job(ssh, job)
job.save
else
return 'error', 'Could not submit job'
end
end
end
rescue Net::SSH::AuthenticationFailed => auth_exception
return 'error', I18n.t('plgrid.job_submission.authentication_failed', ex: auth_exception)
rescue Exception => ex
return 'error', I18n.t('plgrid.job_submission.error', ex: ex)
end
return 'ok', I18n.t('plgrid.job_submission.ok', instances_count: instances_count)
else
return 'error', I18n.t('plgrid.job_submission.no_credentials')
end
end
# Stopping managers is not supported by this facade yet.
def stop_simulation_managers(user, instances_count, experiment = nil)
raise 'not implemented'
end
# All PlGrid jobs belonging to the user.
# NOTE(review): the experiment argument is currently ignored.
def get_running_simulation_managers(user, experiment = nil)
PlGridJob.find_all_by_user_id(user.id)
end
# Creates or updates the user's grid credentials from form params, marking
# the session when they should be treated as temporary. Always returns 'ok'.
# NOTE(review): the result of credentials.save is ignored — a failed save
# still reports 'ok'; confirm whether that is intended.
def add_credentials(user, params, session)
credentials = GridCredentials.find_by_user_id(user.id)
if credentials
credentials.login = params[:username]
credentials.password = params[:password]
credentials.host = params[:host]
else
credentials = GridCredentials.new({ 'user_id' => user.id, 'host' => params[:host], 'login' => params[:username] })
credentials.password = params[:password]
end
if params[:save_settings] == 'false'
session[:tmp_plgrid_credentials] = true
else
session.delete(:tmp_plgrid_credentials)
end
credentials.save
'ok'
end
# Deletes stored credentials when the session flagged them as temporary.
def clean_tmp_credentials(user_id, session)
if session.include?(:tmp_plgrid_credentials)
GridCredentials.find_by_user_id(user_id).destroy
end
end
# Maps a scheduler type string ('qsub'/'glite') to a facade instance;
# returns nil for unrecognised types.
def create_scheduler_facade(type)
if type == 'qsub'
PBSFacade.new
elsif type == 'glite'
GliteFacade.new
end
end
# Defaults used when the submission form provides no parameters.
def default_additional_params
{ 'scheduler' => 'qsub', 'time_limit' => 300 }
end
# Lists PL-Grid grant ids for the credentials' owner by parsing the table
# output of `plg-show-grants` run over SSH. Returns [] when credentials are
# missing or the remote call fails.
# NOTE(review): `rescue Exception` also swallows signals; consider
# StandardError.
def retrieve_grants(credentials)
return [] if credentials.nil?
grants, grant_output = [], []
begin
Net::SSH.start(credentials.host, credentials.login, password: credentials.password) do |ssh|
grant_output = ssh.exec!('plg-show-grants').split("\n").select{|line| line.start_with?('|')}
end
grant_output.each do |line|
grant_id = line.split('|')[1].strip
grants << grant_id.split('(*)').first.strip unless grant_id.include?('GrantID')
end
rescue Exception => e
Rails.logger.error("Could not read user's grants - #{e}")
end
grants
end
end
#
# trac.rb
#
# Plugin to rbot (http://ruby-rbot.org/), an irc bot, to provide
# services related to MacPorts trac system for the #macports channel
# on freenode.net, created from PortPlugin by James D. Berry
#
# By Andrea D'Amore
#
# $Id: $
require 'stringio'
# rbot plugin exposing MacPorts Trac helpers (currently only ticket links).
class TracPlugin < Plugin
  # Help text shown by rbot's help system.
  def help(plugin, topic="")
    case topic
    when "ticket"
      return "ticket <ticket no.> => show http link for ticket # <ticket no.>"
    else
      return "trac commands: ticket"
    end
  end

  # Replies with the Trac URL for the given ticket number.
  # Accepts both '1234' and '#1234'.
  def ticket(m, params)
    # \d+ (not \d*): a bare '#' used to capture the empty string, which is
    # truthy in Ruby and produced a link with no ticket number.
    number = params[:number][/^#?(\d+)$/, 1]
    if number
      url = "http://trac.macports.org/ticket/" + number
      m.reply "#{url}"
    else
      m.reply "Use either #1234 or 1234 for ticket number"
    end
  end
end
# Instantiate the plugin and route the !ticket command to TracPlugin#ticket.
plugin = TracPlugin.new
plugin.map 'ticket :number', :action => 'ticket'
More little steps for mpbot.
git-svn-id: 620571fa9b4bd0cbce9a0cf901e91ef896adbf27@50330 d073be05-634f-4543-b044-5fe20cf6d1d6
#
# trac.rb
#
# Plugin to rbot (http://ruby-rbot.org/), an irc bot, to provide
# services related to MacPorts trac system for the #macports channel
# on freenode.net, created from PortPlugin by James D. Berry
#
# By Andrea D'Amore
#
# $Id: $
require 'stringio'
# rbot plugin exposing MacPorts Trac helpers: ticket links, FAQ and guide URLs.
class TracPlugin < Plugin
  # Help text shown by rbot's help system.
  def help(plugin, topic="")
    case topic
    when "ticket"
      return "ticket <ticket no.> => show http link for ticket # <ticket no.>"
    when "faq"
      return "faq [help] => show FAQs' URL or help"
    when "guide"
      return "guide [help] => show The Guide's URL or help. Don't Panic."
    else
      return "trac module provides: !ticket, !faq, !guide"
    end
  end

  # Replies with the Trac URL for the given ticket number.
  # Accepts both '1234' and '#1234'.
  def ticket(m, params)
    # \d+ (not \d*): a bare '#' used to capture the empty string, which is
    # truthy in Ruby and produced a link with no ticket number.
    number = params[:number][/^#?(\d+)$/, 1]
    if number
      url = "http://trac.macports.org/ticket/" + number
      m.reply "#{url}"
    else
      m.reply "Use either #1234 or 1234 for ticket number"
    end
  end

  # Replies with the FAQ URL (subcommands not implemented yet).
  def faq(m, params)
    if params[:parm]
      m.reply "Just type !faq for now"
    else
      m.reply "FAQs are at: http://trac.macports.org/wiki/FAQ"
    end
  end

  # Replies with The Guide's URL; 'chunked' selects the chunked edition.
  def guide(m, params)
    if params[:parm] == "chunked"
      m.reply "http://guide.macports.org/chunked/index.html"
    elsif params[:parm] != ""
      # was "Just type !faq for now" — copy/paste slip from #faq
      m.reply "Just type !guide for now"
    else
      # was the FAQ URL — another copy/paste slip from #faq
      m.reply "The Guide is at: http://guide.macports.org/"
    end
  end
end
# Instantiate the plugin and route the !ticket, !faq and !guide commands.
plugin = TracPlugin.new
plugin.map 'ticket :number', :action => 'ticket'
plugin.map 'faq :parm', :action => 'faq'
plugin.map 'guide :parm', :action => 'guide'
require 'rubygems'
require 'bundler/setup'
require 'plist'
require_relative 'paths.rb'
# adapt this shell script in ruby
# http://www.merowing.info/2013/03/overlaying-application-version-on-top-of-your-icon/
# Regenerates both app icons (1x and 2x) from their *_base source files,
# stamping build information onto each via addInfosToIcon.
def updateIcon(xcode_settings, deploy)
  icon_pairs = [["icone_base.png", "icone.png"], ["icone_base@2x.png", "icone@2x.png"]]
  icon_pairs.each do |base_name, final_name|
    addInfosToIcon(xcode_settings, deploy,
                   "#{xcode_settings[:projectDirectory]}/#{base_name}",
                   "#{xcode_settings[:projectDirectory]}/#{final_name}")
  end
end
# Composites a caption (bundle version, server configuration and short git
# commit hash) onto the bottom of an icon using ImageMagick's `convert`.
# Requires the `identify`/`convert` binaries and a git checkout; the result
# is written to dest_file via system().
def addInfosToIcon(xcode_settings, deploy, source_file, dest_file)
  projectInfosPath = xcode_settings[:projectInfosPath]
  projectInfos = Plist::parse_xml(projectInfosPath)
  version = projectInfos["CFBundleVersion"]
  commit = `git rev-parse --short HEAD`.strip
  # (removed: an unused `branch` local computed via `git rev-parse --abbrev-ref HEAD`)
  pjServerConf = fileNameForEnv deploy["PJServerConf"]
  width = `identify -format %w #{source_file}`
  # render the caption banner and composite it at the bottom of the icon
  command = "convert -background '#0008'"
  command += " -fill white -gravity center"
  command += " -size #{width}x40"
  command += " caption:\"#{version} #{pjServerConf} #{commit}\" \"#{source_file}\""
  command += " +swap -gravity south -composite \"#{dest_file}\""
  system(command)
end
Parametrize the customization of the app's icon
require 'rubygems'
require 'bundler/setup'
require 'plist'
require_relative 'paths.rb'
# adapt this shell script in ruby
# http://www.merowing.info/2013/03/overlaying-application-version-on-top-of-your-icon/
# Regenerates both app icons (1x and 2x) from their *_base source files,
# stamping build information onto each via addInfosToIcon. Skips silently
# unless the deploy configuration opts in through icon.addExtraInfosInIcon;
# also tolerates a deploy hash with no "icon" section at all (previously a
# NoMethodError on nil).
def updateIcon (xcode_settings , deploy)
  icon_conf = deploy["icon"] || {}
  return unless icon_conf["addExtraInfosInIcon"]
  icons = [["icone_base.png" , "icone.png"] , ["icone_base@2x.png" , "icone@2x.png"]]
  icons.each do |files|
    source_file = "#{xcode_settings[:projectDirectory]}/#{files[0]}"
    dest_file = "#{xcode_settings[:projectDirectory]}/#{files[1]}"
    addInfosToIcon xcode_settings , deploy , source_file , dest_file
  end
end
# Composites a caption onto the bottom of an icon using ImageMagick's
# `convert`; the caption content is selected by iconCaptionForDeploy.
# NOTE(review): `version`, `commit`, `branch` and `pjServerConf` are computed
# here, but iconCaptionForDeploy builds the caption in its own scope where
# those locals do not exist — this looks like it raises NameError as soon as
# a caption flag is enabled. Verify and pass the values explicitly.
def addInfosToIcon (xcode_settings , deploy , source_file , dest_file)
projectInfosPath = xcode_settings[:projectInfosPath]
projectInfos = Plist::parse_xml(projectInfosPath)
version = projectInfos["CFBundleVersion"]
commit = `git rev-parse --short HEAD`.strip
branch = `git rev-parse --abbrev-ref HEAD`.strip
pjServerConf = fileNameForEnv deploy["PJServerConf"]
width = `identify -format %w #{source_file}`
caption = iconCaptionForDeploy deploy
# render the caption banner and composite it at the bottom of the icon
command = "convert -background '#0008'"
command += " -fill white -gravity center"
command += " -size #{width}x40"
command += " caption:\"#{caption}\" \"#{source_file}\""
command += " +swap -gravity south -composite \"#{dest_file}\""
system(command)
end
# Builds the icon caption from the deploy configuration's icon flags.
#
# Fixes two defects in the previous version:
# * version/pjServerConf/commit were referenced as free locals inside this
#   method (NameError at runtime); they are now optional parameters, so the
#   old single-argument call form remains valid (yielding an empty caption).
# * the flag tests were inverted (`!deploy["icon"][...]`), so for example
#   addBuildNumber: true would *omit* the build number.
def iconCaptionForDeploy(deploy, version = "", pjServerConf = "", commit = "")
  caption = ""
  caption += "#{version}" if deploy["icon"]["addBuildNumber"]
  caption += "#{pjServerConf}" if deploy["icon"]["addCIMobEnv"]
  caption += "#{commit}" if deploy["icon"]["addCommitId"]
  caption
end
|
module ProMotion
# Mixin for UITableViewCell classes used by PM::TableScreen: #setup applies
# a per-cell data hash (title, subtitle, images, accessory, selection style)
# to the cell instance.
module TableViewCellModule
include Styling
attr_accessor :data_cell, :table_screen
# Stores the cell data hash plus a weak reference to the owning screen
# (WeakRef so the cell does not keep the screen alive), then applies each
# supported attribute in turn.
def setup(data_cell, screen)
self.table_screen = WeakRef.new(screen)
self.data_cell = data_cell
check_deprecated_styles
set_styles
set_title
set_subtitle
set_image
set_remote_image
set_accessory_view
set_selection_style
end
protected
# TODO: Remove this in ProMotion 2.1. Just for migration purposes.
def check_deprecated_styles
whitelist = [ :title, :subtitle, :image, :remote_image, :accessory, :selection_style, :action, :arguments, :cell_style, :cell_class, :cell_identifier, :editing_style, :search_text, :keep_selection, :height ]
# any key outside the whitelist should have been moved into `styles:`
if (data_cell.keys - whitelist).length > 0
PM.logger.deprecated("In #{self.table_screen.class.to_s}#table_data, you should set :#{(data_cell.keys - whitelist).join(", :")} in a `styles:` hash. See TableScreen documentation.")
end
end
# Applies the optional `style:` attribute hash to the cell itself.
def set_styles
set_attributes self, data_cell[:style] if data_cell[:style]
end
# Sets the main text label (plain or attributed string).
def set_title
set_attributed_text(self.textLabel, data_cell[:title])
end
# Sets the detail label, when both a subtitle and a detail label exist.
def set_subtitle
return unless data_cell[:subtitle] && self.detailTextLabel
set_attributed_text(self.detailTextLabel, data_cell[:subtitle])
self.detailTextLabel.backgroundColor = UIColor.clearColor
self.detailTextLabel.autoresizingMask = UIViewAutoresizingFlexibleWidth
end
# Loads the cell image asynchronously through JMImageCache, showing an
# optional placeholder until the download completes. No-op unless a
# remote_image is configured and the JMImageCache pod is available.
def set_remote_image
return unless data_cell[:remote_image] && jm_image_cache?
self.imageView.image = remote_placeholder
JMImageCache.sharedCache.imageForURL(data_cell[:remote_image][:url].to_url, completionBlock:proc { |downloaded_image|
self.imageView.image = downloaded_image
self.setNeedsLayout
})
self.imageView.layer.masksToBounds = true
self.imageView.layer.cornerRadius = data_cell[:remote_image][:radius] if data_cell[:remote_image][:radius]
self.imageView.contentMode = map_content_mode_symbol(data_cell[:remote_image][:content_mode]) if data_cell[:remote_image][:content_mode]
end
# Sets a local image; accepts a UIImage, an image name String, or a Hash
# with :image and optional :radius.
def set_image
return unless data_cell[:image]
cell_image = data_cell[:image].is_a?(Hash) ? data_cell[:image][:image] : data_cell[:image]
cell_image = UIImage.imageNamed(cell_image) if cell_image.is_a?(String)
self.imageView.layer.masksToBounds = true
self.imageView.image = cell_image
self.imageView.layer.cornerRadius = data_cell[:image][:radius] if data_cell[:image].is_a?(Hash) && data_cell[:image][:radius]
end
# Installs the accessory view (a UISwitch when :switch is requested,
# otherwise the provided view); clears it when none is configured.
def set_accessory_view
return self.accessoryView = nil unless data_cell[:accessory] && data_cell[:accessory][:view]
if data_cell[:accessory][:view] == :switch
self.accessoryView = switch_view
else
self.accessoryView = data_cell[:accessory][:view]
self.accessoryView.autoresizingMask = UIViewAutoresizingFlexibleWidth
end
end
# Maps the optional :selection_style symbol onto the UIKit constant.
def set_selection_style
self.selectionStyle = map_selection_style_symbol(data_cell[:selection_style]) if data_cell[:selection_style]
end
private
# True when the JMImageCache category is present on the image view;
# otherwise logs an error explaining the missing CocoaPod.
def jm_image_cache?
return true if self.imageView.respond_to?("setImageWithURL:placeholder:")
PM.logger.error "ProMotion Warning: to use remote_image with TableScreen you need to include the CocoaPod 'JMImageCache'."
false
end
# Placeholder UIImage for remote images, when given as an image name.
def remote_placeholder
UIImage.imageNamed(data_cell[:remote_image][:placeholder]) if data_cell[:remote_image][:placeholder].is_a?(String)
end
# Builds the UISwitch accessory wired back to the table screen's
# accessory_toggled_switch: handler.
def switch_view
switch = UISwitch.alloc.initWithFrame(CGRectZero)
switch.setAccessibilityLabel(data_cell[:accessory][:accessibility_label] || data_cell[:title])
switch.addTarget(self.table_screen, action: "accessory_toggled_switch:", forControlEvents:UIControlEventValueChanged)
switch.on = !!data_cell[:accessory][:value]
switch
end
# Assigns either attributedText or text depending on the value's type.
def set_attributed_text(label, text)
text.is_a?(NSAttributedString) ? label.attributedText = text : label.text = text
end
# Symbol -> UIViewContentMode constant (unknown values pass through).
def map_content_mode_symbol(symbol)
{
scale_to_fill: UIViewContentModeScaleToFill,
scale_aspect_fit: UIViewContentModeScaleAspectFit,
scale_aspect_fill: UIViewContentModeScaleAspectFill,
mode_redraw: UIViewContentModeRedraw
}[symbol] || symbol
end
# Symbol -> UITableViewCellSelectionStyle constant (unknown values pass
# through).
def map_selection_style_symbol(symbol)
{
none: UITableViewCellSelectionStyleNone,
blue: UITableViewCellSelectionStyleBlue,
gray: UITableViewCellSelectionStyleGray,
default: UITableViewCellSelectionStyleDefault
}[symbol] || symbol
end
end
end
Adds ability to set the cell's accessoryType attribute.
module ProMotion
# Mixin for UITableViewCell classes used by PM::TableScreen: #setup applies
# a per-cell data hash (title, subtitle, images, accessory, selection style,
# accessory type) to the cell instance.
module TableViewCellModule
include Styling
attr_accessor :data_cell, :table_screen
# Stores the cell data hash plus a weak reference to the owning screen
# (WeakRef so the cell does not keep the screen alive), then applies each
# supported attribute in turn.
def setup(data_cell, screen)
self.table_screen = WeakRef.new(screen)
self.data_cell = data_cell
check_deprecated_styles
set_styles
set_title
set_subtitle
set_image
set_remote_image
set_accessory_view
set_selection_style
set_accessory_type
end
protected
# TODO: Remove this in ProMotion 2.1. Just for migration purposes.
def check_deprecated_styles
whitelist = [ :title, :subtitle, :image, :remote_image, :accessory, :selection_style, :action, :arguments, :cell_style, :cell_class, :cell_identifier, :editing_style, :search_text, :keep_selection, :height, :accessory_type ]
# any key outside the whitelist should have been moved into `styles:`
if (data_cell.keys - whitelist).length > 0
PM.logger.deprecated("In #{self.table_screen.class.to_s}#table_data, you should set :#{(data_cell.keys - whitelist).join(", :")} in a `styles:` hash. See TableScreen documentation.")
end
end
# Applies the optional `style:` attribute hash to the cell itself.
def set_styles
set_attributes self, data_cell[:style] if data_cell[:style]
end
# Sets the main text label (plain or attributed string).
def set_title
set_attributed_text(self.textLabel, data_cell[:title])
end
# Sets the detail label, when both a subtitle and a detail label exist.
def set_subtitle
return unless data_cell[:subtitle] && self.detailTextLabel
set_attributed_text(self.detailTextLabel, data_cell[:subtitle])
self.detailTextLabel.backgroundColor = UIColor.clearColor
self.detailTextLabel.autoresizingMask = UIViewAutoresizingFlexibleWidth
end
# Loads the cell image asynchronously through JMImageCache, showing an
# optional placeholder until the download completes. No-op unless a
# remote_image is configured and the JMImageCache pod is available.
def set_remote_image
return unless data_cell[:remote_image] && jm_image_cache?
self.imageView.image = remote_placeholder
JMImageCache.sharedCache.imageForURL(data_cell[:remote_image][:url].to_url, completionBlock:proc { |downloaded_image|
self.imageView.image = downloaded_image
self.setNeedsLayout
})
self.imageView.layer.masksToBounds = true
self.imageView.layer.cornerRadius = data_cell[:remote_image][:radius] if data_cell[:remote_image][:radius]
self.imageView.contentMode = map_content_mode_symbol(data_cell[:remote_image][:content_mode]) if data_cell[:remote_image][:content_mode]
end
# Sets a local image; accepts a UIImage, an image name String, or a Hash
# with :image and optional :radius.
def set_image
return unless data_cell[:image]
cell_image = data_cell[:image].is_a?(Hash) ? data_cell[:image][:image] : data_cell[:image]
cell_image = UIImage.imageNamed(cell_image) if cell_image.is_a?(String)
self.imageView.layer.masksToBounds = true
self.imageView.image = cell_image
self.imageView.layer.cornerRadius = data_cell[:image][:radius] if data_cell[:image].is_a?(Hash) && data_cell[:image][:radius]
end
# Installs the accessory view (a UISwitch when :switch is requested,
# otherwise the provided view); clears it when none is configured.
def set_accessory_view
return self.accessoryView = nil unless data_cell[:accessory] && data_cell[:accessory][:view]
if data_cell[:accessory][:view] == :switch
self.accessoryView = switch_view
else
self.accessoryView = data_cell[:accessory][:view]
self.accessoryView.autoresizingMask = UIViewAutoresizingFlexibleWidth
end
end
# Maps the optional :selection_style symbol onto the UIKit constant.
def set_selection_style
self.selectionStyle = map_selection_style_symbol(data_cell[:selection_style]) if data_cell[:selection_style]
end
# Maps the optional :accessory_type symbol onto the UIKit constant.
def set_accessory_type
self.accessoryType = map_accessory_type_symbol(data_cell[:accessory_type]) if data_cell[:accessory_type]
end
private
# True when the JMImageCache category is present on the image view;
# otherwise logs an error explaining the missing CocoaPod.
def jm_image_cache?
return true if self.imageView.respond_to?("setImageWithURL:placeholder:")
PM.logger.error "ProMotion Warning: to use remote_image with TableScreen you need to include the CocoaPod 'JMImageCache'."
false
end
# Placeholder UIImage for remote images, when given as an image name.
def remote_placeholder
UIImage.imageNamed(data_cell[:remote_image][:placeholder]) if data_cell[:remote_image][:placeholder].is_a?(String)
end
# Builds the UISwitch accessory wired back to the table screen's
# accessory_toggled_switch: handler.
def switch_view
switch = UISwitch.alloc.initWithFrame(CGRectZero)
switch.setAccessibilityLabel(data_cell[:accessory][:accessibility_label] || data_cell[:title])
switch.addTarget(self.table_screen, action: "accessory_toggled_switch:", forControlEvents:UIControlEventValueChanged)
switch.on = !!data_cell[:accessory][:value]
switch
end
# Assigns either attributedText or text depending on the value's type.
def set_attributed_text(label, text)
text.is_a?(NSAttributedString) ? label.attributedText = text : label.text = text
end
# Symbol -> UIViewContentMode constant (unknown values pass through).
def map_content_mode_symbol(symbol)
{
scale_to_fill: UIViewContentModeScaleToFill,
scale_aspect_fit: UIViewContentModeScaleAspectFit,
scale_aspect_fill: UIViewContentModeScaleAspectFill,
mode_redraw: UIViewContentModeRedraw
}[symbol] || symbol
end
# Symbol -> UITableViewCellSelectionStyle constant (unknown values pass
# through).
def map_selection_style_symbol(symbol)
{
none: UITableViewCellSelectionStyleNone,
blue: UITableViewCellSelectionStyleBlue,
gray: UITableViewCellSelectionStyleGray,
default: UITableViewCellSelectionStyleDefault
}[symbol] || symbol
end
# Symbol -> UITableViewCellAccessoryType constant (unknown values pass
# through).
def map_accessory_type_symbol(symbol)
{
none: UITableViewCellAccessoryNone,
disclosure_indicator: UITableViewCellAccessoryDisclosureIndicator,
disclosure_button: UITableViewCellAccessoryDetailDisclosureButton,
checkmark: UITableViewCellAccessoryCheckmark,
detail_button: UITableViewCellAccessoryDetailButton
}[symbol] || symbol
end
end
end
|
module Hieracles
  # Filesystem-backed lookups for farms, ENC nodes and puppet modules,
  # driven by the paths configured on the +config+ object.
  module Registry
    extend self

    # Sorted list of paths matching the configured classpath pattern
    # (a format string whose '%s' is replaced by a glob wildcard).
    def farms(config)
      Dir.glob(format(config.classpath, '*')).sort
    end

    # Node names: basenames (without extension) of *.yaml files under
    # the ENC directory, sorted.
    def nodes(config)
      pattern = File.join(config.encpath, '*.yaml')
      Dir.glob(pattern).sort.map { |path| File.basename(path, '.yaml') }
    end

    # Module names: basenames of all entries under the module path, sorted.
    def modules(config)
      Dir.glob(File.join(config.modulepath, '*')).sort.map { |path| File.basename(path) }
    end
  end
end
fix registry farms
module Hieracles
  # Filesystem-backed lookups for farms, ENC nodes and puppet modules,
  # driven by the paths configured on the +config+ object.
  module Registry
    extend self

    # Farm names extracted from the paths matching the classpath pattern.
    # config.classpath is a format string containing '%s' (the farm name);
    # the same pattern, with '%s' turned into a capture group, pulls the
    # name back out of each matched path.
    def farms(config)
      # build the extraction regex once instead of once per matched path
      extractor = Regexp.new(".*#{config.classpath.sub(/%s/, '([^/]*)')}")
      Dir.glob(format(config.classpath, '*')).sort.map do |f|
        f.sub(extractor, "\\1")
      end
    end

    # Node names: basenames (without extension) of *.yaml files under
    # the ENC directory, sorted.
    def nodes(config)
      Dir.glob(File.join(config.encpath, '*.yaml')).sort.map do |f|
        File.basename(f, '.yaml')
      end
    end

    # Module names: basenames of all entries under the module path, sorted.
    def modules(config)
      Dir.glob(File.join(config.modulepath, '*')).sort.map do |f|
        File.basename(f)
      end
    end
  end
end
|
module Idobata::Hook
  # Renders Backlog webhook payloads for Idobata.
  class Backlog < Base
    # Payload types rendered with the 'issue' template (a fixed whitelist
    # of Backlog event type codes).
    ISSUE_TYPES = [1, 2, 3, 4, 14, 17].freeze

    screen_name 'backlog'
    icon_url hook_image_url('icon.png')
    template_name { "#{type}.html.haml" }
    helper Helper

    private

    # Template basename for the incoming payload; nil for unhandled types.
    def type
      'issue' if ISSUE_TYPES.include?(payload.type)
    end

    # Backlog space id from the hook params, when present.
    def space_id
      params[:space_id] || nil
    end
  end
end
add before render
module Idobata::Hook
  # Renders Backlog webhook payloads for Idobata.
  class Backlog < Base
    # Payload types rendered with the 'issue' template (a fixed whitelist
    # of Backlog event type codes).
    ISSUE_TYPES = [1, 2, 3, 4, 14, 17].freeze

    screen_name 'backlog'
    icon_url hook_image_url('icon.png')
    template_name { "#{type}.html.haml" }
    helper Helper

    # Skip payloads we have no template for.
    before_render do
      skip_processing! unless type
    end

    private

    # Template basename for the incoming payload; nil for unhandled types.
    def type
      'issue' if ISSUE_TYPES.include?(payload.type)
    end

    # Backlog space id from the hook params, when present.
    def space_id
      params[:space_id] || nil
    end
  end
end
|
# frozen_string_literal: true
require 'cgi'
require 'how_is/report/base_report'
class HowIs
# Renders a report as a standalone HTML page. The formatting methods append
# fragments to the @r accumulator string; #export resets it and delegates
# content generation to generate_report_text! (defined outside this class,
# presumably in BaseReport — confirm).
class HtmlReport < BaseReport
# Identifier of this report format.
def format
:html
end
# Records the page title (reused by #export_file) and appends it as an <h1>.
def title(_text)
@title = _text
@r += "\n<h1>#{_text}</h1>\n"
end
# Appends a section heading.
def header(_text)
@r += "\n<h2>#{_text}</h2>\n"
end
# Returns (does not append) an anchor tag for the given text and URL.
# NOTE(review): neither _text nor url is HTML-escaped here.
def link(_text, url)
%Q[<a href="#{url}">#{_text}</a>]
end
# Appends a paragraph.
def text(_text)
@r += "<p>#{_text}</p>\n"
end
# Appends a <ul> with one <li> per array element.
def unordered_list(arr)
@r += "\n<ul>\n"
arr.each do |item|
@r += " <li>#{item}</li>\n"
end
@r += "</ul>\n\n"
end
# Appends a table-based horizontal bar graph. +data+ rows are
# [label, value, optional_url]; each bar width is the value as a percentage
# of the largest value (integer division when the values are integers).
# A lone "(No label)" row means there is nothing to graph.
def horizontal_bar_graph(data)
if data.length == 1 && data[0][0] == "(No label)"
text "There are no open issues to graph."
return
end
biggest = data.map { |x| x[1] }.max
get_percentage = ->(number_of_issues) { number_of_issues * 100 / biggest }
longest_label_length = data.map(&:first).map(&:length).max
label_width = "#{longest_label_length}ch"
@r += "<table class=\"horizontal-bar-graph\">\n"
data.each do |row|
percentage = get_percentage.(row[1])
if row[2]
label_text = link(row[0], row[2])
else
label_text = row[0]
end
@r += <<-EOF
<tr>
<td style="width: #{label_width}">#{label_text}</td>
<td><span class="fill" style="width: #{percentage}%">#{row[1]}</span></td>
</tr>
EOF
end
@r += "</table>\n"
end
# Resets the accumulator and builds the report body; returns the HTML
# fragment produced by generate_report_text!.
def export
@r = ''
generate_report_text!
end
# Writes a complete HTML document (inline styles wrapping the report body)
# to +file+.
def export_file(file)
report = export
File.open(file, 'w') do |f|
f.puts <<-EOF
<!DOCTYPE html>
<html>
<head>
<title>#{@title}</title>
<style>
body { font: sans-serif; }
main {
max-width: 600px;
max-width: 72ch;
margin: auto;
}
.horizontal-bar-graph {
position: relative;
width: 100%;
}
.horizontal-bar-graph .fill {
display: inline-block;
background: #CCC;
}
</style>
</head>
<body>
<main>
#{report}
</main>
</body>
</html>
EOF
end
end
end
end
[how_is/report/html] Switch export_file to squiggly heredocs
# frozen_string_literal: true
require 'cgi'
require 'how_is/report/base_report'
class HowIs
# Renders a report as a standalone HTML page. The formatting methods append
# fragments to the @r accumulator string; #export resets it and delegates
# content generation to generate_report_text! (defined outside this class,
# presumably in BaseReport — confirm).
class HtmlReport < BaseReport
# Identifier of this report format.
def format
:html
end
# Records the page title (reused by #export_file) and appends it as an <h1>.
def title(_text)
@title = _text
@r += "\n<h1>#{_text}</h1>\n"
end
# Appends a section heading.
def header(_text)
@r += "\n<h2>#{_text}</h2>\n"
end
# Returns (does not append) an anchor tag for the given text and URL.
# NOTE(review): neither _text nor url is HTML-escaped here.
def link(_text, url)
%Q[<a href="#{url}">#{_text}</a>]
end
# Appends a paragraph.
def text(_text)
@r += "<p>#{_text}</p>\n"
end
# Appends a <ul> with one <li> per array element.
def unordered_list(arr)
@r += "\n<ul>\n"
arr.each do |item|
@r += " <li>#{item}</li>\n"
end
@r += "</ul>\n\n"
end
# Appends a table-based horizontal bar graph. +data+ rows are
# [label, value, optional_url]; each bar width is the value as a percentage
# of the largest value (integer division when the values are integers).
# A lone "(No label)" row means there is nothing to graph.
def horizontal_bar_graph(data)
if data.length == 1 && data[0][0] == "(No label)"
text "There are no open issues to graph."
return
end
biggest = data.map { |x| x[1] }.max
get_percentage = ->(number_of_issues) { number_of_issues * 100 / biggest }
longest_label_length = data.map(&:first).map(&:length).max
label_width = "#{longest_label_length}ch"
@r += "<table class=\"horizontal-bar-graph\">\n"
data.each do |row|
percentage = get_percentage.(row[1])
if row[2]
label_text = link(row[0], row[2])
else
label_text = row[0]
end
@r += <<-EOF
<tr>
<td style="width: #{label_width}">#{label_text}</td>
<td><span class="fill" style="width: #{percentage}%">#{row[1]}</span></td>
</tr>
EOF
end
@r += "</table>\n"
end
# Resets the accumulator and builds the report body; returns the HTML
# fragment produced by generate_report_text!.
def export
@r = ''
generate_report_text!
end
# Writes a complete HTML document (inline styles wrapping the report body)
# to +file+. The squiggly heredoc (<<~) strips the common leading
# indentation from the emitted document.
def export_file(file)
report = export
File.open(file, 'w') do |f|
f.puts <<~EOF
<!DOCTYPE html>
<html>
<head>
<title>#{@title}</title>
<style>
body { font: sans-serif; }
main {
max-width: 600px;
max-width: 72ch;
margin: auto;
}
.horizontal-bar-graph {
position: relative;
width: 100%;
}
.horizontal-bar-graph .fill {
display: inline-block;
background: #CCC;
}
</style>
</head>
<body>
<main>
#{report}
</main>
</body>
</html>
EOF
end
end
end
end
|
# InboxSync top-level namespace.
module InboxSync
  # Gem version string. Frozen so the shared constant cannot be mutated
  # in place.
  VERSION = "0.2.1".freeze
end
version to 0.3.0
* filter DSL
# InboxSync top-level namespace.
module InboxSync
  # Gem version string. Frozen so the shared constant cannot be mutated
  # in place.
  VERSION = "0.3.0".freeze
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem metadata for preformatter 0.2.0. Generated by jeweler — change the
# Rakefile's Jeweler::Tasks and re-run the gemspec task instead of editing.
Gem::Specification.new do |s|
s.name = %q{preformatter}
s.version = "0.2.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Adrian Cuadros", "Eduardo Moreno", "Andres Paglayan"]
s.date = %q{2010-05-14}
s.description = %q{The gem helps you by deleting spaces, special characters or replacing accents in the fields you need.}
s.email = %q{adrian@innku.com}
s.extra_rdoc_files = [
"LICENSE",
"README.textile"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.textile",
"Rakefile",
"VERSION",
"lib/preformatter.rb",
"preformatter.gemspec",
"test/helper.rb",
"test/test_preformatter.rb"
]
s.homepage = %q{http://github.com/adriancuadros/preformatter}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Does basic preformatting of information before creating a record}
s.test_files = [
"test/helper.rb",
"test/test_preformatter.rb"
]
# Dependency declarations, guarded for very old RubyGems versions.
if s.respond_to? :specification_version then
# NOTE(review): current_version is assigned but never used (jeweler artifact).
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
end
Regenerated gemspec for version 0.3.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
# Gem metadata for preformatter 0.3.0. Generated by jeweler — change the
# Rakefile's Jeweler::Tasks and re-run the gemspec task instead of editing.
Gem::Specification.new do |s|
s.name = %q{preformatter}
s.version = "0.3.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Adrian Cuadros", "Eduardo Moreno", "Andres Paglayan"]
s.date = %q{2010-05-18}
s.description = %q{The gem helps you by deleting spaces, special characters or replacing accents in the fields you need.}
s.email = %q{adrian@innku.com}
s.extra_rdoc_files = [
"LICENSE",
"README.textile"
]
s.files = [
".document",
".gitignore",
"LICENSE",
"README.textile",
"Rakefile",
"VERSION",
"lib/preformatter.rb",
"preformatter.gemspec",
"test/helper.rb",
"test/test_preformatter.rb"
]
s.homepage = %q{http://github.com/adriancuadros/preformatter}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{Does basic preformatting of information before creating a record}
s.test_files = [
"test/helper.rb",
"test/test_preformatter.rb"
]
# Dependency declarations, guarded for very old RubyGems versions.
if s.respond_to? :specification_version then
# NOTE(review): current_version is assigned but never used (jeweler artifact).
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
else
s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
end
end
|
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# Convenience methods that can be included into a custom Credit Card object, such as an ActiveRecord based Credit Card object.
module CreditCardMethods
CARD_COMPANY_DETECTORS = {
'visa' => ->(num) { num =~ /^4\d{12}(\d{3})?(\d{3})?$/ },
'master' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), MASTERCARD_RANGES) },
#'master' => ->(num) { num =~ /^(5[1-5]\d{4}|677189|222[1-9]\d{2}|22[3-9]\d{3}|2[3-6]\d{4}|27[01]\d{3}|2720\d{2})\d{10}$/ },
'elo' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), ELO_RANGES) },
'alelo' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), ALELO_RANGES) },
'discover' => ->(num) { num =~ /^(6011|65\d{2}|64[4-9]\d)\d{12,15}|(62\d{14,17})$/ },
'american_express' => ->(num) { num =~ /^3[47]\d{13}$/ },
'naranja' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), NARANJA_RANGES) },
'diners_club' => ->(num) { num =~ /^3(0[0-5]|[68]\d)\d{11}$/ },
'jcb' => ->(num) { num =~ /^35(28|29|[3-8]\d)\d{12}$/ },
'dankort' => ->(num) { num =~ /^5019\d{12}$/ },
'maestro' => ->(num) { (12..19).cover?(num&.size) && in_bin_range?(num.slice(0, 6), MAESTRO_RANGES) },
#'maestro' => ->(num) { num =~ /^(5018|5020|5038|6304|6759|6761|6763)[0-9]{8,15}$/ },
'forbrugsforeningen' => ->(num) { num =~ /^600722\d{10}$/ },
'sodexo' => ->(num) { num =~ /^(606071|603389|606070|606069|606068|600818)\d{10}$/ },
'vr' => ->(num) { num =~ /^(627416|637036)\d{10}$/ },
'cabal' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 8), CABAL_RANGES) },
'unionpay' => ->(num) { (16..19).cover?(num&.size) && in_bin_range?(num.slice(0, 8), UNIONPAY_RANGES) },
'carnet' => lambda { |num|
num&.size == 16 && (
in_bin_range?(num.slice(0, 6), CARNET_RANGES) ||
CARNET_BINS.any? { |bin| num.slice(0, bin.size) == bin }
)
},
'laser' => ->(num) { num =~ /^(6304|6706|6709|6771(?!89))\d{8}(\d{4}|\d{6,7})?$/ },
#'sodexo' => ->(num) { num =~ /^(606071|603389|606070|606069|606068|600818)\d{8}$/ },
#'vr' => ->(num) { num =~ /^(627416|637036)\d{8}$/ },
'colt' => ->(num) { num =~ /^1\d{4,7}$/ },
'msa' => ->(num) { num =~ /^(?:(?!70888[5-9]\d{8}\d{5}|(7088)?81003[0-9]{5}\d{5}|(7088)?8100[0-1][0-9]{5}))(5\d{7}$|^700000\d{8}|^(7088)?8\d{14})$/ },
'avcard' => ->(num) { num =~ /(^601029|^A)(\d{7,9})$/ },
'epiccard' => ->(num) { num =~ /^(7088)?8100[0-1][0-9]{5}\d{5}|7824(61|70)\d{10}$/ },
'aircard' => ->(num) { num =~ /^789682\d{10}$/ },
'shellaviation' => ->(num) { num =~ /^700055\d{10}|7005591\d{9}|7055\d{12}|^(7088)?81003[0-9]{5}\d{5}$/ },
'shellretail' => ->(num) { num =~ /^7070\d{10}|7575\d{14}$/ },
'avfuelretail' => ->(num) { num =~ /^708407(70)\d{9}$/ },
'avfuelpro' => ->(num) { num =~ /^708407(80)\d{9}$/ },
'avfuelcf' => ->(num) { num =~ /^708407(90)\d{9}$/ },
'uvair' => ->(num) { num =~ /^708308\d{8}$/ },
'msa_voyager' => ->(num) { num =~ /^70888[5-9]\d{8}\d{5}$/ }
}
VALIDATES_LUHN = [
'visa',
'master',
'discover',
'american_express',
'diners_club',
'jcb',
'switch',
'solo',
'dankort',
'maestro',
'forbrugsforeningen',
'laser',
]
# http://www.barclaycard. ->(num) { o.uk/business/files/bin_rules.pdf
ELECTRON_RANGES = [
[400115],
(400837..400839),
(412921..412923),
[417935],
(419740..419741),
(419773..419775),
[424519],
(424962..424963),
[437860],
[444000],
[459472],
(484406..484411),
(484413..484414),
(484418..484418),
(484428..484455),
(491730..491759),
]
CARNET_RANGES = [
(506199..506499),
]
CARNET_BINS = Set.new(
[
'286900', '502275', '606333', '627535', '636318', '636379', '639388',
'639484', '639559', '50633601', '50633606', '58877274', '62753500',
'60462203', '60462204', '588772'
]
)
# https://www.mastercard.us/content/dam/mccom/global/documents/mastercard-rules.pdf, page 73
MASTERCARD_RANGES = [
(222100..272099),
(510000..559999),
]
# https://www.mastercard.us/content/dam/mccom/global/documents/mastercard-rules.pdf, page 73
MAESTRO_RANGES = [
(561200..561269),
(561271..561299),
(561320..561356),
(581700..581751),
(581753..581800),
(589998..591259),
(591261..596770),
(596772..598744),
(598746..599999),
(600297..600314),
(600316..600335),
(600337..600362),
(600364..600382),
(601232..601254),
(601256..601276),
(601640..601652),
(601689..601700),
(602011..602050),
(639000..639099),
(670000..679999),
]
# https://dev.elo.com.br/apis/tabela-de-bins, download csv from left sidebar
ELO_RANGES = [
506707..506708, 506715..506715, 506718..506722, 506724..506724, 506726..506736, 506739..506739, 506741..506743,
506745..506747, 506753..506753, 506774..506776, 506778..506778, 509000..509001, 509003..509003, 509007..509007,
509020..509022, 509035..509035, 509039..509042, 509045..509045, 509048..509048, 509051..509071, 509073..509074,
509077..509080, 509084..509084, 509091..509094, 509098..509098, 509100..509100, 509104..509104, 509106..509109,
627780..627780, 636368..636368, 650031..650033, 650035..650045, 650047..650047, 650406..650410, 650434..650436,
650439..650439, 650485..650504, 650506..650530, 650577..650580, 650582..650591, 650721..650727, 650901..650922,
650928..650928, 650938..650939, 650946..650948, 650954..650955, 650962..650963, 650967..650967, 650971..650971,
651652..651667, 651675..651678, 655000..655010, 655012..655015, 655051..655052, 655056..655057
]
# Alelo provides BIN ranges by e-mailing them out periodically.
# The BINs beginning with the digit 4 overlap with Visa's range of valid card numbers.
# By placing the 'alelo' entry in CARD_COMPANY_DETECTORS below the 'visa' entry, we
# identify these cards as Visa. This works because transactions with such cards will
# run on Visa rails.
ALELO_RANGES = [
402588..402588, 404347..404347, 405876..405876, 405882..405882, 405884..405884,
405886..405886, 430471..430471, 438061..438061, 438064..438064, 470063..470066,
496067..496067, 506699..506704, 506706..506706, 506713..506714, 506716..506716,
506749..506750, 506752..506752, 506754..506756, 506758..506762, 506764..506767,
506770..506771, 509015..509019, 509880..509882, 509884..509885, 509987..509988
]
CABAL_RANGES = [
60420100..60440099,
58965700..58965799,
60352200..60352299
]
NARANJA_RANGES = [
589562..589562
]
# In addition to the BIN ranges listed here that all begin with 81, UnionPay cards
# include many ranges that start with 62.
# Prior to adding UnionPay, cards that start with 62 were all classified as Discover.
# Because UnionPay cards are able to run on Discover rails, this was kept the same.
UNIONPAY_RANGES = [
81000000..81099999, 81100000..81319999, 81320000..81519999, 81520000..81639999, 81640000..81719999
]
# Hook invoked when this module is included: extends the including class
# with ClassMethods so the brand-detection helpers are available at the
# class level as well.
def self.included(base)
base.extend(ClassMethods)
end
# Returns true when the numeric prefix +number+ (String or Integer) falls
# inside any of the given BIN +ranges+ (each must respond to #cover?).
def self.in_bin_range?(number, ranges)
  candidate = number.to_i
  ranges.any? { |bin_range| bin_range.cover?(candidate) }
end
# A month is valid when its integer value lies between 1 and 12 inclusive.
def valid_month?(month)
  month.to_i.between?(1, 12)
end
# Marker predicate: always true for objects including this module.
def credit_card?
true
end
# Accepts expiry years from the current calendar year through 20 years out.
def valid_expiry_year?(year)
  this_year = Time.now.year
  year.to_i.between?(this_year, this_year + 20)
end
# A start year must be a four-digit year after 1987.
# Fixed to use the whole-string anchors \A/\z instead of the per-line
# anchors ^/$, so multi-line input such as "1990\njunk" can no longer
# slip past the format check.
def valid_start_year?(year)
  ((year.to_s =~ /\A\d{4}\z/) && (year.to_i > 1987))
end
# Credit card providers have 3 digit verification values
# This isn't standardised, these are called various names such as
# CVC, CVV, CID, CSC and more
# See: http://en.wikipedia.org/wiki/Card_security_code
# American Express is the exception with 4 digits
#
# Below are links from the card providers with their requirements
# visa: http://usa.visa.com/personal/security/3-digit-security-code.jsp
# master: http://www.mastercard.com/ca/merchant/en/getstarted/Anatomy_MasterCard.html
# jcb: http://www.jcbcard.com/security/info.html
# diners_club: http://www.dinersclub.com/assets/DinersClub_card_ID_features.pdf
# discover: https://www.discover.com/credit-cards/help-center/glossary.html
# american_express: https://online.americanexpress.com/myca/fuidfyp/us/action?request_type=un_fuid&Face=en_US
# Returns truthy when +cvv+ is exactly the number of digits expected for
# +brand+ (see card_verification_value_length).
# Fixed to use \A/\z instead of ^/$ so a CVV containing embedded newlines
# (e.g. "123\n456") cannot pass the per-line anchored check.
def valid_card_verification_value?(cvv, brand)
  cvv.to_s =~ /\A\d{#{card_verification_value_length(brand)}}\z/
end
# Number of CVV digits expected for +brand+: American Express uses 4,
# Maestro has none (0), every other brand uses the standard 3.
def card_verification_value_length(brand)
  lengths = { 'american_express' => 4, 'maestro' => 0 }
  lengths.fetch(brand, 3)
end
# Issue numbers (Switch/Solo style) are one or two digits.
# Fixed to use \A/\z instead of ^/$ so multi-line strings such as
# "1\n23" cannot pass the per-line anchored check.
def valid_issue_number?(number)
  (number.to_s =~ /\A\d{1,2}\z/)
end
# Returns if the card matches known Electron BINs
# Delegates to ClassMethods#electron?; assumes the including class
# provides a +number+ reader — TODO confirm against the card class.
def electron?
self.class.electron?(number)
end
module ClassMethods
# Returns true if it validates. Optionally, you can pass a card brand as an argument and
# make sure it is of the correct brand.
#
# References:
# - http://perl.about.com/compute/perl/library/nosearch/P073000.htm
# - http://www.beachnet.com/~hstiles/cardtype.html
# A number is valid when it is a test-mode token, or when it passes all
# of: length (>= 12), digits-only characters, and the brand-specific
# checksum. NOTE: && binds tighter than ||, so the three structural
# checks form one group consulted only when the test-mode check fails.
def valid_number?(number)
valid_test_mode_card_number?(number) ||
valid_card_number_length?(number) &&
valid_card_number_characters?(number) &&
valid_by_algorithm?(brand?(number), number)
end
# The brand keys this module can detect, in detection (insertion) order.
def card_companies
CARD_COMPANY_DETECTORS.keys
end
# Returns a string containing the brand of card from the list of known information below.
# Detectors run in hash insertion order and the first truthy detector
# wins, so the ordering of CARD_COMPANY_DETECTORS is significant (e.g.
# 'alelo' BINs beginning with 4 are deliberately listed after 'visa').
# Returns 'bogus' for test-mode tokens and nil when nothing matches.
def brand?(number)
return 'bogus' if valid_test_mode_card_number?(number)
CARD_COMPANY_DETECTORS.each do |company, func|
return company.dup if func.call(number)
end
return nil
end
# Returns true when the number's BIN falls in a known Visa Electron range.
# Electron numbers are only ever 16 or 19 digits long.
def electron?(number)
return false unless [16, 19].include?(number&.length)
# don't recalculate for each range
bank_identification_number = first_digits(number).to_i
ELECTRON_RANGES.any? do |range|
range.include?(bank_identification_number)
end
end
# Deprecated alias for brand?; logs a deprecation warning on every call.
def type?(number)
ActiveMerchant.deprecated 'CreditCard#type? is deprecated and will be removed from a future release of ActiveMerchant. Please use CreditCard#brand? instead.'
brand?(number)
end
# The first six digits (the BIN/IIN); empty string when number is nil.
def first_digits(number)
  return '' if number.nil?
  number[0, 6]
end
# The last four digits; numbers of four digits or fewer come back whole,
# nil becomes the empty string.
def last_digits(number)
  return '' if number.nil?
  number.length > 4 ? number[-4, 4] : number
end
# Masks all but the last four digits, e.g. "XXXX-XXXX-XXXX-1111".
def mask(number)
"XXXX-XXXX-XXXX-#{last_digits(number)}"
end
# Checks to see if the calculated brand matches the specified brand
# (false for unrecognised numbers, where brand? yields nil).
def matching_brand?(number, brand)
brand?(number) == brand
end
# Deprecated alias for matching_brand?; warns on every call.
def matching_type?(number, brand)
ActiveMerchant.deprecated 'CreditCard#matching_type? is deprecated and will be removed from a future release of ActiveMerchant. Please use CreditCard#matching_brand? instead.'
matching_brand?(number, brand)
end
private
# Card numbers must be at least 12 characters long; nil is never valid.
def valid_card_number_length?(number) #:nodoc:
  !number.nil? && number.length >= 12
end
# Valid card numbers contain no non-digit characters; nil is invalid.
# (The empty string has no offending characters and is left to the
# separate length check.)
def valid_card_number_characters?(number) #:nodoc:
  return false if number.nil?
  number !~ /\D/
end
# In test mode the special tokens '1', '2', '3', 'success', 'failure'
# and 'error' are accepted in place of a real card number.
def valid_test_mode_card_number?(number) #:nodoc:
ActiveMerchant::Billing::Base.test? &&
%w[1 2 3 success failure error].include?(number)
end
# Dispatches to the checksum algorithm for +brand+: Naranja uses its own
# scheme, brands listed in VALIDATES_LUHN use the Luhn checksum, and any
# other brand passes without a checksum.
def valid_by_algorithm?(brand, numbers) #:nodoc:
case brand
when 'naranja'
valid_naranja_algo?(numbers)
else
if VALIDATES_LUHN.include?(brand)
valid_luhn?(numbers)
else
true
end
end
end
# Byte-indexed lookup tables for the Luhn checksum. Keys are the ASCII
# codes of '0'..'9' (48..57); ODD_LUHN_VALUE maps a digit to itself,
# EVEN_LUHN_VALUE maps it to its doubled-and-reduced Luhn contribution.
ODD_LUHN_VALUE = {
  48 => 0,
  49 => 1,
  50 => 2,
  51 => 3,
  52 => 4,
  53 => 5,
  54 => 6,
  55 => 7,
  56 => 8,
  57 => 9,
  nil => 0
}.freeze
EVEN_LUHN_VALUE = {
  48 => 0, # 0 * 2
  49 => 2, # 1 * 2
  50 => 4, # 2 * 2
  51 => 6, # 3 * 2
  52 => 8, # 4 * 2
  53 => 1, # 5 * 2 - 9
  54 => 3, # 6 * 2 - 9
  55 => 5, # etc ...
  56 => 7,
  57 => 9,
}.freeze
# Luhn checksum (http://en.wikipedia.org/wiki/Luhn_algorithm): scanning
# from the rightmost digit, odd-position digits count as-is and
# even-position digits count doubled (minus nine when the double exceeds
# nine); the grand total must be divisible by ten.
def valid_luhn?(numbers) #:nodoc:
  total = numbers.reverse.bytes.each_with_index.sum do |byte, position|
    position.even? ? ODD_LUHN_VALUE[byte] : EVEN_LUHN_VALUE[byte]
  end
  (total % 10).zero?
end
# Naranja's proprietary check-digit scheme: multiply the first fifteen
# digits by a fixed weight sequence, take 11 - (sum mod 11), treat
# results above 9 as 0, and compare against the sixteenth digit.
def valid_naranja_algo?(numbers) #:nodoc:
  digits = numbers.to_s.each_char.map(&:to_i)
  weights = [4, 3, 2, 7, 6, 5, 4, 3, 2, 7, 6, 5, 4, 3, 2]
  weighted_sum = digits.first(15).zip(weights).map { |digit, weight| digit * weight }.reduce(:+)
  check = 11 - (weighted_sum % 11)
  expected = check > 9 ? 0 : check
  expected == digits[15]
end
end
end
end
end
msa && avcard regexps
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# Convenience methods that can be included into a custom Credit Card object, such as an ActiveRecord based Credit Card object.
module CreditCardMethods
# Lambda-per-brand detection table; brand? walks it in insertion order
# and the first truthy detector wins, so ordering here is significant.
CARD_COMPANY_DETECTORS = {
'visa' => ->(num) { num =~ /^4\d{12}(\d{3})?(\d{3})?$/ },
'master' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), MASTERCARD_RANGES) },
#'master' => ->(num) { num =~ /^(5[1-5]\d{4}|677189|222[1-9]\d{2}|22[3-9]\d{3}|2[3-6]\d{4}|27[01]\d{3}|2720\d{2})\d{10}$/ },
'elo' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), ELO_RANGES) },
'alelo' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), ALELO_RANGES) },
'discover' => ->(num) { num =~ /^(6011|65\d{2}|64[4-9]\d)\d{12,15}|(62\d{14,17})$/ },
'american_express' => ->(num) { num =~ /^3[47]\d{13}$/ },
'naranja' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 6), NARANJA_RANGES) },
'diners_club' => ->(num) { num =~ /^3(0[0-5]|[68]\d)\d{11}$/ },
'jcb' => ->(num) { num =~ /^35(28|29|[3-8]\d)\d{12}$/ },
'dankort' => ->(num) { num =~ /^5019\d{12}$/ },
'maestro' => ->(num) { (12..19).cover?(num&.size) && in_bin_range?(num.slice(0, 6), MAESTRO_RANGES) },
#'maestro' => ->(num) { num =~ /^(5018|5020|5038|6304|6759|6761|6763)[0-9]{8,15}$/ },
'forbrugsforeningen' => ->(num) { num =~ /^600722\d{10}$/ },
'sodexo' => ->(num) { num =~ /^(606071|603389|606070|606069|606068|600818)\d{10}$/ },
'vr' => ->(num) { num =~ /^(627416|637036)\d{10}$/ },
'cabal' => ->(num) { num&.size == 16 && in_bin_range?(num.slice(0, 8), CABAL_RANGES) },
'unionpay' => ->(num) { (16..19).cover?(num&.size) && in_bin_range?(num.slice(0, 8), UNIONPAY_RANGES) },
'carnet' => lambda { |num|
num&.size == 16 && (
in_bin_range?(num.slice(0, 6), CARNET_RANGES) ||
CARNET_BINS.any? { |bin| num.slice(0, bin.size) == bin }
)
},
'laser' => ->(num) { num =~ /^(6304|6706|6709|6771(?!89))\d{8}(\d{4}|\d{6,7})?$/ },
#'sodexo' => ->(num) { num =~ /^(606071|603389|606070|606069|606068|600818)\d{8}$/ },
#'vr' => ->(num) { num =~ /^(627416|637036)\d{8}$/ },
'colt' => ->(num) { num =~ /^1\d{4,7}$/ },
# broadened from 5\d{7} in the previous revision: now accepts any 8-digit
# number not starting with 0 (subject to the excluded prefixes in the
# leading negative lookahead)
'msa' => ->(num) { num =~ /^(?:(?!70888[5-9]\d{8}\d{5}|(7088)?81003[0-9]{5}\d{5}|(7088)?8100[0-1][0-9]{5}))([1-9]\d{7}$|^700000\d{8}|^(7088)?8\d{14})$/ },
# widened from \d{7,9} in the previous revision to allow up to 10 digits
# after the 601029/A prefix
'avcard' => ->(num) { num =~ /(^601029|^A)(\d{7,10})$/ },
'epiccard' => ->(num) { num =~ /^(7088)?8100[0-1][0-9]{5}\d{5}|7824(61|70)\d{10}$/ },
'aircard' => ->(num) { num =~ /^789682\d{10}$/ },
'shellaviation' => ->(num) { num =~ /^700055\d{10}|7005591\d{9}|7055\d{12}|^(7088)?81003[0-9]{5}\d{5}$/ },
'shellretail' => ->(num) { num =~ /^7070\d{10}|7575\d{14}$/ },
'avfuelretail' => ->(num) { num =~ /^708407(70)\d{9}$/ },
'avfuelpro' => ->(num) { num =~ /^708407(80)\d{9}$/ },
'avfuelcf' => ->(num) { num =~ /^708407(90)\d{9}$/ },
'uvair' => ->(num) { num =~ /^708308\d{8}$/ },
'msa_voyager' => ->(num) { num =~ /^70888[5-9]\d{8}\d{5}$/ }
}
VALIDATES_LUHN = [
'visa',
'master',
'discover',
'american_express',
'diners_club',
'jcb',
'switch',
'solo',
'dankort',
'maestro',
'forbrugsforeningen',
'laser',
]
# http://www.barclaycard. ->(num) { o.uk/business/files/bin_rules.pdf
ELECTRON_RANGES = [
[400115],
(400837..400839),
(412921..412923),
[417935],
(419740..419741),
(419773..419775),
[424519],
(424962..424963),
[437860],
[444000],
[459472],
(484406..484411),
(484413..484414),
(484418..484418),
(484428..484455),
(491730..491759),
]
CARNET_RANGES = [
(506199..506499),
]
CARNET_BINS = Set.new(
[
'286900', '502275', '606333', '627535', '636318', '636379', '639388',
'639484', '639559', '50633601', '50633606', '58877274', '62753500',
'60462203', '60462204', '588772'
]
)
# https://www.mastercard.us/content/dam/mccom/global/documents/mastercard-rules.pdf, page 73
MASTERCARD_RANGES = [
(222100..272099),
(510000..559999),
]
# https://www.mastercard.us/content/dam/mccom/global/documents/mastercard-rules.pdf, page 73
MAESTRO_RANGES = [
(561200..561269),
(561271..561299),
(561320..561356),
(581700..581751),
(581753..581800),
(589998..591259),
(591261..596770),
(596772..598744),
(598746..599999),
(600297..600314),
(600316..600335),
(600337..600362),
(600364..600382),
(601232..601254),
(601256..601276),
(601640..601652),
(601689..601700),
(602011..602050),
(639000..639099),
(670000..679999),
]
# https://dev.elo.com.br/apis/tabela-de-bins, download csv from left sidebar
ELO_RANGES = [
506707..506708, 506715..506715, 506718..506722, 506724..506724, 506726..506736, 506739..506739, 506741..506743,
506745..506747, 506753..506753, 506774..506776, 506778..506778, 509000..509001, 509003..509003, 509007..509007,
509020..509022, 509035..509035, 509039..509042, 509045..509045, 509048..509048, 509051..509071, 509073..509074,
509077..509080, 509084..509084, 509091..509094, 509098..509098, 509100..509100, 509104..509104, 509106..509109,
627780..627780, 636368..636368, 650031..650033, 650035..650045, 650047..650047, 650406..650410, 650434..650436,
650439..650439, 650485..650504, 650506..650530, 650577..650580, 650582..650591, 650721..650727, 650901..650922,
650928..650928, 650938..650939, 650946..650948, 650954..650955, 650962..650963, 650967..650967, 650971..650971,
651652..651667, 651675..651678, 655000..655010, 655012..655015, 655051..655052, 655056..655057
]
# Alelo provides BIN ranges by e-mailing them out periodically.
# The BINs beginning with the digit 4 overlap with Visa's range of valid card numbers.
# By placing the 'alelo' entry in CARD_COMPANY_DETECTORS below the 'visa' entry, we
# identify these cards as Visa. This works because transactions with such cards will
# run on Visa rails.
ALELO_RANGES = [
402588..402588, 404347..404347, 405876..405876, 405882..405882, 405884..405884,
405886..405886, 430471..430471, 438061..438061, 438064..438064, 470063..470066,
496067..496067, 506699..506704, 506706..506706, 506713..506714, 506716..506716,
506749..506750, 506752..506752, 506754..506756, 506758..506762, 506764..506767,
506770..506771, 509015..509019, 509880..509882, 509884..509885, 509987..509988
]
CABAL_RANGES = [
60420100..60440099,
58965700..58965799,
60352200..60352299
]
NARANJA_RANGES = [
589562..589562
]
# In addition to the BIN ranges listed here that all begin with 81, UnionPay cards
# include many ranges that start with 62.
# Prior to adding UnionPay, cards that start with 62 were all classified as Discover.
# Because UnionPay cards are able to run on Discover rails, this was kept the same.
UNIONPAY_RANGES = [
81000000..81099999, 81100000..81319999, 81320000..81519999, 81520000..81639999, 81640000..81719999
]
def self.included(base)
base.extend(ClassMethods)
end
def self.in_bin_range?(number, ranges)
bin = number.to_i
ranges.any? do |range|
range.cover?(bin)
end
end
def valid_month?(month)
(1..12).cover?(month.to_i)
end
def credit_card?
true
end
def valid_expiry_year?(year)
(Time.now.year..Time.now.year + 20).cover?(year.to_i)
end
def valid_start_year?(year)
((year.to_s =~ /^\d{4}$/) && (year.to_i > 1987))
end
# Credit card providers have 3 digit verification values
# This isn't standardised, these are called various names such as
# CVC, CVV, CID, CSC and more
# See: http://en.wikipedia.org/wiki/Card_security_code
# American Express is the exception with 4 digits
#
# Below are links from the card providers with their requirements
# visa: http://usa.visa.com/personal/security/3-digit-security-code.jsp
# master: http://www.mastercard.com/ca/merchant/en/getstarted/Anatomy_MasterCard.html
# jcb: http://www.jcbcard.com/security/info.html
# diners_club: http://www.dinersclub.com/assets/DinersClub_card_ID_features.pdf
# discover: https://www.discover.com/credit-cards/help-center/glossary.html
# american_express: https://online.americanexpress.com/myca/fuidfyp/us/action?request_type=un_fuid&Face=en_US
def valid_card_verification_value?(cvv, brand)
cvv.to_s =~ /^\d{#{card_verification_value_length(brand)}}$/
end
def card_verification_value_length(brand)
case brand
when 'american_express'
4
when 'maestro'
0
else
3
end
end
def valid_issue_number?(number)
(number.to_s =~ /^\d{1,2}$/)
end
# Returns if the card matches known Electron BINs
def electron?
self.class.electron?(number)
end
module ClassMethods
# Returns true if it validates. Optionally, you can pass a card brand as an argument and
# make sure it is of the correct brand.
#
# References:
# - http://perl.about.com/compute/perl/library/nosearch/P073000.htm
# - http://www.beachnet.com/~hstiles/cardtype.html
def valid_number?(number)
valid_test_mode_card_number?(number) ||
valid_card_number_length?(number) &&
valid_card_number_characters?(number) &&
valid_by_algorithm?(brand?(number), number)
end
def card_companies
CARD_COMPANY_DETECTORS.keys
end
# Returns a string containing the brand of card from the list of known information below.
def brand?(number)
return 'bogus' if valid_test_mode_card_number?(number)
CARD_COMPANY_DETECTORS.each do |company, func|
return company.dup if func.call(number)
end
return nil
end
def electron?(number)
return false unless [16, 19].include?(number&.length)
# don't recalculate for each range
bank_identification_number = first_digits(number).to_i
ELECTRON_RANGES.any? do |range|
range.include?(bank_identification_number)
end
end
def type?(number)
ActiveMerchant.deprecated 'CreditCard#type? is deprecated and will be removed from a future release of ActiveMerchant. Please use CreditCard#brand? instead.'
brand?(number)
end
def first_digits(number)
number&.slice(0, 6) || ''
end
def last_digits(number)
return '' if number.nil?
number.length <= 4 ? number : number.slice(-4..-1)
end
def mask(number)
"XXXX-XXXX-XXXX-#{last_digits(number)}"
end
# Checks to see if the calculated brand matches the specified brand
def matching_brand?(number, brand)
brand?(number) == brand
end
def matching_type?(number, brand)
ActiveMerchant.deprecated 'CreditCard#matching_type? is deprecated and will be removed from a future release of ActiveMerchant. Please use CreditCard#matching_brand? instead.'
matching_brand?(number, brand)
end
private
def valid_card_number_length?(number) #:nodoc:
return false if number.nil?
number.length >= 12
end
def valid_card_number_characters?(number) #:nodoc:
return false if number.nil?
!number.match(/\D/)
end
def valid_test_mode_card_number?(number) #:nodoc:
ActiveMerchant::Billing::Base.test? &&
%w[1 2 3 success failure error].include?(number)
end
def valid_by_algorithm?(brand, numbers) #:nodoc:
case brand
when 'naranja'
valid_naranja_algo?(numbers)
else
if VALIDATES_LUHN.include?(brand)
valid_luhn?(numbers)
else
true
end
end
end
ODD_LUHN_VALUE = {
48 => 0,
49 => 1,
50 => 2,
51 => 3,
52 => 4,
53 => 5,
54 => 6,
55 => 7,
56 => 8,
57 => 9,
nil => 0
}.freeze
EVEN_LUHN_VALUE = {
48 => 0, # 0 * 2
49 => 2, # 1 * 2
50 => 4, # 2 * 2
51 => 6, # 3 * 2
52 => 8, # 4 * 2
53 => 1, # 5 * 2 - 9
54 => 3, # 6 * 2 - 9
55 => 5, # etc ...
56 => 7,
57 => 9,
}.freeze
# Checks the validity of a card number by use of the Luhn Algorithm.
# Please see http://en.wikipedia.org/wiki/Luhn_algorithm for details.
# This implementation is from the luhn_checksum gem, https://github.com/zendesk/luhn_checksum.
def valid_luhn?(numbers) #:nodoc:
sum = 0
odd = true
numbers.reverse.bytes.each do |number|
if odd
odd = false
sum += ODD_LUHN_VALUE[number]
else
odd = true
sum += EVEN_LUHN_VALUE[number]
end
end
sum % 10 == 0
end
# Checks the validity of a card number by use of Naranja's specific algorithm.
def valid_naranja_algo?(numbers) #:nodoc:
num_array = numbers.to_s.chars.map(&:to_i)
multipliers = [4, 3, 2, 7, 6, 5, 4, 3, 2, 7, 6, 5, 4, 3, 2]
num_sum = num_array[0..14].zip(multipliers).map { |a, b| a*b }.reduce(:+)
intermediate = 11 - (num_sum % 11)
final_num = intermediate > 9 ? 0 : intermediate
final_num == num_array[15]
end
end
end
end
end
|
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
module Integrations #:nodoc:
# Offsite payment integration with iPay88 (mobile88.com endpoints).
module Ipay88
autoload :Return, "active_merchant/billing/integrations/ipay88/return.rb"
autoload :Helper, "active_merchant/billing/integrations/ipay88/helper.rb"
autoload :Notification, "active_merchant/billing/integrations/ipay88/notification.rb"
# Process-wide merchant key storage backing the accessors below.
mattr_accessor :merch_key
# The merchant key provided to you by iPay88.
def self.merchant_key
self.merch_key
end
# Set the merchant key (in a Rails initializer, for example)
#
# ActiveMerchant::Billing::Integrations::Ipay88.merchant_key = "foo"
#
def self.merchant_key=(key)
self.merch_key = key
end
# The requery URL upon returning from iPay88
def self.service_url
"https://www.mobile88.com/epayment/enquiry.asp"
end
# The URL to POST your payment form to
def self.entry_url
"https://www.mobile88.com/epayment/entry.asp"
end
# Wraps the raw return query string in an Ipay88::Return.
def self.return(query_string)
Return.new(query_string)
end
end
end
end
end
Flips the entry and service URLs for iPay88
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
module Integrations #:nodoc:
# Offsite payment integration with iPay88 (mobile88.com endpoints).
module Ipay88
autoload :Return, "active_merchant/billing/integrations/ipay88/return.rb"
autoload :Helper, "active_merchant/billing/integrations/ipay88/helper.rb"
autoload :Notification, "active_merchant/billing/integrations/ipay88/notification.rb"
# Process-wide merchant key storage backing the accessors below.
mattr_accessor :merch_key
# The merchant key provided to you by iPay88.
def self.merchant_key
self.merch_key
end
# Set the merchant key (in a Rails initializer, for example)
#
# ActiveMerchant::Billing::Integrations::Ipay88.merchant_key = "foo"
#
def self.merchant_key=(key)
self.merch_key = key
end
# NOTE(review): this revision deliberately swapped the two URLs; service_url
# now returns the payment entry page (entry.asp) — confirm callers treat
# service_url as the form POST target.
def self.service_url
"https://www.mobile88.com/epayment/entry.asp"
end
# The requery/enquiry endpoint (previously returned by service_url).
def self.entry_url
"https://www.mobile88.com/epayment/enquiry.asp"
end
# Wraps the raw return query string in an Ipay88::Return.
def self.return(query_string)
Return.new(query_string)
end
end
end
end
end
|
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a Form Column
module FormColumnHelpers
# This method decides which input to use for the given column.
# It does not do any rendering. It only decides which method is responsible for rendering.
# Dispatches to the appropriate input helper for +column+, in priority order:
# 1. a developer-defined per-field override,
# 2. a helper matching the column's form_ui,
# 3. type-based fallbacks, ending at Rails' generic input().
# Logs and re-raises any error, tagging it with the failing column.
def active_scaffold_input_for(column, scope = nil, options = {})
begin
options = active_scaffold_input_options(column, scope, options)
options = javascript_for_update_column(column, scope, options)
# first, check if the dev has created an override for this specific field
if override_form_field?(column)
send(override_form_field(column), @record, options)
# second, check if the dev has specified a valid form_ui for this column
elsif column.form_ui and override_input?(column.form_ui)
send(override_input(column.form_ui), column, options)
# fallback: we get to make the decision
else
if column.association
# if we get here, it's because the column has a form_ui but not one ActiveScaffold knows about.
raise "Unknown form_ui `#{column.form_ui}' for column `#{column.name}'"
elsif column.virtual?
active_scaffold_input_virtual(column, options)
else # regular model attribute column
# if we (or someone else) have created a custom render option for the column type, use that
if override_input?(column.column.type)
send(override_input(column.column.type), column, options)
# final ultimate fallback: use rails' generic input method
else
# for textual fields we pass different options
text_types = [:text, :string, :integer, :float, :decimal]
date_types = [:date, :datetime, :time]
options = active_scaffold_input_text_options(options) if text_types.include?(column.column.type)
options = active_scaffold_input_date_options(column, options) if date_types.include?(column.column.type)
if column.column.type == :string && options[:maxlength].blank?
options[:maxlength] = column.column.limit
options[:size] ||= ActionView::Helpers::InstanceTag::DEFAULT_FIELD_OPTIONS["size"]
end
options[:value] = format_number_value(@record.send(column.name), column.options) if column.column.number?
input(:record, column.name, options.merge(column.options))
end
end
end
# NOTE(review): rescuing Exception (not StandardError) also intercepts
# SystemExit/Interrupt; tolerable here only because it always re-raises.
rescue Exception => e
logger.error Time.now.to_s + "#{e.inspect} -- on the ActiveScaffold column = :#{column.name} in #{@controller.class}"
raise e
end
end
alias form_column active_scaffold_input_for
# Standard options applied to every textual input: browser autocomplete
# is disabled and the "text-input" CSS class is appended to any classes
# already present. Mutates and returns +options+.
def active_scaffold_input_text_options(options = {})
  options[:autocomplete] = 'off'
  options[:class] = [options[:class], 'text-input'].join(' ').strip
  options
end
# Standard options for date/datetime/time inputs: adds a blank choice
# when the underlying column is nullable, and derives the :prefix by
# stripping "[column_name]" from the input's :name. Mutates and
# returns +options+.
def active_scaffold_input_date_options(column, options = {})
  options[:include_blank] = true if column.column.null
  field_suffix = "[#{column.name}]"
  options[:prefix] = options[:name].gsub(field_suffix, '')
  options
end
# the standard active scaffold options used for class, name and scope
# Builds :name ("record[col]" or "record#{scope}[col]"), a "col-input"
# CSS class, and an :id made unique with params[:eid]/params[:id] plus a
# flattened scope suffix so subform fields do not collide.
def active_scaffold_input_options(column, scope = nil, options = {})
name = scope ? "record#{scope}[#{column.name}]" : "record[#{column.name}]"
# Fix for keeping unique IDs in subform
id_control = "record_#{column.name}_#{[params[:eid], params[:id]].compact.join '_'}"
id_control += scope.gsub(/(\[|\])/, '_').gsub('__', '_').gsub(/_$/, '') if scope
{ :name => name, :class => "#{column.name}-input", :id => id_control}.merge(options)
end
# When the column is configured with :update_column, attaches an inline
# AJAX onchange handler (Prototype-style remote_function) that re-renders
# the dependent field, showing a loading indicator and disabling the form
# while the request is in flight. Returns the (possibly augmented) options.
def javascript_for_update_column(column, scope, options)
if column.options[:update_column]
form_action = :create
form_action = :update if params[:action] == 'edit'
url_params = {:action => 'render_field', :id => params[:id], :column => column.name, :update_column => column.options[:update_column]}
url_params[:eid] = params[:eid] if params[:eid]
url_params[:controller] = controller.class.active_scaffold_controller_for(@record.class).controller_path if scope
url_params[:scope] = params[:scope] if scope
ajax_options = {:method => :get,
:url => url_for(url_params), :with => "'value=' + this.value",
:after => "$('#{loading_indicator_id(:action => :render_field, :id => params[:id])}').style.visibility = 'visible'; Form.disable('#{element_form_id(:action => form_action)}');",
:complete => "$('#{loading_indicator_id(:action => :render_field, :id => params[:id])}').style.visibility = 'hidden'; Form.enable('#{element_form_id(:action => form_action)}');"}
options[:onchange] = "#{remote_function(ajax_options)};#{options[:onchange]}"
end
options
end
##
## Form input methods
##
# Select box for a singular association; ensures the currently associated
# record appears in (and is pre-selected from) the option list even when
# options_for_association does not include it.
def active_scaffold_input_singular_association(column, html_options)
associated = @record.send(column.association.name)
select_options = options_for_association(column.association)
select_options.unshift([ associated.to_label, associated.id ]) unless associated.nil? or select_options.find {|label, id| id == associated.id}
selected = associated.nil? ? nil : associated.id
method = column.name
#html_options[:name] += '[id]'
options = {:selected => selected, :include_blank => as_(:_select_)}
html_options.update(column.options[:html_options] || {})
options.update(column.options)
select(:record, method, select_options.uniq, options, html_options)
end
# Checkbox list for a plural association; records already associated with
# @record come pre-checked. Optionally wires up DraggableLists.
# NOTE(review): +label+ is interpolated into the HTML without escaping —
# confirm labels are trusted or escaped upstream.
def active_scaffold_input_plural_association(column, options)
associated_options = @record.send(column.association.name).collect {|r| [r.to_label, r.id]}
select_options = associated_options | options_for_association(column.association)
return content_tag(:span, as_(:no_options), :id => options[:id]) if select_options.empty?
html = "<ul class=\"checkbox-list\" id=\"#{options[:id]}\">"
associated_ids = associated_options.collect {|a| a[1]}
select_options.each_with_index do |option, i|
label, id = option
this_name = "#{options[:name]}[]"
this_id = "#{options[:id]}_#{i}_id"
html << "<li>"
html << check_box_tag(this_name, id, associated_ids.include?(id), :id => this_id)
html << "<label for='#{this_id}'>"
html << label
html << "</label>"
html << "</li>"
end
html << '</ul>'
html << javascript_tag("new DraggableLists('#{options[:id]}')") if column.options[:draggable_lists]
html
end
# Normalizes one configured option into a [label, value] pair: Symbols
# are translated via the model's human_attribute_name, Strings pass
# through unchanged; a nil value defaults to the text's string form.
def active_scaffold_translated_option(column, text, value = nil)
  value = text.to_s if value.nil?
  label = text.is_a?(Symbol) ? column.active_record_class.human_attribute_name(text) : text
  [label, value]
end
# Maps the column's configured :options through
# active_scaffold_translated_option, yielding [label, value] pairs.
def active_scaffold_translated_options(column)
column.options[:options].collect do |text, value|
active_scaffold_translated_option(column, text, value)
end
end
# Renders a select: delegates to the association helpers for singular or
# plural associations, otherwise builds a select from the translated
# option list with the record's current value pre-selected.
def active_scaffold_input_select(column, html_options)
if column.singular_association?
active_scaffold_input_singular_association(column, html_options)
elsif column.plural_association?
active_scaffold_input_plural_association(column, html_options)
else
options = { :selected => @record.send(column.name) }
options_for_select = active_scaffold_translated_options(column)
html_options.update(column.options[:html_options] || {})
options.update(column.options)
select(:record, column.name, options_for_select, options, html_options)
end
end
# :radio form_ui — one labelled radio button per configured option.
def active_scaffold_input_radio(column, html_options)
html_options.update(column.options[:html_options] || {})
# inject('') accumulates into one string; << returns the same string, so
# the block's return value stays the accumulator
column.options[:options].inject('') do |html, (text, value)|
text, value = active_scaffold_translated_option(column, text, value)
html << content_tag(:label, radio_button(:record, column.name, value, html_options.merge(:id => html_options[:id] + '-' + value.to_s)) + text)
end
end
# requires RecordSelect plugin to be installed and configured.
# ... maybe this should be provided in a bridge?
# Renders nothing for non-association columns.
def active_scaffold_input_record_select(column, options)
  if column.singular_association?
    multiple = false
  elsif column.plural_association?
    multiple = true
  else
    return
  end
  active_scaffold_record_select(column, options, @record.send(column.name), multiple)
end
# Renders a RecordSelect field for an association column.
# value is the current associated record(s); multiple selects the
# multi-select variant for plural associations.
def active_scaffold_record_select(column, options, value, multiple)
unless column.association
raise ArgumentError, "record_select can only work against associations (and #{column.name} is not). A common mistake is to specify the foreign key field (like :user_id), instead of the association (:user)."
end
remote_controller = active_scaffold_controller_for(column.association.klass).controller_path
# if the opposite association is a :belongs_to (in that case association in this class must be has_one or has_many)
# then only show records that have not been associated yet
if [:has_one, :has_many].include?(column.association.macro)
# NOTE(review): mutates the request params hash in place — side effect
params.merge!({column.association.primary_key_name => ''})
end
# merge order matters: text-input defaults, then column.options win last
record_select_options = {:controller => remote_controller, :id => options[:id]}
record_select_options.merge!(active_scaffold_input_text_options)
record_select_options.merge!(column.options)
if multiple
record_multi_select_field(options[:name], value || [], record_select_options)
else
record_select_field(options[:name], value || column.association.klass.new, record_select_options)
end
end
# :checkbox form_ui — plain Rails checkbox bound to the column.
def active_scaffold_input_checkbox(column, options)
check_box(:record, column.name, options)
end
# :password form_ui — password field with the standard text-input options;
# the column's own options win on conflicts.
def active_scaffold_input_password(column, options)
  text_options = active_scaffold_input_text_options(options)
  password_field(:record, column.name, text_options.merge(column.options))
end
# :textarea form_ui — dimensions (cols/rows/size) come from the column's options.
def active_scaffold_input_textarea(column, options)
  dimensions = {:cols => column.options[:cols], :rows => column.options[:rows], :size => column.options[:size]}
  text_area(:record, column.name, options.merge(dimensions))
end
# Virtual columns have no backing DB column; render a plain text field
# with the standard text-input options.
def active_scaffold_input_virtual(column, options)
  text_field(:record, column.name, active_scaffold_input_text_options(options).merge(column.options))
end
#
# Column.type-based inputs
#
# Boolean dropdown: optional blank entry for nullable columns, then true/false.
def active_scaffold_input_boolean(column, options)
  blank_choice = column.column.null ? [[as_(:_select_), nil]] : []
  choices = blank_choice + [[as_(:true), true], [as_(:false), false]]
  select_tag(options[:name], options_for_select(choices, @record.send(column.name)), options)
end
# Form onsubmit javascript hook; returns nil (no-op) by default.
# Presumably intended to be overridden elsewhere — confirm callers.
def onsubmit
end
##
## Form column override signatures
##
# add functionality for overriding subform partials from association class path
# True when an association-class-specific subform partial template exists.
def override_subform_partial?(column, subform_partial)
path, partial_name = partial_pieces(override_subform_partial(column, subform_partial))
template_exists?(File.join(path, "_#{partial_name}"))
end
# Path of the association-class-specific subform partial; nil unless the
# column actually renders as a subform.
def override_subform_partial(column, subform_partial)
File.join(active_scaffold_controller_for(column.association.klass).controller_path, subform_partial) if column_renders_as(column) == :subform
end
# True when a "#{column.name}_form_column" partial template exists.
def override_form_field_partial?(column)
path, partial_name = partial_pieces(override_form_field_partial(column))
template_exists?(File.join(path, "_#{partial_name}"), true)
end
# the naming convention for overriding form fields with partials
def override_form_field_partial(column)
  [column.name, 'form_column'].join('_')
end
# True when a "#{column.name}_form_column" helper method is defined on self.
def override_form_field?(column)
respond_to?(override_form_field(column))
end
# the naming convention for overriding form fields with helpers
def override_form_field(column)
  format('%s_form_column', column.name)
end
# True when an active_scaffold_input_* helper exists for this form_ui.
def override_input?(form_ui)
respond_to?(override_input(form_ui))
end
# the naming convention for overriding form input types with helpers
def override_input(form_ui)
  'active_scaffold_input_' + form_ui.to_s
end
# Chooses the partial that renders this column in the form: a dev-supplied
# partial override wins, then the standard attribute, association (subform)
# or hidden partials according to the render mode.
def form_partial_for_column(column)
  return override_form_field_partial(column) if override_form_field_partial?(column)
  renders_as = column_renders_as(column)
  if renders_as == :field || override_form_field?(column)
    'form_attribute'
  elsif renders_as == :subform
    'form_association'
  elsif renders_as == :hidden
    'form_hidden_attribute'
  end
end
# Default subform partial derives from the association config's subform
# layout; an association-class-specific override wins when present.
def subform_partial_for_column(column)
  layout = active_scaffold_config_for(column.association.klass).subform.layout
  default_partial = "#{layout}_subform"
  if override_subform_partial?(column, default_partial)
    override_subform_partial(column, default_partial)
  else
    default_partial
  end
end
##
## Macro-level rendering decisions for columns
##
# Branch order matters: subsection first, then hidden (optimistic-locking
# column or explicit :hidden form_ui), then plain field (no association,
# explicit form_ui, or subform action disabled), else subform.
def column_renders_as(column)
if column.is_a? ActiveScaffold::DataStructures::ActionColumns
return :subsection
elsif column.active_record_class.locking_column.to_s == column.name.to_s or column.form_ui == :hidden
return :hidden
elsif column.association.nil? or column.form_ui or !active_scaffold_config_for(column.association.klass).actions.include?(:subform)
return :field
else
return :subform
end
end
# True when the column renders as a subsection grouping.
def is_subsection?(column)
column_renders_as(column) == :subsection
end
# True when the column renders as a nested subform.
def is_subform?(column)
column_renders_as(column) == :subform
end
# Form input name scope for a column; plural associations are keyed by
# record id (or a generated temporary id for unsaved records).
def column_scope(column)
  return "[#{column.name}]" unless column.plural_association?
  "[#{column.name}][#{@record.id || generate_temporary_id}]"
end
# Input for the add_existing action: a RecordSelect field when the plugin
# is configured on the controller, otherwise a dropdown of candidate records.
def active_scaffold_add_existing_input(options)
if controller.respond_to?(:record_select_config)
remote_controller = active_scaffold_controller_for(record_select_config.model).controller_path
options.merge!(:controller => remote_controller)
options.merge!(active_scaffold_input_text_options)
record_select_field(options[:name], @record, options)
else
column = active_scaffold_config_for(params[:parent_model]).columns[params[:parent_column]]
# through associations can't use options_for_association; fall back to all records
select_options = options_for_select(options_for_association(column.association)) unless column.through_association?
select_options ||= options_for_select(active_scaffold_config.model.find(:all).collect {|c| [h(c.to_label), c.id]})
select_tag 'associated_id', '<option value="">' + as_(:_select_) + '</option>' + select_options unless select_options.empty?
end
end
# Label for the add_existing action: the RecordSelect model's name when the
# plugin is configured, otherwise the scaffold's own model name.
def active_scaffold_add_existing_label
  model = controller.respond_to?(:record_select_config) ? record_select_config.model : active_scaffold_config.model
  model.human_name
end
end
end
end
Fix for setting non-string options in :select form_ui
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a Form Column.
# NOTE(review): this revision stringifies option values in the :select
# form_ui (see active_scaffold_translated_option) so non-string values
# survive the form round-trip.
module FormColumnHelpers
# This method decides which input to use for the given column.
# It does not do any rendering. It only decides which method is responsible for rendering.
def active_scaffold_input_for(column, scope = nil, options = {})
begin
options = active_scaffold_input_options(column, scope, options)
options = javascript_for_update_column(column, scope, options)
# first, check if the dev has created an override for this specific field
if override_form_field?(column)
send(override_form_field(column), @record, options)
# second, check if the dev has specified a valid form_ui for this column
elsif column.form_ui and override_input?(column.form_ui)
send(override_input(column.form_ui), column, options)
# fallback: we get to make the decision
else
if column.association
# if we get here, it's because the column has a form_ui but not one ActiveScaffold knows about.
raise "Unknown form_ui `#{column.form_ui}' for column `#{column.name}'"
elsif column.virtual?
active_scaffold_input_virtual(column, options)
else # regular model attribute column
# if we (or someone else) have created a custom render option for the column type, use that
if override_input?(column.column.type)
send(override_input(column.column.type), column, options)
# final ultimate fallback: use rails' generic input method
else
# for textual fields we pass different options
text_types = [:text, :string, :integer, :float, :decimal]
date_types = [:date, :datetime, :time]
options = active_scaffold_input_text_options(options) if text_types.include?(column.column.type)
options = active_scaffold_input_date_options(column, options) if date_types.include?(column.column.type)
if column.column.type == :string && options[:maxlength].blank?
options[:maxlength] = column.column.limit
options[:size] ||= ActionView::Helpers::InstanceTag::DEFAULT_FIELD_OPTIONS["size"]
end
options[:value] = format_number_value(@record.send(column.name), column.options) if column.column.number?
input(:record, column.name, options.merge(column.options))
end
end
end
# log with column/controller context, then re-raise for normal handling
rescue Exception => e
logger.error Time.now.to_s + "#{e.inspect} -- on the ActiveScaffold column = :#{column.name} in #{@controller.class}"
raise e
end
end
alias form_column active_scaffold_input_for
# the standard active scaffold options used for textual inputs
def active_scaffold_input_text_options(options = {})
options[:autocomplete] = 'off'
options[:class] = "#{options[:class]} text-input".strip
options
end
# the standard active scaffold options used for date, datetime and time inputs
def active_scaffold_input_date_options(column, options = {})
options[:include_blank] = true if column.column.null
options[:prefix] = options[:name].gsub("[#{column.name}]", '')
options
end
# the standard active scaffold options used for class, name and scope
def active_scaffold_input_options(column, scope = nil, options = {})
name = scope ? "record#{scope}[#{column.name}]" : "record[#{column.name}]"
# Fix for keeping unique IDs in subform
id_control = "record_#{column.name}_#{[params[:eid], params[:id]].compact.join '_'}"
id_control += scope.gsub(/(\[|\])/, '_').gsub('__', '_').gsub(/_$/, '') if scope
{ :name => name, :class => "#{column.name}-input", :id => id_control}.merge(options)
end
# Wires an AJAX call into the input's onchange when the column declares
# :update_column, so editing this field re-renders the dependent column.
def javascript_for_update_column(column, scope, options)
if column.options[:update_column]
form_action = :create
form_action = :update if params[:action] == 'edit'
url_params = {:action => 'render_field', :id => params[:id], :column => column.name, :update_column => column.options[:update_column]}
url_params[:eid] = params[:eid] if params[:eid]
url_params[:controller] = controller.class.active_scaffold_controller_for(@record.class).controller_path if scope
url_params[:scope] = params[:scope] if scope
ajax_options = {:method => :get,
:url => url_for(url_params), :with => "'value=' + this.value",
:after => "$('#{loading_indicator_id(:action => :render_field, :id => params[:id])}').style.visibility = 'visible'; Form.disable('#{element_form_id(:action => form_action)}');",
:complete => "$('#{loading_indicator_id(:action => :render_field, :id => params[:id])}').style.visibility = 'hidden'; Form.enable('#{element_form_id(:action => form_action)}');"}
options[:onchange] = "#{remote_function(ajax_options)};#{options[:onchange]}"
end
options
end
##
## Form input methods
##
# Dropdown for a singular (belongs_to/has_one) association; the currently
# associated record is kept in the option list even if no longer offered.
def active_scaffold_input_singular_association(column, html_options)
associated = @record.send(column.association.name)
select_options = options_for_association(column.association)
select_options.unshift([ associated.to_label, associated.id ]) unless associated.nil? or select_options.find {|label, id| id == associated.id}
selected = associated.nil? ? nil : associated.id
method = column.name
#html_options[:name] += '[id]'
options = {:selected => selected, :include_blank => as_(:_select_)}
html_options.update(column.options[:html_options] || {})
options.update(column.options)
select(:record, method, select_options.uniq, options, html_options)
end
# Checkbox list for a plural (has_many/habtm) association.
def active_scaffold_input_plural_association(column, options)
associated_options = @record.send(column.association.name).collect {|r| [r.to_label, r.id]}
select_options = associated_options | options_for_association(column.association)
return content_tag(:span, as_(:no_options), :id => options[:id]) if select_options.empty?
html = "<ul class=\"checkbox-list\" id=\"#{options[:id]}\">"
associated_ids = associated_options.collect {|a| a[1]}
select_options.each_with_index do |option, i|
label, id = option
this_name = "#{options[:name]}[]"
this_id = "#{options[:id]}_#{i}_id"
html << "<li>"
html << check_box_tag(this_name, id, associated_ids.include?(id), :id => this_id)
html << "<label for='#{this_id}'>"
html << label
html << "</label>"
html << "</li>"
end
html << '</ul>'
html << javascript_tag("new DraggableLists('#{options[:id]}')") if column.options[:draggable_lists]
html
end
# Builds a [label, value] pair; symbol labels are translated via
# human_attribute_name and the value is always stringified so it matches
# the submitted (string) parameter.
def active_scaffold_translated_option(column, text, value = nil)
value = text if value.nil?
[(text.is_a?(Symbol) ? column.active_record_class.human_attribute_name(text) : text), value.to_s]
end
# Translated [label, value] pairs for the column's configured :options.
def active_scaffold_translated_options(column)
column.options[:options].collect do |text, value|
active_scaffold_translated_option(column, text, value)
end
end
# :select form_ui — association dropdowns for association columns,
# otherwise a plain dropdown over the configured options.
def active_scaffold_input_select(column, html_options)
if column.singular_association?
active_scaffold_input_singular_association(column, html_options)
elsif column.plural_association?
active_scaffold_input_plural_association(column, html_options)
else
# to_s matches the stringified option values produced above
options = { :selected => @record.send(column.name).to_s }
options_for_select = active_scaffold_translated_options(column)
html_options.update(column.options[:html_options] || {})
options.update(column.options)
select(:record, column.name, options_for_select, options, html_options)
end
end
# :radio form_ui — one labelled radio button per configured option.
def active_scaffold_input_radio(column, html_options)
html_options.update(column.options[:html_options] || {})
column.options[:options].inject('') do |html, (text, value)|
text, value = active_scaffold_translated_option(column, text, value)
html << content_tag(:label, radio_button(:record, column.name, value, html_options.merge(:id => html_options[:id] + '-' + value.to_s)) + text)
end
end
# requires RecordSelect plugin to be installed and configured.
# ... maybe this should be provided in a bridge?
def active_scaffold_input_record_select(column, options)
if column.singular_association?
active_scaffold_record_select(column, options, @record.send(column.name), false)
elsif column.plural_association?
active_scaffold_record_select(column, options, @record.send(column.name), true)
end
end
# Renders a RecordSelect field; multiple selects the multi-select variant.
def active_scaffold_record_select(column, options, value, multiple)
unless column.association
raise ArgumentError, "record_select can only work against associations (and #{column.name} is not). A common mistake is to specify the foreign key field (like :user_id), instead of the association (:user)."
end
remote_controller = active_scaffold_controller_for(column.association.klass).controller_path
# if the opposite association is a :belongs_to (in that case association in this class must be has_one or has_many)
# then only show records that have not been associated yet
if [:has_one, :has_many].include?(column.association.macro)
params.merge!({column.association.primary_key_name => ''})
end
record_select_options = {:controller => remote_controller, :id => options[:id]}
record_select_options.merge!(active_scaffold_input_text_options)
record_select_options.merge!(column.options)
if multiple
record_multi_select_field(options[:name], value || [], record_select_options)
else
record_select_field(options[:name], value || column.association.klass.new, record_select_options)
end
end
# :checkbox form_ui — plain Rails checkbox bound to the column.
def active_scaffold_input_checkbox(column, options)
check_box(:record, column.name, options)
end
# :password form_ui — password field with standard text-input options.
def active_scaffold_input_password(column, options)
options = active_scaffold_input_text_options(options)
password_field :record, column.name, options.merge(column.options)
end
# :textarea form_ui — dimensions come from the column's options.
def active_scaffold_input_textarea(column, options)
text_area(:record, column.name, options.merge(:cols => column.options[:cols], :rows => column.options[:rows], :size => column.options[:size]))
end
# Virtual columns have no backing DB column; render a plain text field.
def active_scaffold_input_virtual(column, options)
options = active_scaffold_input_text_options(options)
text_field :record, column.name, options.merge(column.options)
end
#
# Column.type-based inputs
#
# Boolean dropdown: blank entry for nullable columns, then true/false.
def active_scaffold_input_boolean(column, options)
select_options = []
select_options << [as_(:_select_), nil] if column.column.null
select_options << [as_(:true), true]
select_options << [as_(:false), false]
select_tag(options[:name], options_for_select(select_options, @record.send(column.name)), options)
end
# Form onsubmit javascript hook; no-op by default.
def onsubmit
end
##
## Form column override signatures
##
# add functionality for overriding subform partials from association class path
def override_subform_partial?(column, subform_partial)
path, partial_name = partial_pieces(override_subform_partial(column, subform_partial))
template_exists?(File.join(path, "_#{partial_name}"))
end
def override_subform_partial(column, subform_partial)
File.join(active_scaffold_controller_for(column.association.klass).controller_path, subform_partial) if column_renders_as(column) == :subform
end
def override_form_field_partial?(column)
path, partial_name = partial_pieces(override_form_field_partial(column))
template_exists?(File.join(path, "_#{partial_name}"), true)
end
# the naming convention for overriding form fields with partials
def override_form_field_partial(column)
"#{column.name}_form_column"
end
def override_form_field?(column)
respond_to?(override_form_field(column))
end
# the naming convention for overriding form fields with helpers
def override_form_field(column)
"#{column.name}_form_column"
end
def override_input?(form_ui)
respond_to?(override_input(form_ui))
end
# the naming convention for overriding form input types with helpers
def override_input(form_ui)
"active_scaffold_input_#{form_ui}"
end
# Partial used to render this column (override > field > subform > hidden).
def form_partial_for_column(column)
if override_form_field_partial?(column)
override_form_field_partial(column)
elsif column_renders_as(column) == :field or override_form_field?(column)
"form_attribute"
elsif column_renders_as(column) == :subform
"form_association"
elsif column_renders_as(column) == :hidden
"form_hidden_attribute"
end
end
# Subform partial for an association column, honoring per-class overrides.
def subform_partial_for_column(column)
subform_partial = "#{active_scaffold_config_for(column.association.klass).subform.layout}_subform"
if override_subform_partial?(column, subform_partial)
override_subform_partial(column, subform_partial)
else
subform_partial
end
end
##
## Macro-level rendering decisions for columns
##
# Branch order matters: subsection, then hidden (locking column or explicit
# :hidden form_ui), then plain field, else subform.
def column_renders_as(column)
if column.is_a? ActiveScaffold::DataStructures::ActionColumns
return :subsection
elsif column.active_record_class.locking_column.to_s == column.name.to_s or column.form_ui == :hidden
return :hidden
elsif column.association.nil? or column.form_ui or !active_scaffold_config_for(column.association.klass).actions.include?(:subform)
return :field
else
return :subform
end
end
def is_subsection?(column)
column_renders_as(column) == :subsection
end
def is_subform?(column)
column_renders_as(column) == :subform
end
# Form input name scope; plural associations are keyed by record id
# (or a generated temporary id for unsaved records).
def column_scope(column)
if column.plural_association?
"[#{column.name}][#{@record.id || generate_temporary_id}]"
else
"[#{column.name}]"
end
end
# Input for the add_existing action: RecordSelect field when the plugin is
# configured, otherwise a dropdown of candidate records.
def active_scaffold_add_existing_input(options)
if controller.respond_to?(:record_select_config)
remote_controller = active_scaffold_controller_for(record_select_config.model).controller_path
options.merge!(:controller => remote_controller)
options.merge!(active_scaffold_input_text_options)
record_select_field(options[:name], @record, options)
else
column = active_scaffold_config_for(params[:parent_model]).columns[params[:parent_column]]
select_options = options_for_select(options_for_association(column.association)) unless column.through_association?
select_options ||= options_for_select(active_scaffold_config.model.find(:all).collect {|c| [h(c.to_label), c.id]})
select_tag 'associated_id', '<option value="">' + as_(:_select_) + '</option>' + select_options unless select_options.empty?
end
end
# Label for the add_existing action.
def active_scaffold_add_existing_label
if controller.respond_to?(:record_select_config)
record_select_config.model.human_name
else
active_scaffold_config.model.human_name
end
end
end
end
end
|
# coding: utf-8
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a List Column
module ListColumnHelpers
def get_column_value(record, column)
method = get_column_method(record, column)
value = send(method, record, column)
value = ' '.html_safe if value.nil? || value.blank? # fix for IE 6
return value
rescue StandardError => e
logger.error "#{e.class.name}: #{e.message} -- on the ActiveScaffold column = :#{column.name} in #{controller.class}, record: #{record.inspect}"
raise e
end
def get_column_method(record, column)
# check for an override helper
column.list_method ||= begin
if (method = column_override(column))
# we only pass the record as the argument. we previously also passed the formatted_value,
# but mike perham pointed out that prohibited the usage of overrides to improve on the
# performance of our default formatting. see issue #138.
method
# second, check if the dev has specified a valid list_ui for this column
elsif column.list_ui && (method = override_column_ui(column.list_ui))
method
elsif column.column && (method = override_column_ui(column.column.type))
method
else
:format_column_value
end
end
end
# TODO: move empty_field_text and logic in here?
# TODO: we need to distinguish between the automatic links *we* create and the ones that the dev specified. some logic may not apply if the dev specified the link.
def render_list_column(text, column, record)
if column.link && !skip_action_link?(column.link, record)
link = column.link
associated = record.send(column.association.name) if column.association
render_action_link(link, record, :link => text, :authorized => link.action.nil? || column_link_authorized?(link, column, record, associated))
elsif inplace_edit?(record, column)
active_scaffold_inplace_edit(record, column, :formatted_column => text)
elsif active_scaffold_config.actions.include?(:list) && active_scaffold_config.list.wrap_tag
content_tag active_scaffold_config.list.wrap_tag, text
else
text
end
rescue StandardError => e
logger.error "#{e.class.name}: #{e.message} -- on the ActiveScaffold column = :#{column.name} in #{controller.class}"
raise e
end
# There are two basic ways to clean a column's value: h() and sanitize(). The latter is useful
# when the column contains *valid* html data, and you want to just disable any scripting. People
# can always use field overrides to clean data one way or the other, but having this override
# lets people decide which way it should happen by default.
#
# Why is it not a configuration option? Because it seems like a somewhat rare request. But it
# could eventually be an option in config.list (and config.show, I guess).
def clean_column_value(v)
h(v)
end
##
## Overrides
##
def active_scaffold_column_text(record, column)
# `to_s` is necessary to convert objects in serialized columns to string before truncation.
clean_column_value(truncate(record.send(column.name).to_s, :length => column.options[:truncate] || 50))
end
def active_scaffold_column_fulltext(record, column)
clean_column_value(record.send(column.name))
end
def active_scaffold_column_marked(record, column)
options = {:id => nil, :object => record}
content_tag(:span, check_box(:record, column.name, options), :class => 'in_place_editor_field', :data => {:ie_id => record.to_param})
end
def active_scaffold_column_checkbox(record, column)
options = {:disabled => true, :id => nil, :object => record}
options.delete(:disabled) if inplace_edit?(record, column)
check_box(:record, column.name, options)
end
def active_scaffold_column_slider(record, column)
options = column.options[:slider] || {}
options = options.merge(min: record.send(options[:min_method])) if options[:min_method]
options = options.merge(max: record.send(options[:max_method])) if options[:max_method]
value = record.send(options[:value_method]) if options[:value_method]
as_slider options.merge(value: value || record.send(column.name))
end
def column_override(column)
override_helper column, 'column'
end
alias_method :column_override?, :column_override
# the naming convention for overriding column types with helpers
def override_column_ui(list_ui)
@_column_ui_overrides ||= {}
return @_column_ui_overrides[list_ui] if @_column_ui_overrides.include? list_ui
method = "active_scaffold_column_#{list_ui}"
@_column_ui_overrides[list_ui] = (method if respond_to? method)
end
alias_method :override_column_ui?, :override_column_ui
##
## Formatting
##
def format_column_value(record, column, value = nil)
value ||= record.send(column.name) unless record.nil?
if column.association.nil?
if [:select, :radio].include?(column.form_ui) && column.options[:options]
text, val = column.options[:options].find { |text, val| (val.nil? ? text : val).to_s == value.to_s }
value = active_scaffold_translated_option(column, text, val).first if text
end
if value.is_a? Numeric
format_number_value(value, column.options)
else
format_value(value, column.options)
end
else
if column.plural_association?
associated_size = value.size if column.associated_number? # get count before cache association
cache_association(record.association(column.name), column, associated_size) unless value.loaded?
end
format_association_value(value, column, associated_size)
end
end
def format_number_value(value, options = {})
value = case options[:format]
when :size
number_to_human_size(value, options[:i18n_options] || {})
when :percentage
number_to_percentage(value, options[:i18n_options] || {})
when :currency
number_to_currency(value, options[:i18n_options] || {})
when :i18n_number
send("number_with_#{value.is_a?(Integer) ? 'delimiter' : 'precision'}", value, options[:i18n_options] || {})
else
value
end
clean_column_value(value)
end
def format_collection_association_value(value, column, label_method, size)
if column.associated_limit.nil?
firsts = value.collect(&label_method)
elsif column.associated_limit == 0
size if column.associated_number?
else
firsts = value.first(column.associated_limit)
firsts.collect!(&label_method)
firsts << '…' if value.size > column.associated_limit
text = firsts.join(h(active_scaffold_config.list.association_join_text)).html_safe
text << " (#{size})" if column.associated_number? && column.associated_limit && value.size > column.associated_limit
text
end
end
def format_singular_association_value(value, column, label_method)
if column.polymorphic_association?
"#{value.class.model_name.human}: #{value.send(label_method)}"
else
value.send(label_method)
end
end
def format_association_value(value, column, size)
method = column.options[:label_method] || :to_label
value =
if column.association.collection?
format_collection_association_value(value, column, method, size)
elsif value
format_singular_association_value(value, column, method)
end
format_value value
end
def format_value(column_value, options = {})
value =
if column_empty?(column_value)
empty_field_text
elsif column_value.is_a?(Time) || column_value.is_a?(Date)
l(column_value, :format => options[:format] || :default)
elsif [FalseClass, TrueClass].include?(column_value.class)
as_(column_value.to_s.to_sym)
else
column_value.to_s
end
clean_column_value(value)
end
def cache_association(association, column, size)
# we are not using eager loading, cache firsts records in order not to query the database for whole association in a future
if column.associated_limit.nil?
logger.warn "ActiveScaffold: Enable eager loading for #{column.name} association to reduce SQL queries"
elsif column.associated_limit > 0
# load at least one record more, is needed to display '...'
association.target = association.reader.limit(column.associated_limit + 1).select(column.select_associated_columns || "#{association.klass.quoted_table_name}.*").to_a
elsif @cache_associations
association.target = []
end
end
# ==========
# = Inline Edit =
# ==========
def inplace_edit?(record, column)
return unless column.inplace_edit
editable = controller.send(:update_authorized?, record) if controller.respond_to?(:update_authorized?, true)
editable || record.authorized_for?(:crud_type => :update, :column => column.name)
end
def inplace_edit_cloning?(column)
column.inplace_edit != :ajax && (override_form_field?(column) || column.form_ui || (column.column && override_input?(column.column.type)))
end
def active_scaffold_inplace_edit_tag_options(record, column)
id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
tag_options = {:id => element_cell_id(id_options), :class => 'in_place_editor_field',
:title => as_(:click_to_edit), :data => {:ie_id => record.to_param}}
tag_options[:data][:ie_update] = column.inplace_edit if column.inplace_edit != true
tag_options
end
def active_scaffold_inplace_edit(record, column, options = {})
formatted_column = options[:formatted_column] || format_column_value(record, column)
content_tag(:span, as_(:inplace_edit_handle), :class => 'handle') <<
content_tag(:span, formatted_column, active_scaffold_inplace_edit_tag_options(record, column))
end
def inplace_edit_control(column)
return unless inplace_edit?(active_scaffold_config.model, column) && inplace_edit_cloning?(column)
old_record, @record = @record, active_scaffold_config.model.new # TODO: remove when relying on @record is removed
column = column.clone
column.options = column.options.clone
column.form_ui = :select if column.association && column.form_ui.nil?
options = active_scaffold_input_options(column).merge(:object => active_scaffold_config.model.new)
options[:class] = "#{options[:class]} inplace_field"
options[:"data-id"] = options[:id]
options[:id] = nil
content_tag(:div, active_scaffold_input_for(column, nil, options), :style => 'display:none;', :class => inplace_edit_control_css_class).tap do
@record = old_record # TODO: remove when relying on @record is removed
end
end
def inplace_edit_control_css_class
'as_inplace_pattern'
end
def inplace_edit_data(column)
data = {}
data[:ie_url] = url_for(params_for(:action => 'update_column', :column => column.name, :id => '__id__'))
data[:ie_cancel_text] = column.options[:cancel_text] || as_(:cancel)
data[:ie_loading_text] = column.options[:loading_text] || as_(:loading)
data[:ie_save_text] = column.options[:save_text] || as_(:update)
data[:ie_saving_text] = column.options[:saving_text] || as_(:saving)
data[:ie_rows] = column.options[:rows] || 5 if column.column.try(:type) == :text
data[:ie_cols] = column.options[:cols] if column.options[:cols]
data[:ie_size] = column.options[:size] if column.options[:size]
data[:ie_use_html] = column.options[:use_html] if column.options[:use_html]
if column.list_ui == :checkbox
data[:ie_mode] = :inline_checkbox
elsif inplace_edit_cloning?(column)
data[:ie_mode] = :clone
elsif column.inplace_edit == :ajax
url = url_for(:controller => params_for[:controller], :action => 'render_field', :id => '__id__', :update_column => column.name)
plural = column.plural_association? && !override_form_field?(column) && [:select, :record_select].include?(column.form_ui)
data[:ie_render_url] = url
data[:ie_mode] = :ajax
data[:ie_plural] = plural
end
data
end
def all_marked?
if active_scaffold_config.mark.mark_all_mode == :page
@page.items.detect { |record| !marked_records.include?(record.id) }.nil?
else
marked_records.length >= @page.pager.count.to_i
end
end
def mark_column_heading
tag_options = {
:id => "#{controller_id}_mark_heading",
:class => 'mark_heading in_place_editor_field'
}
content_tag(:span, check_box_tag("#{controller_id}_mark_heading_span_input", '1', all_marked?), tag_options)
end
def column_heading_attributes(column, sorting, sort_direction)
{:id => active_scaffold_column_header_id(column), :class => column_heading_class(column, sorting), :title => strip_tags(column.description).presence}
end
def render_column_heading(column, sorting, sort_direction)
tag_options = column_heading_attributes(column, sorting, sort_direction)
if column.name == :as_marked
tag_options[:data] = {
:ie_mode => :inline_checkbox,
:ie_url => url_for(params_for(:action => 'mark', :id => '__id__'))
}
else
tag_options[:data] = inplace_edit_data(column) if column.inplace_edit
end
content_tag(:th, column_heading_value(column, sorting, sort_direction) + inplace_edit_control(column), tag_options)
end
# Content of a column heading: the mark-all checkbox for :as_marked,
# a remote sorting link for sortable columns, otherwise the plain label
# wrapped in a <p>.
def column_heading_value(column, sorting, sort_direction)
  if column.name == :as_marked
    mark_column_heading
  elsif column.sortable?
    options = {:id => nil, :class => 'as_sort',
               'data-page-history' => controller_id,
               :remote => true, :method => :get}
    url_options = params_for(:action => :index, :page => 1,
                             :sort => column.name, :sort_direction => sort_direction)
    # when user settings are stored server-side the current search is
    # restored there; otherwise carry it along in the sort link itself
    unless active_scaffold_config.store_user_settings
      url_options.merge!(:search => search_params) if search_params.present?
    end
    link_to column_heading_label(column), url_options, options
  else
    content_tag(:p, column_heading_label(column))
  end
end
# Text shown in a column heading; extracted as its own method so
# applications can override how header labels are produced.
def column_heading_label(column)
  column.label
end
end
end
end
fix list_ui name [skip ci]
# coding: utf-8
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a List Column
module ListColumnHelpers
# Renders the cell value for +record+/+column+ by sending the helper
# selected by get_column_method. Blank results are replaced with an
# html_safe space so the cell keeps its height (legacy IE fix).
# Errors are logged with column/controller/record context, then re-raised.
def get_column_value(record, column)
  method = get_column_method(record, column)
  value = send(method, record, column)
  value = ' '.html_safe if value.nil? || value.blank? # fix for IE 6
  return value
rescue StandardError => e
  logger.error "#{e.class.name}: #{e.message} -- on the ActiveScaffold column = :#{column.name} in #{controller.class}, record: #{record.inspect}"
  raise e
end
# Picks (and memoizes on the column object itself) the helper used to
# render this column's cells, in priority order: dev-supplied column
# override helper, list_ui helper, column-type helper, default formatter.
# NOTE(review): +record+ is accepted but unused — the choice is cached per
# column, so it cannot vary per record; presumably kept for signature
# stability with callers/overrides.
def get_column_method(record, column)
  # check for an override helper
  column.list_method ||= begin
    if (method = column_override(column))
      # we only pass the record as the argument. we previously also passed the formatted_value,
      # but mike perham pointed out that prohibited the usage of overrides to improve on the
      # performance of our default formatting. see issue #138.
      method
    # second, check if the dev has specified a valid list_ui for this column
    elsif column.list_ui && (method = override_column_ui(column.list_ui))
      method
    elsif column.column && (method = override_column_ui(column.column.type))
      method
    else
      :format_column_value
    end
  end
end
# TODO: move empty_field_text and logic in here?
# TODO: we need to distinguish between the automatic links *we* create and the ones that the dev specified. some logic may not apply if the dev specified the link.
# Wraps the already-formatted cell +text+: in the column's action link when
# one applies (and is not skipped for this record), in an inplace-edit span
# when the column is inplace-editable, or in the configured list wrap tag;
# otherwise the text is returned unchanged.
# Errors are logged with column/controller context, then re-raised.
def render_list_column(text, column, record)
  if column.link && !skip_action_link?(column.link, record)
    link = column.link
    associated = record.send(column.association.name) if column.association
    # :authorized is precomputed here so the link renderer does not have to
    # re-run authorization for links without an action
    render_action_link(link, record, :link => text, :authorized => link.action.nil? || column_link_authorized?(link, column, record, associated))
  elsif inplace_edit?(record, column)
    active_scaffold_inplace_edit(record, column, :formatted_column => text)
  elsif active_scaffold_config.actions.include?(:list) && active_scaffold_config.list.wrap_tag
    content_tag active_scaffold_config.list.wrap_tag, text
  else
    text
  end
rescue StandardError => e
  logger.error "#{e.class.name}: #{e.message} -- on the ActiveScaffold column = :#{column.name} in #{controller.class}"
  raise e
end
# There are two basic ways to clean a column's value: h() and sanitize(). The latter is useful
# when the column contains *valid* html data, and you want to just disable any scripting. People
# can always use field overrides to clean data one way or the other, but having this override
# lets people decide which way it should happen by default.
#
# Why is it not a configuration option? Because it seems like a somewhat rare request. But it
# could eventually be an option in config.list (and config.show, I guess).
def clean_column_value(v)
h(v)
end
##
## Overrides
##
def active_scaffold_column_text(record, column)
# `to_s` is necessary to convert objects in serialized columns to string before truncation.
clean_column_value(truncate(record.send(column.name).to_s, :length => column.options[:truncate] || 50))
end
def active_scaffold_column_fulltext(record, column)
clean_column_value(record.send(column.name))
end
def active_scaffold_column_marked(record, column)
options = {:id => nil, :object => record}
content_tag(:span, check_box(:record, column.name, options), :class => 'in_place_editor_field', :data => {:ie_id => record.to_param})
end
def active_scaffold_column_checkbox(record, column)
options = {:disabled => true, :id => nil, :object => record}
options.delete(:disabled) if inplace_edit?(record, column)
check_box(:record, column.name, options)
end
def active_scaffold_column_percentage(record, column)
options = column.options[:slider] || {}
options = options.merge(min: record.send(options[:min_method])) if options[:min_method]
options = options.merge(max: record.send(options[:max_method])) if options[:max_method]
value = record.send(options[:value_method]) if options[:value_method]
as_slider options.merge(value: value || record.send(column.name))
end
def column_override(column)
override_helper column, 'column'
end
alias_method :column_override?, :column_override
# the naming convention for overriding column types with helpers
def override_column_ui(list_ui)
@_column_ui_overrides ||= {}
return @_column_ui_overrides[list_ui] if @_column_ui_overrides.include? list_ui
method = "active_scaffold_column_#{list_ui}"
@_column_ui_overrides[list_ui] = (method if respond_to? method)
end
alias_method :override_column_ui?, :override_column_ui
##
## Formatting
##
def format_column_value(record, column, value = nil)
value ||= record.send(column.name) unless record.nil?
if column.association.nil?
if [:select, :radio].include?(column.form_ui) && column.options[:options]
text, val = column.options[:options].find { |text, val| (val.nil? ? text : val).to_s == value.to_s }
value = active_scaffold_translated_option(column, text, val).first if text
end
if value.is_a? Numeric
format_number_value(value, column.options)
else
format_value(value, column.options)
end
else
if column.plural_association?
associated_size = value.size if column.associated_number? # get count before cache association
cache_association(record.association(column.name), column, associated_size) unless value.loaded?
end
format_association_value(value, column, associated_size)
end
end
def format_number_value(value, options = {})
value = case options[:format]
when :size
number_to_human_size(value, options[:i18n_options] || {})
when :percentage
number_to_percentage(value, options[:i18n_options] || {})
when :currency
number_to_currency(value, options[:i18n_options] || {})
when :i18n_number
send("number_with_#{value.is_a?(Integer) ? 'delimiter' : 'precision'}", value, options[:i18n_options] || {})
else
value
end
clean_column_value(value)
end
# Builds the display text for a collection association:
# - associated_limit nil  => labels for every associated record
# - associated_limit == 0 => just the count (when associated_number?)
# - associated_limit > 0  => first N labels, '…' marker, optional count
# NOTE(review): the nil-limit branch returns the raw label Array without
# joining; downstream format_value will call to_s on it, producing
# inspect-style output — confirm whether it should join with
# association_join_text like the else branch does.
def format_collection_association_value(value, column, label_method, size)
  if column.associated_limit.nil?
    firsts = value.collect(&label_method)
  elsif column.associated_limit == 0
    size if column.associated_number?
  else
    firsts = value.first(column.associated_limit)
    firsts.collect!(&label_method)
    # the presence of an extra (limit+1) record signals truncation
    firsts << '…' if value.size > column.associated_limit
    text = firsts.join(h(active_scaffold_config.list.association_join_text)).html_safe
    text << " (#{size})" if column.associated_number? && column.associated_limit && value.size > column.associated_limit
    text
  end
end
# Label for a singular association value. Polymorphic associations are
# prefixed with the record's human model name so rows of mixed types
# remain distinguishable.
def format_singular_association_value(value, column, label_method)
  if column.polymorphic_association?
    format('%s: %s', value.class.model_name.human, value.send(label_method))
  else
    value.send(label_method)
  end
end
def format_association_value(value, column, size)
method = column.options[:label_method] || :to_label
value =
if column.association.collection?
format_collection_association_value(value, column, method, size)
elsif value
format_singular_association_value(value, column, method)
end
format_value value
end
# Formats a raw column value for display: empty values become the
# configured empty-field text, Time/Date values are localized using the
# :format option (default :default), booleans are translated, anything
# else is stringified. The result is passed through clean_column_value
# (HTML escaping by default).
def format_value(column_value, options = {})
  value =
    if column_empty?(column_value)
      empty_field_text
    elsif column_value.is_a?(Time) || column_value.is_a?(Date)
      l(column_value, :format => options[:format] || :default)
    elsif [FalseClass, TrueClass].include?(column_value.class)
      as_(column_value.to_s.to_sym)
    else
      column_value.to_s
    end
  clean_column_value(value)
end
# Pre-fills the association target so rendering a cell does not load the
# whole association later (used when eager loading is off):
# - limit nil: cache nothing, but warn that eager loading would be better
# - limit > 0: load limit+1 records (the extra one drives the '…' marker)
# - limit == 0 with @cache_associations: cache an empty target
# NOTE(review): +size+ is accepted but unused in this body — confirm
# whether callers still need to pass it.
def cache_association(association, column, size)
  # we are not using eager loading, cache firsts records in order not to query the database for whole association in a future
  if column.associated_limit.nil?
    logger.warn "ActiveScaffold: Enable eager loading for #{column.name} association to reduce SQL queries"
  elsif column.associated_limit > 0
    # load at least one record more, is needed to display '...'
    association.target = association.reader.limit(column.associated_limit + 1).select(column.select_associated_columns || "#{association.klass.quoted_table_name}.*").to_a
  elsif @cache_associations
    association.target = []
  end
end
# ==========
# = Inline Edit =
# ==========
def inplace_edit?(record, column)
return unless column.inplace_edit
editable = controller.send(:update_authorized?, record) if controller.respond_to?(:update_authorized?, true)
editable || record.authorized_for?(:crud_type => :update, :column => column.name)
end
def inplace_edit_cloning?(column)
column.inplace_edit != :ajax && (override_form_field?(column) || column.form_ui || (column.column && override_input?(column.column.type)))
end
def active_scaffold_inplace_edit_tag_options(record, column)
id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
tag_options = {:id => element_cell_id(id_options), :class => 'in_place_editor_field',
:title => as_(:click_to_edit), :data => {:ie_id => record.to_param}}
tag_options[:data][:ie_update] = column.inplace_edit if column.inplace_edit != true
tag_options
end
def active_scaffold_inplace_edit(record, column, options = {})
formatted_column = options[:formatted_column] || format_column_value(record, column)
content_tag(:span, as_(:inplace_edit_handle), :class => 'handle') <<
content_tag(:span, formatted_column, active_scaffold_inplace_edit_tag_options(record, column))
end
def inplace_edit_control(column)
return unless inplace_edit?(active_scaffold_config.model, column) && inplace_edit_cloning?(column)
old_record, @record = @record, active_scaffold_config.model.new # TODO: remove when relying on @record is removed
column = column.clone
column.options = column.options.clone
column.form_ui = :select if column.association && column.form_ui.nil?
options = active_scaffold_input_options(column).merge(:object => active_scaffold_config.model.new)
options[:class] = "#{options[:class]} inplace_field"
options[:"data-id"] = options[:id]
options[:id] = nil
content_tag(:div, active_scaffold_input_for(column, nil, options), :style => 'display:none;', :class => inplace_edit_control_css_class).tap do
@record = old_record # TODO: remove when relying on @record is removed
end
end
def inplace_edit_control_css_class
'as_inplace_pattern'
end
# Collects the data-* attributes driving inplace editing for +column+'s
# header cell: the update URL ('__id__' is replaced client-side), i18n
# button texts, textarea sizing, and the edit mode — :inline_checkbox,
# :clone (a hidden patterned form control is cloned), or :ajax (the field
# is rendered server-side on demand).
def inplace_edit_data(column)
  data = {}
  data[:ie_url] = url_for(params_for(:action => 'update_column', :column => column.name, :id => '__id__'))
  data[:ie_cancel_text] = column.options[:cancel_text] || as_(:cancel)
  data[:ie_loading_text] = column.options[:loading_text] || as_(:loading)
  data[:ie_save_text] = column.options[:save_text] || as_(:update)
  data[:ie_saving_text] = column.options[:saving_text] || as_(:saving)
  # a default row count only makes sense for text-backed columns
  data[:ie_rows] = column.options[:rows] || 5 if column.column.try(:type) == :text
  data[:ie_cols] = column.options[:cols] if column.options[:cols]
  data[:ie_size] = column.options[:size] if column.options[:size]
  data[:ie_use_html] = column.options[:use_html] if column.options[:use_html]
  if column.list_ui == :checkbox
    data[:ie_mode] = :inline_checkbox
  elsif inplace_edit_cloning?(column)
    data[:ie_mode] = :clone
  elsif column.inplace_edit == :ajax
    url = url_for(:controller => params_for[:controller], :action => 'render_field', :id => '__id__', :update_column => column.name)
    plural = column.plural_association? && !override_form_field?(column) && [:select, :record_select].include?(column.form_ui)
    data[:ie_render_url] = url
    data[:ie_mode] = :ajax
    data[:ie_plural] = plural
  end
  data
end
def all_marked?
if active_scaffold_config.mark.mark_all_mode == :page
@page.items.detect { |record| !marked_records.include?(record.id) }.nil?
else
marked_records.length >= @page.pager.count.to_i
end
end
def mark_column_heading
tag_options = {
:id => "#{controller_id}_mark_heading",
:class => 'mark_heading in_place_editor_field'
}
content_tag(:span, check_box_tag("#{controller_id}_mark_heading_span_input", '1', all_marked?), tag_options)
end
def column_heading_attributes(column, sorting, sort_direction)
{:id => active_scaffold_column_header_id(column), :class => column_heading_class(column, sorting), :title => strip_tags(column.description).presence}
end
def render_column_heading(column, sorting, sort_direction)
tag_options = column_heading_attributes(column, sorting, sort_direction)
if column.name == :as_marked
tag_options[:data] = {
:ie_mode => :inline_checkbox,
:ie_url => url_for(params_for(:action => 'mark', :id => '__id__'))
}
else
tag_options[:data] = inplace_edit_data(column) if column.inplace_edit
end
content_tag(:th, column_heading_value(column, sorting, sort_direction) + inplace_edit_control(column), tag_options)
end
def column_heading_value(column, sorting, sort_direction)
if column.name == :as_marked
mark_column_heading
elsif column.sortable?
options = {:id => nil, :class => 'as_sort',
'data-page-history' => controller_id,
:remote => true, :method => :get}
url_options = params_for(:action => :index, :page => 1,
:sort => column.name, :sort_direction => sort_direction)
unless active_scaffold_config.store_user_settings
url_options.merge!(:search => search_params) if search_params.present?
end
link_to column_heading_label(column), url_options, options
else
content_tag(:p, column_heading_label(column))
end
end
def column_heading_label(column)
column.label
end
end
end
end
|
require 'instrumental/rack/middleware'
require 'instrumental/version'
require 'logger'
require 'thread'
require 'socket'
if RUBY_VERSION < "1.9"
require 'system_timer'
else
require 'timeout'
end
# Sets up a connection to the collector.
#
# Instrumental::Agent.new(API_KEY)
module Instrumental
class Agent
BACKOFF = 2.0
MAX_RECONNECT_DELAY = 15
MAX_BUFFER = 5000
REPLY_TIMEOUT = 10
CONNECT_TIMEOUT = 20
EXIT_FLUSH_TIMEOUT = 10
attr_accessor :host, :port, :synchronous, :queue
attr_reader :connection, :enabled
def self.logger=(l)
@logger = l
end
def self.logger
if !@logger
@logger = Logger.new(STDERR)
@logger.level = Logger::WARN
end
@logger
end
def self.all
@agents ||= []
end
def self.new(*args)
inst = super
all << inst
inst
end
# Sets up a connection to the collector.
#
# Instrumental::Agent.new(API_KEY)
# Instrumental::Agent.new(API_KEY, :collector => 'hostname:port')
def initialize(api_key, options = {})
default_options = {
:collector => 'instrumentalapp.com:8000',
:enabled => true,
:test_mode => false,
:synchronous => false
}
options = default_options.merge(options)
collector = options[:collector].split(':')
@api_key = api_key
@host = collector[0]
@port = (collector[1] || 8000).to_i
@enabled = options[:enabled]
@test_mode = options[:test_mode]
@synchronous = options[:synchronous]
@allow_reconnect = true
@pid = Process.pid
if @enabled
@failures = 0
@queue = Queue.new
@sync_mutex = Mutex.new
start_connection_worker
setup_cleanup_at_exit
end
end
# Store a gauge for a metric, optionally at a specific time.
#
# agent.gauge('load', 1.23)
def gauge(metric, value, time = Time.now)
if valid?(metric, value, time) &&
send_command("gauge", metric, value, time.to_i)
value
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Store the duration of a block in a metric. multiplier can be used
# to scale the duration to desired unit or change the duration in
# some meaningful way.
#
# agent.time('response_time') do
# # potentially slow stuff
# end
#
# agent.time('response_time_in_ms', 1000) do
# # potentially slow stuff
# end
#
# ids = [1, 2, 3]
# agent.time('find_time_per_post', 1 / ids.size.to_f) do
# Post.find(ids)
# end
def time(metric, multiplier = 1)
start = Time.now
begin
result = yield
ensure
finish = Time.now
duration = finish - start
gauge(metric, duration * multiplier, start)
end
result
end
# Calls time and changes durations into milliseconds.
def time_ms(metric, &block)
time(metric, 1000, &block)
end
# Increment a metric, optionally more than one or at a specific time.
#
# agent.increment('users')
def increment(metric, value = 1, time = Time.now)
if valid?(metric, value, time) &&
send_command("increment", metric, value, time.to_i)
value
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Send a notice to the server (deploys, downtime, etc.)
#
# agent.notice('A notice')
def notice(note, time = Time.now, duration = 0)
if valid_note?(note)
send_command("notice", time.to_i, duration.to_i, note)
note
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Synchronously flush all pending metrics out to the server
# By default will not try to reconnect to the server if a
# connection failure happens during the flush, though you
# may optionally override this behavior by passing true.
#
# agent.flush
def flush(allow_reconnect = false)
queue_message('flush', {
:synchronous => true,
:allow_reconnect => allow_reconnect
})
end
def enabled?
@enabled
end
def connected?
@socket && !@socket.closed?
end
def logger=(logger)
@logger = logger
end
def logger
@logger ||= self.class.logger
end
private
# Runs the block under a watchdog, raising Timeout::Error after +time+
# seconds. Pre-1.9 rubies use the SystemTimer gem, where the stdlib
# Timeout implementation was unreliable.
def with_timeout(time, &block)
  timer = RUBY_VERSION < "1.9" ? SystemTimer : Timeout
  timer.timeout(time, &block)
end
# A note may not contain CR/LF: the collector protocol is line-delimited,
# so an embedded newline would be read as a separate command.
def valid_note?(note)
  (note =~ /[\n\r]/).nil?
end
# Validates a metric name and value before a command is queued.
#
# The previous anchors (^ and $) match per-line in Ruby, so a name such as
# "ok\nauthenticate ..." passed validation and could smuggle extra lines
# into the newline-delimited wire protocol; \A and \z anchor the whole
# string instead.
#
# Returns true when both parts are valid; otherwise reports the offending
# part (agent.* metric plus a warning) and returns false. +time+ is
# accepted for signature compatibility but not validated here.
def valid?(metric, value, time)
  valid_metric = metric =~ /\A([\d\w\-_]+\.)*[\d\w\-_]+\z/i
  valid_value = value.to_s =~ /\A-?\d+(\.\d+)?(e-\d+)?\z/
  return true if valid_metric && valid_value
  report_invalid_metric(metric) unless valid_metric
  report_invalid_value(metric, value) unless valid_value
  false
end
def report_invalid_metric(metric)
increment "agent.invalid_metric"
logger.warn "Invalid metric #{metric}"
end
def report_invalid_value(metric, value)
increment "agent.invalid_value"
logger.warn "Invalid value #{value.inspect} for #{metric}"
end
def report_exception(e)
logger.error "Exception occurred: #{e.message}"
logger.error e.backtrace.join("\n")
end
def send_command(cmd, *args)
if enabled?
if @pid != Process.pid
logger.info "Detected fork"
@pid = Process.pid
@socket = nil
@queue = Queue.new
start_connection_worker
end
cmd = "%s %s\n" % [cmd, args.collect { |a| a.to_s }.join(" ")]
if @queue.size < MAX_BUFFER
logger.debug "Queueing: #{cmd.chomp}"
queue_message(cmd, { :synchronous => @synchronous })
else
logger.warn "Dropping command, queue full(#{@queue.size}): #{cmd.chomp}"
nil
end
end
end
# Enqueues one protocol line (plus its options) for the worker thread.
# options[:allow_reconnect] defaults to the agent-wide setting; when
# :synchronous is set, the caller blocks on a ConditionVariable until the
# worker has processed this entry. Returns the message either way (also
# when the agent is disabled and nothing was queued).
# NOTE(review): the synchronous wait has no predicate loop — if the worker
# pops the entry and signals before this thread reaches wait, the wakeup
# is lost and the caller blocks forever. Verify against run_worker_loop's
# locking order before relying on concurrent synchronous sends.
def queue_message(message, options = {})
  if @enabled
    options ||= {}
    if options[:allow_reconnect].nil?
      options[:allow_reconnect] = @allow_reconnect
    end
    synchronous = options.delete(:synchronous)
    if synchronous
      options[:sync_resource] ||= ConditionVariable.new
      @queue << [message, options]
      @sync_mutex.synchronize {
        options[:sync_resource].wait(@sync_mutex)
      }
    else
      @queue << [message, options]
    end
  end
  message
end
def test_connection
# FIXME: Test connection state hack
begin
@socket.read_nonblock(1) # TODO: put data back?
rescue Errno::EAGAIN
# noop
end
end
def start_connection_worker
if enabled?
disconnect
logger.info "Starting thread"
@thread = Thread.new do
run_worker_loop
end
end
end
def send_with_reply_timeout(message)
@socket.puts message
with_timeout(REPLY_TIMEOUT) do
response = @socket.gets
if response.to_s.chomp != "ok"
raise "Bad Response #{response.inspect} to #{message.inspect}"
end
end
end
# Connection worker: connects, performs the hello/authenticate handshake,
# then drains @queue forever. Each queue entry is a
# [command_string, options] pair (see queue_message). On any error the
# socket is torn down and the loop reconnects with capped exponential
# backoff, unless reconnecting is disallowed globally or per-command.
def run_worker_loop
  command_and_args = nil
  command_options = nil
  logger.info "connecting to collector"
  @socket = with_timeout(CONNECT_TIMEOUT) { TCPSocket.new(host, port) }
  logger.info "connected to collector at #{host}:#{port}"
  send_with_reply_timeout "hello version #{Instrumental::VERSION} test_mode #{@test_mode}"
  send_with_reply_timeout "authenticate #{@api_key}"
  @failures = 0
  loop do
    command_and_args, command_options = @queue.pop
    sync_resource = command_options && command_options[:sync_resource]
    test_connection
    case command_and_args
    when 'exit'
      logger.info "exiting, #{@queue.size} commands remain"
      return true
    when 'flush'
      # nothing to write: reaching this entry means everything queued
      # before the flush was already sent; the signal below releases
      # the caller blocked in queue_message
    else
      logger.debug "Sending: #{command_and_args.chomp}"
      @socket.puts command_and_args
    end
    command_and_args = nil
    command_options = nil
    if sync_resource
      @sync_mutex.synchronize do
        sync_resource.signal
      end
    end
  end
rescue Exception => err
  # Exception (not StandardError) on purpose: a worker-thread death must
  # never take the host application down; everything is logged and retried.
  logger.error err.backtrace.join("\n")
  if @allow_reconnect == false ||
      (command_options && command_options[:allow_reconnect] == false)
    logger.error "Not trying to reconnect"
    return
  end
  if command_and_args
    logger.debug "requeueing: #{command_and_args}"
    # BUGFIX: requeue the full [command, options] pair. Requeueing only the
    # bare string dropped the options, so a synchronous caller blocked on
    # options[:sync_resource] was never signalled after a reconnect.
    @queue << [command_and_args, command_options]
  end
  disconnect
  @failures += 1
  delay = [(@failures - 1) ** BACKOFF, MAX_RECONNECT_DELAY].min
  logger.error "disconnected, #{@failures} failures in a row, reconnect in #{delay}..."
  sleep delay
  retry
ensure
  disconnect
end
def setup_cleanup_at_exit
at_exit do
logger.info "Cleaning up agent, queue empty: #{@queue.empty?}, thread running: #{@thread.alive?}"
@allow_reconnect = false
logger.info "exit received, currently #{@queue.size} commands to be sent"
queue_message('exit')
begin
with_timeout(EXIT_FLUSH_TIMEOUT) { @thread.join }
rescue Timeout::Error
if @queue.size > 0
logger.error "Timed out working agent thread on exit, dropping #{@queue.size} metrics"
else
logger.error "Timed out Instrumental Agent, exiting"
end
end
end
end
def disconnect
if connected?
logger.info "Disconnecting..."
@socket.flush
@socket.close
end
@socket = nil
end
end
end
Hide some debug info: log connection-failure backtraces at debug level instead of error.
require 'instrumental/rack/middleware'
require 'instrumental/version'
require 'logger'
require 'thread'
require 'socket'
if RUBY_VERSION < "1.9"
require 'system_timer'
else
require 'timeout'
end
# Sets up a connection to the collector.
#
# Instrumental::Agent.new(API_KEY)
module Instrumental
class Agent
BACKOFF = 2.0
MAX_RECONNECT_DELAY = 15
MAX_BUFFER = 5000
REPLY_TIMEOUT = 10
CONNECT_TIMEOUT = 20
EXIT_FLUSH_TIMEOUT = 10
attr_accessor :host, :port, :synchronous, :queue
attr_reader :connection, :enabled
def self.logger=(l)
@logger = l
end
def self.logger
if !@logger
@logger = Logger.new(STDERR)
@logger.level = Logger::WARN
end
@logger
end
def self.all
@agents ||= []
end
def self.new(*args)
inst = super
all << inst
inst
end
# Sets up a connection to the collector.
#
# Instrumental::Agent.new(API_KEY)
# Instrumental::Agent.new(API_KEY, :collector => 'hostname:port')
def initialize(api_key, options = {})
default_options = {
:collector => 'instrumentalapp.com:8000',
:enabled => true,
:test_mode => false,
:synchronous => false
}
options = default_options.merge(options)
collector = options[:collector].split(':')
@api_key = api_key
@host = collector[0]
@port = (collector[1] || 8000).to_i
@enabled = options[:enabled]
@test_mode = options[:test_mode]
@synchronous = options[:synchronous]
@allow_reconnect = true
@pid = Process.pid
if @enabled
@failures = 0
@queue = Queue.new
@sync_mutex = Mutex.new
start_connection_worker
setup_cleanup_at_exit
end
end
# Store a gauge for a metric, optionally at a specific time.
#
# agent.gauge('load', 1.23)
def gauge(metric, value, time = Time.now)
if valid?(metric, value, time) &&
send_command("gauge", metric, value, time.to_i)
value
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Store the duration of a block in a metric. multiplier can be used
# to scale the duration to desired unit or change the duration in
# some meaningful way.
#
# agent.time('response_time') do
# # potentially slow stuff
# end
#
# agent.time('response_time_in_ms', 1000) do
# # potentially slow stuff
# end
#
# ids = [1, 2, 3]
# agent.time('find_time_per_post', 1 / ids.size.to_f) do
# Post.find(ids)
# end
def time(metric, multiplier = 1)
start = Time.now
begin
result = yield
ensure
finish = Time.now
duration = finish - start
gauge(metric, duration * multiplier, start)
end
result
end
# Calls time and changes durations into milliseconds.
def time_ms(metric, &block)
time(metric, 1000, &block)
end
# Increment a metric, optionally more than one or at a specific time.
#
# agent.increment('users')
def increment(metric, value = 1, time = Time.now)
if valid?(metric, value, time) &&
send_command("increment", metric, value, time.to_i)
value
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Send a notice to the server (deploys, downtime, etc.)
#
# agent.notice('A notice')
def notice(note, time = Time.now, duration = 0)
if valid_note?(note)
send_command("notice", time.to_i, duration.to_i, note)
note
else
nil
end
rescue Exception => e
report_exception(e)
nil
end
# Synchronously flush all pending metrics out to the server
# By default will not try to reconnect to the server if a
# connection failure happens during the flush, though you
# may optionally override this behavior by passing true.
#
# agent.flush
def flush(allow_reconnect = false)
queue_message('flush', {
:synchronous => true,
:allow_reconnect => allow_reconnect
})
end
def enabled?
@enabled
end
def connected?
@socket && !@socket.closed?
end
def logger=(logger)
@logger = logger
end
def logger
@logger ||= self.class.logger
end
private
def with_timeout(time, &block)
tmr_klass = RUBY_VERSION < "1.9" ? SystemTimer : Timeout
tmr_klass.timeout(time) { yield }
end
def valid_note?(note)
note !~ /[\n\r]/
end
# Validates a metric name and value before a command is queued.
#
# The previous anchors (^ and $) match per-line in Ruby, so a name such as
# "ok\nauthenticate ..." passed validation and could smuggle extra lines
# into the newline-delimited wire protocol; \A and \z anchor the whole
# string instead.
#
# Returns true when both parts are valid; otherwise reports the offending
# part (agent.* metric plus a warning) and returns false. +time+ is
# accepted for signature compatibility but not validated here.
def valid?(metric, value, time)
  valid_metric = metric =~ /\A([\d\w\-_]+\.)*[\d\w\-_]+\z/i
  valid_value = value.to_s =~ /\A-?\d+(\.\d+)?(e-\d+)?\z/
  return true if valid_metric && valid_value
  report_invalid_metric(metric) unless valid_metric
  report_invalid_value(metric, value) unless valid_value
  false
end
def report_invalid_metric(metric)
increment "agent.invalid_metric"
logger.warn "Invalid metric #{metric}"
end
def report_invalid_value(metric, value)
increment "agent.invalid_value"
logger.warn "Invalid value #{value.inspect} for #{metric}"
end
def report_exception(e)
logger.error "Exception occurred: #{e.message}"
logger.error e.backtrace.join("\n")
end
def send_command(cmd, *args)
if enabled?
if @pid != Process.pid
logger.info "Detected fork"
@pid = Process.pid
@socket = nil
@queue = Queue.new
start_connection_worker
end
cmd = "%s %s\n" % [cmd, args.collect { |a| a.to_s }.join(" ")]
if @queue.size < MAX_BUFFER
logger.debug "Queueing: #{cmd.chomp}"
queue_message(cmd, { :synchronous => @synchronous })
else
logger.warn "Dropping command, queue full(#{@queue.size}): #{cmd.chomp}"
nil
end
end
end
def queue_message(message, options = {})
if @enabled
options ||= {}
if options[:allow_reconnect].nil?
options[:allow_reconnect] = @allow_reconnect
end
synchronous = options.delete(:synchronous)
if synchronous
options[:sync_resource] ||= ConditionVariable.new
@queue << [message, options]
@sync_mutex.synchronize {
options[:sync_resource].wait(@sync_mutex)
}
else
@queue << [message, options]
end
end
message
end
def test_connection
# FIXME: Test connection state hack
begin
@socket.read_nonblock(1) # TODO: put data back?
rescue Errno::EAGAIN
# noop
end
end
def start_connection_worker
if enabled?
disconnect
logger.info "Starting thread"
@thread = Thread.new do
run_worker_loop
end
end
end
def send_with_reply_timeout(message)
@socket.puts message
with_timeout(REPLY_TIMEOUT) do
response = @socket.gets
if response.to_s.chomp != "ok"
raise "Bad Response #{response.inspect} to #{message.inspect}"
end
end
end
# Connection worker: connects, performs the hello/authenticate handshake,
# then drains @queue forever. Each queue entry is a
# [command_string, options] pair (see queue_message). On any error the
# socket is torn down and the loop reconnects with capped exponential
# backoff, unless reconnecting is disallowed globally or per-command.
def run_worker_loop
  command_and_args = nil
  command_options = nil
  logger.info "connecting to collector"
  @socket = with_timeout(CONNECT_TIMEOUT) { TCPSocket.new(host, port) }
  logger.info "connected to collector at #{host}:#{port}"
  send_with_reply_timeout "hello version #{Instrumental::VERSION} test_mode #{@test_mode}"
  send_with_reply_timeout "authenticate #{@api_key}"
  @failures = 0
  loop do
    command_and_args, command_options = @queue.pop
    sync_resource = command_options && command_options[:sync_resource]
    test_connection
    case command_and_args
    when 'exit'
      logger.info "exiting, #{@queue.size} commands remain"
      return true
    when 'flush'
      # nothing to write: reaching this entry means everything queued
      # before the flush was already sent; the signal below releases
      # the caller blocked in queue_message
    else
      logger.debug "Sending: #{command_and_args.chomp}"
      @socket.puts command_and_args
    end
    command_and_args = nil
    command_options = nil
    if sync_resource
      @sync_mutex.synchronize do
        sync_resource.signal
      end
    end
  end
rescue Exception => err
  # Exception (not StandardError) on purpose: a worker-thread death must
  # never take the host application down; failures are retried below.
  # Backtrace intentionally logged at debug level (see commit note).
  logger.debug err.backtrace.join("\n")
  if @allow_reconnect == false ||
      (command_options && command_options[:allow_reconnect] == false)
    logger.error "Not trying to reconnect"
    return
  end
  if command_and_args
    logger.debug "requeueing: #{command_and_args}"
    # BUGFIX: requeue the full [command, options] pair. Requeueing only the
    # bare string dropped the options, so a synchronous caller blocked on
    # options[:sync_resource] was never signalled after a reconnect.
    @queue << [command_and_args, command_options]
  end
  disconnect
  @failures += 1
  delay = [(@failures - 1) ** BACKOFF, MAX_RECONNECT_DELAY].min
  logger.error "disconnected, #{@failures} failures in a row, reconnect in #{delay}..."
  sleep delay
  retry
ensure
  disconnect
end
def setup_cleanup_at_exit
at_exit do
logger.info "Cleaning up agent, queue empty: #{@queue.empty?}, thread running: #{@thread.alive?}"
@allow_reconnect = false
logger.info "exit received, currently #{@queue.size} commands to be sent"
queue_message('exit')
begin
with_timeout(EXIT_FLUSH_TIMEOUT) { @thread.join }
rescue Timeout::Error
if @queue.size > 0
logger.error "Timed out working agent thread on exit, dropping #{@queue.size} metrics"
else
logger.error "Timed out Instrumental Agent, exiting"
end
end
end
end
def disconnect
if connected?
logger.info "Disconnecting..."
@socket.flush
@socket.close
end
@socket = nil
end
end
end
|
# coding: utf-8
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a List Column
module ListColumnHelpers
# Renders the display value for +record+'s +column+ cell, trying in order:
# a dev-supplied column override helper, a list_ui helper, inplace edit,
# a column-type helper, and finally the default formatter. Blank results
# become an html_safe space so the cell keeps its height (legacy IE fix).
# Errors are logged with column/controller context, then re-raised.
def get_column_value(record, column)
  # check for an override helper
  value = if column_override? column
    # we only pass the record as the argument. we previously also passed the formatted_value,
    # but mike perham pointed out that prohibited the usage of overrides to improve on the
    # performance of our default formatting. see issue #138.
    send(column_override(column), record)
  # second, check if the dev has specified a valid list_ui for this column
  elsif column.list_ui && override_column_ui?(column.list_ui)
    send(override_column_ui(column.list_ui), column, record)
  elsif inplace_edit?(record, column)
    active_scaffold_inplace_edit(record, column)
  elsif column.column && override_column_ui?(column.column.type)
    send(override_column_ui(column.column.type), column, record)
  else
    format_column_value(record, column)
  end
  value = ' '.html_safe if value.nil? || (value.respond_to?(:empty?) && value.empty?) # fix for IE 6
  value
rescue StandardError => e
  # StandardError (not Exception) so signals/exits are not intercepted just
  # to write a log line; the error is still re-raised for normal handling.
  # A separator space was also missing after the timestamp in the log line.
  logger.error "#{Time.now} #{e.inspect} -- on the ActiveScaffold column = :#{column.name} in #{@controller.class}"
  raise e
end
# TODO: move empty_field_text and logic in here?
# TODO: move active_scaffold_inplace_edit in here?
# TODO: we need to distinguish between the automatic links *we* create and the ones that the dev specified. some logic may not apply if the dev specified the link.
# Wraps cell +text+ in the column's action link when one is configured:
# builds the URL options, resolves automatic association links (nested
# scaffold / inline create form), checks per-record authorization and
# falls back to a disabled link when not authorized.
def render_list_column(text, column, record)
  if column.link
    link = column.link
    associated = record.send(column.association.name) if column.association
    url_options = params_for(:action => nil, :id => record.id, :link => text)
    url_options[:parent_controller] = params[:controller] if link.controller and link.controller.to_s != params[:controller]
    url_options[:id] = associated.id if associated and link.controller and link.controller.to_s != params[:controller]
    # setup automatic link
    if column.autolink? # link to nested scaffold or inline form
      link = action_link_to_inline_form(column, associated) if link.crud_type.nil? # automatic link to inline form (singular association)
      return text if link.crud_type.nil?
      if link.crud_type == :create
        url_options[:link] = as_(:create_new)
        url_options[:parent_id] = record.id
        url_options[:parent_column] = column.association.reverse
        url_options[:parent_model] = record.class.name # needed for polymorphic associations
        url_options.delete :id
      end
    end
    # check authorization
    if column.association
      associated_for_authorized = if associated.nil? || (associated.respond_to?(:empty?) && associated.empty?)
        column.association.klass
      elsif column.plural_association?
        associated.first
      else
        associated
      end
      authorized = associated_for_authorized.authorized_for?(:crud_type => link.crud_type)
      # BUGFIX: `authorized = authorized and record.authorized_for?(...)`
      # parsed as `(authorized = authorized) and ...` because `and` binds
      # looser than assignment, silently discarding the update-column
      # check. `&&=` applies the extra check for :create links as intended.
      authorized &&= record.authorized_for?(:crud_type => :update, :column => column.name) if link.crud_type == :create
    else
      authorized = record.authorized_for?(:crud_type => link.crud_type)
    end
    # NOTE(review): this string is not marked html_safe and +text+ is not
    # escaped here — confirm how callers output it before changing.
    return "<a class='disabled'>#{text}</a>" unless authorized
    render_action_link(link, url_options, record)
  else
    text
  end
end
# setup the action link to inline form
# Builds the action link used for an association column's inline form:
# an empty association can only lead to the create form; otherwise edit is
# preferred, then show, whichever the column's configuration allows first.
def action_link_to_inline_form(column, associated)
  link = column.link.clone
  if column_empty?(associated)
    # nothing to edit or show yet — only the create form makes sense
    if column.actions_for_association_links.include?(:new)
      link.action, link.crud_type = 'new', :create
    end
  elsif column.actions_for_association_links.include?(:edit)
    link.action, link.crud_type = 'edit', :update
  elsif column.actions_for_association_links.include?(:show)
    link.action, link.crud_type = 'show', :read
  end
  link
end
# There are two basic ways to clean a column's value: h() and sanitize(). The latter is useful
# when the column contains *valid* html data, and you want to just disable any scripting. People
# can always use field overrides to clean data one way or the other, but having this override
# lets people decide which way it should happen by default.
#
# Why is it not a configuration option? Because it seems like a somewhat rare request. But it
# could eventually be an option in config.list (and config.show, I guess).
# Escapes a column value for safe display; h() fully escapes HTML.
# Apps storing trusted HTML can override this (e.g. with sanitize).
def clean_column_value(v)
  h(v)
end
# List override for text columns: escapes the value and truncates it to
# column.options[:truncate] characters (default 50).
def active_scaffold_column_text(column, record)
  truncate(clean_column_value(record.send(column.name)), :length => column.options[:truncate] || 50)
end
# List override for :select columns. Association columns fall through to the
# default formatter; otherwise the stored value is looked up in
# column.options[:options] so the translated option label is displayed
# instead of the raw value.
def active_scaffold_column_select(column, record)
  if column.association
    format_column_value(record, column)
  else
    value = record.send(column.name)
    # entries may be plain values or [label, value] pairs; val is nil for the former
    text, val = column.options[:options].find {|text, val| (val.nil? ? text : val).to_s == value.to_s}
    value = active_scaffold_translated_option(column, text, val).first if text
    format_column_value(record, column, value)
  end
end
# List override for :checkbox columns: a live checkbox wrapped in an
# in-place-editor span when the user may update the column, otherwise a
# disabled checkbox.
def active_scaffold_column_checkbox(column, record)
  if inplace_edit?(record, column)
    id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
    tag_options = {:id => element_cell_id(id_options), :class => "in_place_editor_field"}
    content_tag(:span, format_column_checkbox(record, column), tag_options)
  else
    check_box(:record, column.name, :disabled => true, :id => nil, :object => record)
  end
end
# Name of the helper that overrides rendering for +column+; the legacy
# (old) naming schema omits the model-class prefix.
def column_override_name(column, old = false)
  prefix = old ? '' : "#{clean_class_name(column.active_record_class.name)}_"
  "#{prefix}#{clean_column_name(column.name)}_column"
end
# Returns the override helper's name if one is defined, preferring the
# current naming schema and warning when only the legacy name exists.
def column_override(column)
  current = column_override_name(column)
  return current if respond_to?(current)
  legacy = column_override_name(column, true)
  return unless respond_to?(legacy)
  ActiveSupport::Deprecation.warn("You are using an old naming schema for overrides, you should name the helper #{current} instead of #{legacy}")
  legacy
end
alias_method :column_override?, :column_override
# True when a helper overriding the given list_ui exists.
def override_column_ui?(list_ui)
  respond_to?(override_column_ui(list_ui))
end
# Naming convention for helpers that override column types.
def override_column_ui(list_ui)
  "active_scaffold_column_#{list_ui}"
end
##
## Formatting
##
# Renders the live checkbox for boolean in-place editing: clicking posts
# the toggled value to update_column via remote_function.
def format_column_checkbox(record, column)
  checked = ActionView::Helpers::InstanceTag.check_box_checked?(record.send(column.name), '1')
  # :value sends false when currently checked, 1 otherwise
  script = remote_function(:method => 'POST', :url => {:controller => params_for[:controller], :action => "update_column", :column => column.name, :id => record.id.to_s, :value => checked ? false : 1, :eid => params[:eid]})
  check_box(:record, column.name, :onclick => script, :id => nil, :object => record)
end
# Default list formatter. Caches plural associations (and captures their
# size first) so column_empty? does not trigger extra queries, then routes
# to the numeric, plain, or association formatter.
def format_column_value(record, column, value = nil)
  value ||= record.send(column.name) unless record.nil?
  if value && column.association # cache association size before calling column_empty?
    associated_size = value.size if column.plural_association? and column.associated_number? # get count before cache association
    cache_association(value, column)
  end
  if column.association.nil? or column_empty?(value)
    if value.is_a? Numeric
      format_number_value(value, column.options)
    else
      format_value(value, column.options)
    end
  else
    format_association_value(value, column, associated_size)
  end
end
# Applies the numeric format requested in options[:format] (:size,
# :percentage, :currency or :i18n_number) and escapes the result.
def format_number_value(value, options = {})
  value = case options[:format]
  when :size
    number_to_human_size(value, options[:i18n_options] || {})
  when :percentage
    number_to_percentage(value, options[:i18n_options] || {})
  when :currency
    number_to_currency(value, options[:i18n_options] || {})
  when :i18n_number
    number_with_delimiter(value, options[:i18n_options] || {})
  else
    value
  end
  clean_column_value(value)
end
# Formats an association for the list: the record label for singular
# associations; for plural ones, up to associated_limit labels joined by
# commas, an ellipsis when truncated, and optionally the total count.
def format_association_value(value, column, size)
  case column.association.macro
  when :has_one, :belongs_to
    format_value(value.to_label)
  when :has_many, :has_and_belongs_to_many
    if column.associated_limit.nil?
      firsts = value.collect { |v| v.to_label }
    else
      firsts = value.first(column.associated_limit)
      firsts.collect! { |v| v.to_label }
      # mark truncation with an ellipsis in the limit-th slot
      firsts[column.associated_limit] = '…' if value.size > column.associated_limit
    end
    if column.associated_limit == 0
      # limit 0 means show only the count (when enabled); otherwise nil
      size if column.associated_number?
    else
      joined_associated = format_value(firsts.join(', '))
      joined_associated << " (#{size})" if column.associated_number? and column.associated_limit and value.size > column.associated_limit
      joined_associated
    end
  end
end
# Formats a scalar value: empty-field placeholder for blanks, localized
# dates/times, translated booleans, otherwise to_s; always escaped.
def format_value(column_value, options = {})
  value = if column_empty?(column_value)
    active_scaffold_config.list.empty_field_text
  elsif column_value.is_a?(Time) || column_value.is_a?(Date)
    l(column_value, :format => options[:format] || :default)
  elsif [FalseClass, TrueClass].include?(column_value.class)
    as_(column_value.to_s.to_sym)
  else
    column_value.to_s
  end
  clean_column_value(value)
end
# Pre-loads a bounded slice of an unloaded association so later emptiness
# and permission checks don't each hit the database.
def cache_association(value, column)
  # we are not using eager loading, cache firsts records in order not to query the database in a future
  unless value.loaded?
    # load at least one record, is needed for column_empty? and checking permissions
    if column.associated_limit.nil?
      Rails.logger.warn "ActiveScaffold: Enable eager loading for #{column.name} association to reduce SQL queries"
    else
      # limit + 1 so the "more than limit" ellipsis/count logic can detect overflow
      value.target = value.find(:all, :limit => column.associated_limit + 1, :select => column.select_columns)
    end
  end
end
# ==========
# = Inline Edit =
# ==========
# True when the column is in-place editable and this user may update the
# column on this record.
def inplace_edit?(record, column)
  column.inplace_edit && record.authorized_for?(:crud_type => :update, :column => column.name)
end
# True when the in-place editor should clone the hidden pattern control in
# the column header instead of fetching the field via ajax.
def inplace_edit_cloning?(column)
  return false if column.inplace_edit == :ajax
  override_form_field?(column) || column.form_ui || (column.column && override_input?(column.column.type))
end
# Renders the display value for an in-place editable cell.
def format_inplace_edit_column(record, column)
  checkbox = column.list_ui == :checkbox
  checkbox ? format_column_checkbox(record, column) : format_column_value(record, column)
end
# Renders the formatted cell value wrapped in an in-place-editor span plus
# the script that wires up ActiveScaffold.InPlaceEditor against the
# update_column action. Picks the editor strategy: clone the header's
# pattern field, fetch the field via ajax, or a plain textarea/input.
def active_scaffold_inplace_edit(record, column, options = {})
  formatted_column = options[:formatted_column] || format_column_value(record, column)
  id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
  tag_options = {:id => element_cell_id(id_options), :class => "in_place_editor_field"}
  in_place_editor_options = {
    :url => {:controller => params_for[:controller], :action => "update_column", :column => column.name, :id => record.id.to_s},
    :with => params[:eid] ? "Form.serialize(form) + '&eid=#{params[:eid]}'" : nil,
    :click_to_edit_text => as_(:click_to_edit),
    :cancel_text => as_(:cancel),
    :loading_text => as_(:loading),
    :save_text => as_(:update),
    :saving_text => as_(:saving),
    :ajax_options => "{method: 'post'}",
    :script => true
  }
  if inplace_edit_cloning?(column)
    # clone the hidden pattern control rendered in the column header
    in_place_editor_options.merge!(
      :inplace_pattern_selector => "##{active_scaffold_column_header_id(column)} .#{inplace_edit_control_css_class}",
      :node_id_suffix => record.id.to_s,
      :form_customization => 'element.clonePatternField();'
    )
  elsif column.inplace_edit == :ajax
    # fetch the rendered form field from the server on first edit
    url = url_for(:controller => params_for[:controller], :action => 'render_field', :id => record.id, :column => column.name, :update_column => column.name, :in_place_editing => true, :escape => false)
    plural = column.plural_association? && !override_form_field?(column) && [:select, :record_select].include?(column.form_ui)
    in_place_editor_options[:form_customization] = "element.setFieldFromAjax('#{escape_javascript(url)}', {plural: #{!!plural}});"
  elsif column.column.try(:type) == :text
    in_place_editor_options[:rows] = column.options[:rows] || 5
  end
  # column.options may override any of the defaults above
  in_place_editor_options.merge!(column.options)
  content_tag(:span, formatted_column, tag_options) + active_scaffold_in_place_editor(tag_options[:id], in_place_editor_options)
end
# Renders the hidden form control in the column header that cloning-style
# in-place editors copy for each row.
def inplace_edit_control(column)
  if inplace_edit?(active_scaffold_config.model, column) and inplace_edit_cloning?(column)
    @record = active_scaffold_config.model.new
    # clone so the shared column config is not mutated
    column = column.clone
    column.update_column = nil
    column.form_ui = :select if (column.association && column.form_ui.nil?)
    content_tag(:div, active_scaffold_input_for(column), {:style => "display:none;", :class => inplace_edit_control_css_class})
  end
end
# CSS class marking the hidden pattern control.
def inplace_edit_control_css_class
  "as_inplace_pattern"
end
# Emits the <script> tag instantiating ActiveScaffold.InPlaceEditor for
# +field_id+, translating the Ruby options hash into the editor's
# JavaScript option names. Adds the CSRF token to the serialized form
# data when forgery protection is on.
def active_scaffold_in_place_editor(field_id, options = {})
  function = "new ActiveScaffold.InPlaceEditor("
  function << "'#{field_id}', "
  function << "'#{url_for(options[:url])}'"
  js_options = {}
  if protect_against_forgery?
    options[:with] ||= "Form.serialize(form)"
    options[:with] += " + '&authenticity_token=' + encodeURIComponent('#{form_authenticity_token}')"
  end
  # one-to-one mapping from option keys to InPlaceEditor JS settings
  js_options['cancelText'] = %('#{options[:cancel_text]}') if options[:cancel_text]
  js_options['okText'] = %('#{options[:save_text]}') if options[:save_text]
  js_options['okControl'] = %('#{options[:save_control_type]}') if options[:save_control_type]
  js_options['cancelControl'] = %('#{options[:cancel_control_type]}') if options[:cancel_control_type]
  js_options['loadingText'] = %('#{options[:loading_text]}') if options[:loading_text]
  js_options['savingText'] = %('#{options[:saving_text]}') if options[:saving_text]
  js_options['rows'] = options[:rows] if options[:rows]
  js_options['cols'] = options[:cols] if options[:cols]
  js_options['size'] = options[:size] if options[:size]
  js_options['externalControl'] = "'#{options[:external_control]}'" if options[:external_control]
  js_options['externalControlOnly'] = "true" if options[:external_control_only]
  js_options['submitOnBlur'] = "'#{options[:submit_on_blur]}'" if options[:submit_on_blur]
  js_options['loadTextURL'] = "'#{url_for(options[:load_text_url])}'" if options[:load_text_url]
  js_options['ajaxOptions'] = options[:ajax_options] if options[:ajax_options]
  js_options['htmlResponse'] = !options[:script] if options[:script]
  js_options['callback'] = "function(form) { return #{options[:with]} }" if options[:with]
  js_options['clickToEditText'] = %('#{options[:click_to_edit_text]}') if options[:click_to_edit_text]
  js_options['textBetweenControls'] = %('#{options[:text_between_controls]}') if options[:text_between_controls]
  js_options['highlightcolor'] = %('#{options[:highlight_color]}') if options[:highlight_color]
  js_options['highlightendcolor'] = %('#{options[:highlight_end_color]}') if options[:highlight_end_color]
  js_options['onFailure'] = "function(element, transport) { #{options[:failure]} }" if options[:failure]
  js_options['onComplete'] = "function(transport, element) { #{options[:complete]} }" if options[:complete]
  js_options['onEnterEditMode'] = "function(element) { #{options[:enter_editing]} }" if options[:enter_editing]
  js_options['onLeaveEditMode'] = "function(element) { #{options[:exit_editing]} }" if options[:exit_editing]
  js_options['onFormCustomization'] = "function(element, form) { #{options[:form_customization]} }" if options[:form_customization]
  js_options['inplacePatternSelector'] = %('#{options[:inplace_pattern_selector]}') if options[:inplace_pattern_selector]
  js_options['nodeIdSuffix'] = %('#{options[:node_id_suffix]}') if options[:node_id_suffix]
  function << (', ' + options_for_javascript(js_options)) unless js_options.empty?
  function << ')'
  javascript_tag(function)
end
# Renders the per-row "mark" checkbox; toggling it PUTs the checkbox state
# to the mark action, disabling the control while the request is in flight.
def mark_record(checked, url_params = {})
  url_params.reverse_merge!(:controller => params_for[:controller], :action => 'mark', :eid => params[:eid])
  ajax_options = {:method => :put,
    :url => url_for(url_params),
    :with => "'value=' + this.checked",
    :after => "var checkbox = this; this.disable();",
    :complete => "checkbox.enable();"}
  script = remote_function(ajax_options)
  check_box_tag('mark', '1', checked, :onclick => script, :class => 'mark_record')
end
end
end
end
Fix :select form_ui with inplace edit
# coding: utf-8
module ActiveScaffold
module Helpers
# Helpers that assist with the rendering of a List Column
module ListColumnHelpers
# Renders the value of +column+ for +record+, trying renderers in priority
# order: a per-column override helper, a list_ui override, in-place
# editing, a column-type override, then the default formatter. Errors are
# logged with the column/controller context and re-raised.
def get_column_value(record, column)
  begin
    # check for an override helper
    value = if column_override? column
      # we only pass the record as the argument. we previously also passed the formatted_value,
      # but mike perham pointed out that prohibited the usage of overrides to improve on the
      # performance of our default formatting. see issue #138.
      send(column_override(column), record)
    # second, check if the dev has specified a valid list_ui for this column
    elsif column.list_ui and override_column_ui?(column.list_ui)
      send(override_column_ui(column.list_ui), column, record)
    elsif inplace_edit?(record, column)
      active_scaffold_inplace_edit(record, column)
    elsif column.column and override_column_ui?(column.column.type)
      send(override_column_ui(column.column.type), column, record)
    else
      format_column_value(record, column)
    end
    value = ' '.html_safe if value.nil? or (value.respond_to?(:empty?) and value.empty?) # fix for IE 6
    return value
  rescue Exception => e
    # rescue Exception (not just StandardError) so even low-level failures
    # get the column context logged; the error is always re-raised.
    logger.error Time.now.to_s + "#{e.inspect} -- on the ActiveScaffold column = :#{column.name} in #{@controller.class}"
    raise e
  end
end
# TODO: move empty_field_text and logic in here?
# TODO: move active_scaffold_inplace_edit in here?
# TODO: we need to distinguish between the automatic links *we* create and the ones that the dev specified. some logic may not apply if the dev specified the link.
# Renders the list-cell content for +column+, wrapping +text+ in the column's
# action link when one is configured and the user is authorized for it.
# Returns the plain text when there is no link, or a disabled anchor when
# authorization fails.
def render_list_column(text, column, record)
  if column.link
    link = column.link
    associated = record.send(column.association.name) if column.association
    url_options = params_for(:action => nil, :id => record.id, :link => text)
    url_options[:parent_controller] = params[:controller] if link.controller and link.controller.to_s != params[:controller]
    url_options[:id] = associated.id if associated and link.controller and link.controller.to_s != params[:controller]
    # setup automatic link
    if column.autolink? # link to nested scaffold or inline form
      link = action_link_to_inline_form(column, associated) if link.crud_type.nil? # automatic link to inline form (singular association)
      return text if link.crud_type.nil?
      if link.crud_type == :create
        url_options[:link] = as_(:create_new)
        url_options[:parent_id] = record.id
        url_options[:parent_column] = column.association.reverse
        url_options[:parent_model] = record.class.name # needed for polymorphic associations
        url_options.delete :id
      end
    end
    # check authorization
    if column.association
      associated_for_authorized = if associated.nil? || (associated.respond_to?(:empty?) && associated.empty?)
        column.association.klass
      elsif column.plural_association?
        associated.first
      else
        associated
      end
      authorized = associated_for_authorized.authorized_for?(:crud_type => link.crud_type)
      # BUG FIX: `authorized = authorized and record.authorized_for?(...)` parsed as
      # `(authorized = authorized) and ...` because `and` binds looser than `=`,
      # so the record-level update check was evaluated and then discarded.
      authorized &&= record.authorized_for?(:crud_type => :update, :column => column.name) if link.crud_type == :create
    else
      authorized = record.authorized_for?(:crud_type => link.crud_type)
    end
    return "<a class='disabled'>#{text}</a>" unless authorized
    render_action_link(link, url_options, record)
  else
    text
  end
end
# setup the action link to inline form
# Builds the action link used for an association column's inline form:
# create when the association is empty, otherwise edit then show,
# whichever the column's configuration allows first.
def action_link_to_inline_form(column, associated)
  link = column.link.clone
  if column_empty?(associated) # if association is empty, we only can link to create form
    if column.actions_for_association_links.include?(:new)
      link.action = 'new'
      link.crud_type = :create
    end
  elsif column.actions_for_association_links.include?(:edit)
    link.action = 'edit'
    link.crud_type = :update
  elsif column.actions_for_association_links.include?(:show)
    link.action = 'show'
    link.crud_type = :read
  end
  link
end
# There are two basic ways to clean a column's value: h() and sanitize(). The latter is useful
# when the column contains *valid* html data, and you want to just disable any scripting. People
# can always use field overrides to clean data one way or the other, but having this override
# lets people decide which way it should happen by default.
#
# Why is it not a configuration option? Because it seems like a somewhat rare request. But it
# could eventually be an option in config.list (and config.show, I guess).
def clean_column_value(v)
  h(v)
end
##
## Overrides
##
# List override for text columns: escapes the value and truncates it to
# column.options[:truncate] characters (default 50).
def active_scaffold_column_text(column, record)
  truncate(clean_column_value(record.send(column.name)), :length => column.options[:truncate] || 50)
end
# List override for :checkbox columns: a live checkbox wrapped in an
# in-place-editor span when the user may update the column, otherwise a
# disabled checkbox.
def active_scaffold_column_checkbox(column, record)
  if inplace_edit?(record, column)
    id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
    tag_options = {:id => element_cell_id(id_options), :class => "in_place_editor_field"}
    content_tag(:span, format_column_checkbox(record, column), tag_options)
  else
    check_box(:record, column.name, :disabled => true, :id => nil, :object => record)
  end
end
# Name of the helper that overrides rendering for +column+; the legacy
# (old) naming schema omits the model-class prefix.
def column_override_name(column, old = false)
  "#{clean_class_name(column.active_record_class.name) + '_' unless old}#{clean_column_name(column.name)}_column"
end
# Returns the override helper's name if one is defined, preferring the
# current naming schema and warning when only the legacy name exists.
def column_override(column)
  method = column_override_name(column)
  return method if respond_to?(method)
  old_method = column_override_name(column, true)
  if respond_to?(old_method)
    ActiveSupport::Deprecation.warn("You are using an old naming schema for overrides, you should name the helper #{method} instead of #{old_method}")
    old_method
  end
end
alias_method :column_override?, :column_override
# True when a helper overriding the given list_ui exists.
def override_column_ui?(list_ui)
  respond_to?(override_column_ui(list_ui))
end
# the naming convention for overriding column types with helpers
def override_column_ui(list_ui)
  "active_scaffold_column_#{list_ui}"
end
##
## Formatting
##
# Renders the live checkbox for boolean in-place editing: clicking posts
# the toggled value to update_column via remote_function.
def format_column_checkbox(record, column)
  checked = ActionView::Helpers::InstanceTag.check_box_checked?(record.send(column.name), '1')
  # :value sends false when currently checked, 1 otherwise
  script = remote_function(:method => 'POST', :url => {:controller => params_for[:controller], :action => "update_column", :column => column.name, :id => record.id.to_s, :value => checked ? false : 1, :eid => params[:eid]})
  check_box(:record, column.name, :onclick => script, :id => nil, :object => record)
end
# Default list formatter. Caches plural associations (and captures their
# size first) so column_empty? does not trigger extra queries; for :select
# form_ui columns the stored value is translated to its option label,
# then the numeric, plain, or association formatter is applied.
def format_column_value(record, column, value = nil)
  value ||= record.send(column.name) unless record.nil?
  if value && column.association # cache association size before calling column_empty?
    associated_size = value.size if column.plural_association? and column.associated_number? # get count before cache association
    cache_association(value, column)
  end
  if column.association.nil? or column_empty?(value)
    if column.form_ui == :select
      # entries may be plain values or [label, value] pairs; val is nil for the former
      text, val = column.options[:options].find {|text, val| (val.nil? ? text : val).to_s == value.to_s}
      value = active_scaffold_translated_option(column, text, val).first if text
    end
    if value.is_a? Numeric
      format_number_value(value, column.options)
    else
      format_value(value, column.options)
    end
  else
    format_association_value(value, column, associated_size)
  end
end
# Applies the numeric format requested in options[:format] (:size,
# :percentage, :currency or :i18n_number) and escapes the result.
def format_number_value(value, options = {})
  value = case options[:format]
  when :size
    number_to_human_size(value, options[:i18n_options] || {})
  when :percentage
    number_to_percentage(value, options[:i18n_options] || {})
  when :currency
    number_to_currency(value, options[:i18n_options] || {})
  when :i18n_number
    number_with_delimiter(value, options[:i18n_options] || {})
  else
    value
  end
  clean_column_value(value)
end
# Formats an association for the list: the record label for singular
# associations; for plural ones, up to associated_limit labels joined by
# commas, an ellipsis when truncated, and optionally the total count.
def format_association_value(value, column, size)
  case column.association.macro
  when :has_one, :belongs_to
    format_value(value.to_label)
  when :has_many, :has_and_belongs_to_many
    if column.associated_limit.nil?
      firsts = value.collect { |v| v.to_label }
    else
      firsts = value.first(column.associated_limit)
      firsts.collect! { |v| v.to_label }
      # mark truncation with an ellipsis in the limit-th slot
      firsts[column.associated_limit] = '…' if value.size > column.associated_limit
    end
    if column.associated_limit == 0
      # limit 0 means show only the count (when enabled); otherwise nil
      size if column.associated_number?
    else
      joined_associated = format_value(firsts.join(', '))
      joined_associated << " (#{size})" if column.associated_number? and column.associated_limit and value.size > column.associated_limit
      joined_associated
    end
  end
end
# Formats a scalar value: empty-field placeholder for blanks, localized
# dates/times, translated booleans, otherwise to_s; always escaped.
def format_value(column_value, options = {})
  value = if column_empty?(column_value)
    active_scaffold_config.list.empty_field_text
  elsif column_value.is_a?(Time) || column_value.is_a?(Date)
    l(column_value, :format => options[:format] || :default)
  elsif [FalseClass, TrueClass].include?(column_value.class)
    as_(column_value.to_s.to_sym)
  else
    column_value.to_s
  end
  clean_column_value(value)
end
# Pre-loads a bounded slice of an unloaded association so later emptiness
# and permission checks don't each hit the database.
def cache_association(value, column)
  # we are not using eager loading, cache firsts records in order not to query the database in a future
  unless value.loaded?
    # load at least one record, is needed for column_empty? and checking permissions
    if column.associated_limit.nil?
      Rails.logger.warn "ActiveScaffold: Enable eager loading for #{column.name} association to reduce SQL queries"
    else
      # limit + 1 so the "more than limit" ellipsis/count logic can detect overflow
      value.target = value.find(:all, :limit => column.associated_limit + 1, :select => column.select_columns)
    end
  end
end
# ==========
# = Inline Edit =
# ==========
# True when the column is in-place editable and this user may update the
# column on this record.
def inplace_edit?(record, column)
  column.inplace_edit and record.authorized_for?(:crud_type => :update, :column => column.name)
end
# True when the in-place editor should clone the hidden pattern control in
# the column header instead of fetching the field via ajax.
def inplace_edit_cloning?(column)
  column.inplace_edit != :ajax and (override_form_field?(column) or column.form_ui or (column.column and override_input?(column.column.type)))
end
# Renders the display value for an in-place editable cell.
def format_inplace_edit_column(record,column)
  if column.list_ui == :checkbox
    format_column_checkbox(record, column)
  else
    format_column_value(record, column)
  end
end
# Renders the formatted cell value wrapped in an in-place-editor span plus
# the script that wires up ActiveScaffold.InPlaceEditor against the
# update_column action. Picks the editor strategy: clone the header's
# pattern field, fetch the field via ajax, or a plain textarea/input.
def active_scaffold_inplace_edit(record, column, options = {})
  formatted_column = options[:formatted_column] || format_column_value(record, column)
  id_options = {:id => record.id.to_s, :action => 'update_column', :name => column.name.to_s}
  tag_options = {:id => element_cell_id(id_options), :class => "in_place_editor_field"}
  in_place_editor_options = {
    :url => {:controller => params_for[:controller], :action => "update_column", :column => column.name, :id => record.id.to_s},
    :with => params[:eid] ? "Form.serialize(form) + '&eid=#{params[:eid]}'" : nil,
    :click_to_edit_text => as_(:click_to_edit),
    :cancel_text => as_(:cancel),
    :loading_text => as_(:loading),
    :save_text => as_(:update),
    :saving_text => as_(:saving),
    :ajax_options => "{method: 'post'}",
    :script => true
  }
  if inplace_edit_cloning?(column)
    # clone the hidden pattern control rendered in the column header
    in_place_editor_options.merge!(
      :inplace_pattern_selector => "##{active_scaffold_column_header_id(column)} .#{inplace_edit_control_css_class}",
      :node_id_suffix => record.id.to_s,
      :form_customization => 'element.clonePatternField();'
    )
  elsif column.inplace_edit == :ajax
    # fetch the rendered form field from the server on first edit
    url = url_for(:controller => params_for[:controller], :action => 'render_field', :id => record.id, :column => column.name, :update_column => column.name, :in_place_editing => true, :escape => false)
    plural = column.plural_association? && !override_form_field?(column) && [:select, :record_select].include?(column.form_ui)
    in_place_editor_options[:form_customization] = "element.setFieldFromAjax('#{escape_javascript(url)}', {plural: #{!!plural}});"
  elsif column.column.try(:type) == :text
    in_place_editor_options[:rows] = column.options[:rows] || 5
  end
  # column.options may override any of the defaults above
  in_place_editor_options.merge!(column.options)
  content_tag(:span, formatted_column, tag_options) + active_scaffold_in_place_editor(tag_options[:id], in_place_editor_options)
end
# Renders the hidden form control in the column header that cloning-style
# in-place editors copy for each row.
def inplace_edit_control(column)
  if inplace_edit?(active_scaffold_config.model, column) and inplace_edit_cloning?(column)
    @record = active_scaffold_config.model.new
    # clone so the shared column config is not mutated
    column = column.clone
    column.update_column = nil
    column.form_ui = :select if (column.association && column.form_ui.nil?)
    content_tag(:div, active_scaffold_input_for(column), {:style => "display:none;", :class => inplace_edit_control_css_class})
  end
end
# CSS class marking the hidden pattern control.
def inplace_edit_control_css_class
  "as_inplace_pattern"
end
# Emits the <script> tag instantiating ActiveScaffold.InPlaceEditor for
# +field_id+, translating the Ruby options hash into the editor's
# JavaScript option names. Adds the CSRF token to the serialized form
# data when forgery protection is on.
def active_scaffold_in_place_editor(field_id, options = {})
  function = "new ActiveScaffold.InPlaceEditor("
  function << "'#{field_id}', "
  function << "'#{url_for(options[:url])}'"
  js_options = {}
  if protect_against_forgery?
    options[:with] ||= "Form.serialize(form)"
    options[:with] += " + '&authenticity_token=' + encodeURIComponent('#{form_authenticity_token}')"
  end
  # one-to-one mapping from option keys to InPlaceEditor JS settings
  js_options['cancelText'] = %('#{options[:cancel_text]}') if options[:cancel_text]
  js_options['okText'] = %('#{options[:save_text]}') if options[:save_text]
  js_options['okControl'] = %('#{options[:save_control_type]}') if options[:save_control_type]
  js_options['cancelControl'] = %('#{options[:cancel_control_type]}') if options[:cancel_control_type]
  js_options['loadingText'] = %('#{options[:loading_text]}') if options[:loading_text]
  js_options['savingText'] = %('#{options[:saving_text]}') if options[:saving_text]
  js_options['rows'] = options[:rows] if options[:rows]
  js_options['cols'] = options[:cols] if options[:cols]
  js_options['size'] = options[:size] if options[:size]
  js_options['externalControl'] = "'#{options[:external_control]}'" if options[:external_control]
  js_options['externalControlOnly'] = "true" if options[:external_control_only]
  js_options['submitOnBlur'] = "'#{options[:submit_on_blur]}'" if options[:submit_on_blur]
  js_options['loadTextURL'] = "'#{url_for(options[:load_text_url])}'" if options[:load_text_url]
  js_options['ajaxOptions'] = options[:ajax_options] if options[:ajax_options]
  js_options['htmlResponse'] = !options[:script] if options[:script]
  js_options['callback'] = "function(form) { return #{options[:with]} }" if options[:with]
  js_options['clickToEditText'] = %('#{options[:click_to_edit_text]}') if options[:click_to_edit_text]
  js_options['textBetweenControls'] = %('#{options[:text_between_controls]}') if options[:text_between_controls]
  js_options['highlightcolor'] = %('#{options[:highlight_color]}') if options[:highlight_color]
  js_options['highlightendcolor'] = %('#{options[:highlight_end_color]}') if options[:highlight_end_color]
  js_options['onFailure'] = "function(element, transport) { #{options[:failure]} }" if options[:failure]
  js_options['onComplete'] = "function(transport, element) { #{options[:complete]} }" if options[:complete]
  js_options['onEnterEditMode'] = "function(element) { #{options[:enter_editing]} }" if options[:enter_editing]
  js_options['onLeaveEditMode'] = "function(element) { #{options[:exit_editing]} }" if options[:exit_editing]
  js_options['onFormCustomization'] = "function(element, form) { #{options[:form_customization]} }" if options[:form_customization]
  js_options['inplacePatternSelector'] = %('#{options[:inplace_pattern_selector]}') if options[:inplace_pattern_selector]
  js_options['nodeIdSuffix'] = %('#{options[:node_id_suffix]}') if options[:node_id_suffix]
  function << (', ' + options_for_javascript(js_options)) unless js_options.empty?
  function << ')'
  javascript_tag(function)
end
# Renders the per-row "mark" checkbox; toggling it PUTs the checkbox state
# to the mark action, disabling the control while the request is in flight.
def mark_record(checked, url_params = {})
  url_params.reverse_merge!(:controller => params_for[:controller], :action => 'mark', :eid => params[:eid])
  ajax_options = {:method => :put,
    :url => url_for(url_params),
    :with => "'value=' + this.checked",
    :after => "var checkbox = this; this.disable();",
    :complete => "checkbox.enable();"}
  script = remote_function(ajax_options)
  check_box_tag('mark', '1', checked, :onclick => script, :class => 'mark_record')
end
end
end
end
|
module Integration
class Order < Base
attr_reader :object
# config: integration configuration forwarded to Base.
# object: webhook payload; downstream methods read object[:order].
def initialize(config, object = {})
  @object = object
  super(config)
end
# Related docs:
#
# http://www.salesforce.com/us/developer/docs/api/Content/sforce_api_objects_opportunity.htm
# http://www.salesforce.com/us/developer/docs/api/Content/sforce_api_objects_opportunitylineitem.htm
# Creates or updates the Salesforce Opportunity for object[:order] along
# with its Contact/Account, Products, PricebookEntries, OpportunityLineItems,
# Notes (payments) and any custom objects.
def upsert!
  # Create or Update the Contact. Set the account id
  contact_account = ContactAccount.new(config, object[:order])
  account_id = contact_account.account_id
  if object[:order][:sf_record_type_id]
    contact_account.person_contact_update account_id
  elsif has_address?
    contact_account.upsert! AccountId: account_id
  end
  product_integration = Product.new(config, object[:order])
  line_item_integration = LineItem.new(config)
  params = order_params.merge AccountId: account_id
  if !object[:order][:line_items].to_a.empty? || !object[:order][:sf_pricebook_name].to_s.empty?
    # Opportunity lines needs to ref a product pricebook entry
    standard_id = product_integration.standard_pricebook(object[:order][:sf_pricebook_name])["Id"]
    params = params.merge Pricebook2Id: standard_id
  end
  # Create or Update the Opportunity. Set the opportunity id
  opportunity_id = order_service.upsert! params
  object[:order][:line_items].to_a.each do |line_item|
    # Create Product in case it doesn't exist in Salesforce yet
    unless product_id = product_integration.find_id_by_code(line_item[:product_id])
      attributes = Builder::Product.new(line_item).build.except "DefaultPrice"
      # BUG FIX: Hash#merge returns a new hash; the previous code discarded
      # the result, so Currency was never included in the create! payload.
      attributes = attributes.merge Currency: object[:order][:currency]
      product_id = product_integration.create! attributes
    end
    # Create (if not found) pricebook entry
    pricebook_entry_id = product_integration.setup_pricebook_entry standard_id, product_id, line_item[:price]
    # Create or Update Opportunity line
    line_item_integration.upsert! line_item, opportunity_id, pricebook_entry_id
  end
  payment_integration = Payment.new(config)
  object[:order][:payments].to_a.each do |payment|
    payment_integration.upsert! payment, opportunity_id
  end
  custom_objects_upsert object[:order][:salesforce_custom]
end
# Maps the most recently modified Salesforce Opportunities into the hub's
# order-hash format, joining each opportunity to its Account (and first
# Contact) and merging in any configured custom fields.
def fetch_updates
  latest_opportunities.map do |o|
    account = accounts_by_id.find { |a| a[:Id] == o[:Account][:Id] }
    contact = account[:Contacts].to_a.first.to_h
    # NOTE(review): contact/account are read with string keys while the
    # opportunity uses symbols — presumably the client returns
    # indifferent-access records; confirm against the service layer.
    order = {
      id: o[:Name],
      email: contact['Email'],
      placed_on: o[:CloseDate],
      channel: 'salesforce',
      updated_at: o[:LastModifiedDate],
      totals: {
        order: o[:Amount]
      },
      line_items: build_line_items(o[:OpportunityLineItems]),
      payments: build_payments(o[:Notes]),
      shipping_address: build_address(account, "Shipping"),
      billing_address: build_address(account),
      salesforce_id: o[:Id],
      sf_account_name: account.to_h['Name']
    }
    grab_custom_fields(o).merge order
  end
end
# Opportunities modified since config[:salesforce_orders_since], memoized
# for the lifetime of this integration instance.
def latest_opportunities
  return @latest_opportunities if @latest_opportunities
  @latest_opportunities = order_service.latest_updates(config[:salesforce_orders_since]).to_a
end
# Accounts (with their contacts) for every opportunity in
# latest_opportunities, memoized.
def accounts_by_id
  # FIX: build the quoted id list inside the memoization block; previously
  # it was recomputed on every call even after @accounts_by_id was cached.
  @accounts_by_id ||= begin
    ids = latest_opportunities.map { |o| "'#{o[:Account][:Id]}'" }
    account_service.fetch_contacts_along ids
  end
end
# ISO-8601 UTC timestamp of the most recently modified order in +orders+
# (or in latest_opportunities); falls back to the current time when the
# list is empty.
def latest_timestamp_update(orders = nil)
  newest = (orders || latest_opportunities).last
  moment = newest ? Time.parse(newest["LastModifiedDate"]) : Time.now
  moment.utc.iso8601
end
private
# True when the incoming order carries a billing or shipping address.
def has_address?
  order = object[:order]
  order[:billing_address] || order[:shipping_address]
end
# Extracts the configured custom-field values from an opportunity into a
# { field_name => value } hash.
def grab_custom_fields(opportunity)
  order_service.custom_fields.each_with_object({}) do |field, customs|
    customs[field] = opportunity[field]
  end
end
# Maps Salesforce Note records onto simple payment hashes; nil-safe.
def build_payments(notes)
  notes.to_a.map { |note| { title: note[:Title], body: note[:Body] } }
end
# Builds an address hash from the account's "#{kind}Street" etc. fields
# (kind is "Billing" or "Shipping"); the phone field is not prefixed.
def build_address(account, kind = "Billing")
  mapping = {
    address1: "Street",
    zipcode: "PostalCode",
    city: "City",
    country: "Country",
    state: "State"
  }
  address = mapping.each_with_object({}) { |(key, suffix), memo| memo[key] = account["#{kind}#{suffix}"] }
  address[:phone] = account["Phone"]
  address
end
# Maps OpportunityLineItem records (with nested PricebookEntry/Product2)
# onto the hub's line-item hashes; nil-safe.
def build_line_items(opportunity_lines)
  opportunity_lines.to_a.map do |line|
    product = line[:PricebookEntry][:Product2]
    {
      name: product[:Name],
      product_id: product[:ProductCode],
      price: line[:UnitPrice],
      quantity: line[:Quantity]
    }
  end
end
# Opportunity attributes built from the incoming order payload.
def order_params
  Builder::Order.new(object[:order]).build
end
end
# Syncs a single order line item to a Salesforce OpportunityLineItem.
class LineItem < Base
  # Creates or updates the line for +item+, linking it to the given
  # opportunity and pricebook entry.
  def upsert!(item, opportunity_id, pricebook_entry_id)
    params = Builder::LineItem.new(item).build
    line_item_service.upsert!(params, opportunity_id, pricebook_entry_id)
  end
end
# Syncs an order payment to Salesforce as a Note on the opportunity.
class Payment < Base
  def upsert!(payment, opportunity_id)
    attributes = Integration::Builder::Payment.new(payment).build
    note_service.upsert! attributes, opportunity_id
  end
end
end
Don't set the account id if the order has no email

Another use case: a customer might want to update something in the
Opportunity but doesn't have the order email — for example, when sending
shipment info from ShipStation to Salesforce.
module Integration
class Order < Base
attr_reader :object
# config: integration configuration forwarded to Base.
# object: webhook payload; downstream methods read object[:order].
def initialize(config, object = {})
  @object = object
  super(config)
end
# Related docs:
#
# http://www.salesforce.com/us/developer/docs/api/Content/sforce_api_objects_opportunity.htm
# http://www.salesforce.com/us/developer/docs/api/Content/sforce_api_objects_opportunitylineitem.htm
# Creates or updates the Opportunity in Salesforce together with its
# products, opportunity lines, payments (stored as Notes) and any
# custom objects present on the order payload.
def upsert!
  product_integration = Product.new(config, object[:order])
  line_item_integration = LineItem.new(config)
  # Create or Update the Contact. Set the account id if email is present.
  params = order_params.merge handle_contact_account
  if !object[:order][:line_items].to_a.empty? || !object[:order][:sf_pricebook_name].to_s.empty?
    # Opportunity lines need to reference a product pricebook entry.
    standard_id = product_integration.standard_pricebook(object[:order][:sf_pricebook_name])["Id"]
    params = params.merge Pricebook2Id: standard_id
  end
  # Create or Update the Opportunity, keeping its id for child records.
  opportunity_id = order_service.upsert! params
  object[:order][:line_items].to_a.each do |line_item|
    # Create the Product in case it doesn't exist in Salesforce yet.
    unless product_id = product_integration.find_id_by_code(line_item[:product_id])
      attributes = Builder::Product.new(line_item).build.except "DefaultPrice"
      # BUGFIX: Hash#merge is non-destructive; the previous code discarded
      # its return value, so Currency was never sent. Keep the merged hash.
      attributes = attributes.merge Currency: object[:order][:currency]
      product_id = product_integration.create! attributes
    end
    # Create (if not found) the pricebook entry for this product/price.
    # NOTE(review): standard_id is nil when the pricebook branch above was
    # skipped — presumably setup_pricebook_entry tolerates that; confirm.
    pricebook_entry_id = product_integration.setup_pricebook_entry standard_id, product_id, line_item[:price]
    # Create or Update the Opportunity line.
    line_item_integration.upsert! line_item, opportunity_id, pricebook_entry_id
  end
  payment_integration = Payment.new(config)
  object[:order][:payments].to_a.each do |payment|
    payment_integration.upsert! payment, opportunity_id
  end
  custom_objects_upsert object[:order][:salesforce_custom]
end
def fetch_updates
latest_opportunities.map do |o|
account = accounts_by_id.find { |a| a[:Id] == o[:Account][:Id] }
contact = account[:Contacts].to_a.first.to_h
order = {
id: o[:Name],
email: contact['Email'],
placed_on: o[:CloseDate],
channel: 'salesforce',
updated_at: o[:LastModifiedDate],
totals: {
order: o[:Amount]
},
line_items: build_line_items(o[:OpportunityLineItems]),
payments: build_payments(o[:Notes]),
shipping_address: build_address(account, "Shipping"),
billing_address: build_address(account),
salesforce_id: o[:Id],
sf_account_name: account.to_h['Name']
}
grab_custom_fields(o).merge order
end
end
def latest_opportunities
@latest_opportunities ||= order_service.latest_updates(config[:salesforce_orders_since]).to_a
end
def accounts_by_id
ids = latest_opportunities.map { |o| "'#{o[:Account][:Id]}'" }
@accounts_by_id ||= account_service.fetch_contacts_along ids
end
def latest_timestamp_update(orders = nil)
if order = (orders || latest_opportunities).last
Time.parse(order["LastModifiedDate"]).utc.iso8601
else
Time.now.utc.iso8601
end
end
private
def handle_contact_account
if object[:order][:email].to_s.empty?
{}
else
contact_account = ContactAccount.new(config, object[:order])
account_id = contact_account.account_id
if object[:order][:sf_record_type_id]
contact_account.person_contact_update account_id
elsif has_address?
contact_account.upsert! AccountId: account_id
end
{ AccountId: account_id }
end
end
def has_address?
object[:order][:billing_address] || object[:order][:shipping_address]
end
def grab_custom_fields(opportunity)
order_service.custom_fields.each_with_object({}) do |field, customs|
customs[field] = opportunity[field]
end
end
def build_payments(notes)
notes.to_a.map do |note|
{
title: note[:Title],
body: note[:Body]
}
end
end
def build_address(account, kind = "Billing")
{
address1: account["#{kind}Street"],
zipcode: account["#{kind}PostalCode"],
city: account["#{kind}City"],
country: account["#{kind}Country"],
state: account["#{kind}State"],
phone: account["Phone"]
}
end
def build_line_items(opportunity_lines)
opportunity_lines.to_a.map do |line|
{
name: line[:PricebookEntry][:Product2][:Name],
product_id: line[:PricebookEntry][:Product2][:ProductCode],
price: line[:UnitPrice],
quantity: line[:Quantity]
}
end
end
def order_params
Builder::Order.new(object[:order]).build
end
end
class LineItem < Base
def upsert!(item, opportunity_id, pricebook_entry_id)
params = Builder::LineItem.new(item).build
line_item_service.upsert!(params, opportunity_id, pricebook_entry_id)
end
end
class Payment < Base
def upsert!(payment, opportunity_id)
attributes = Integration::Builder::Payment.new(payment).build
note_service.upsert! attributes, opportunity_id
end
end
end
|
require 'active_support/core_ext/hash/indifferent_access'
class AnalyticsMapping
def self.createMappings
# TODO: Reload this on Rails Auto-reload
mappings = HashWithIndifferentAccess.new
@mappingFiles = Dir.glob("config/analytics/*.yml")
for file in @mappingFiles
yaml = YAML.load_file(file)
mappings.merge! yaml
end
mappings
end
@@mappings = AnalyticsMapping.createMappings
def self.to_event(params, view_assigns)
methodName = "#{params[:controller]}##{params[:action]}"
analysis = @@mappings[methodName]
return nil if analysis.nil?
replaceAllTokens(analysis, params, view_assigns)
analysis[:parameters] ||= {}
HashWithIndifferentAccess.new(analysis)
end
def self.replaceAllTokens(obj, params, view_assigns)
if obj.is_a? String
replaceTokens(obj, params, view_assigns)
elsif obj.is_a? Hash
obj.each {|k, v| replaceAllTokens(v, params, view_assigns)}
end
end
def self.replaceTokens(str, params, view_assigns)
return if str["@"].nil? && str["params["].nil?
properties = {}
view_assigns.each {|k, v| properties.instance_variable_set "@#{k}", v}
properties["params"] = params
result = ERB.new("<%= #{str} %>").result(properties.instance_eval {binding})
str[0..-1] = result
end
end
Use string instead of symbol to look up parameters
require 'active_support/core_ext/hash/indifferent_access'
class AnalyticsMapping
def self.createMappings
# TODO: Reload this on Rails Auto-reload
mappings = HashWithIndifferentAccess.new
@mappingFiles = Dir.glob("config/analytics/*.yml")
for file in @mappingFiles
yaml = YAML.load_file(file)
mappings.merge! yaml
end
mappings
end
@@mappings = AnalyticsMapping.createMappings
def self.to_event(params, view_assigns)
methodName = "#{params[:controller]}##{params[:action]}"
analysis = @@mappings[methodName]
return nil if analysis.nil?
replaceAllTokens(analysis, params, view_assigns)
analysis["parameters"] ||= {}
HashWithIndifferentAccess.new(analysis)
end
def self.replaceAllTokens(obj, params, view_assigns)
if obj.is_a? String
replaceTokens(obj, params, view_assigns)
elsif obj.is_a? Hash
obj.each {|k, v| replaceAllTokens(v, params, view_assigns)}
end
end
def self.replaceTokens(str, params, view_assigns)
return if str["@"].nil? && str["params["].nil?
properties = {}
view_assigns.each {|k, v| properties.instance_variable_set "@#{k}", v}
properties["params"] = params
result = ERB.new("<%= #{str} %>").result(properties.instance_eval {binding})
str[0..-1] = result
end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{preposterous}
s.version = "0.0.5"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Ray Hernandez"]
s.date = %q{2010-05-08}
s.description = %q{This gem supports posting and listing sites on Posterous}
s.email = %q{hernan43@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION.yml",
"examples/.gitignore",
"examples/post.rb",
"lib/preposterous.rb",
"lib/preposterous/base.rb",
"lib/preposterous/httpauth.rb",
"lib/preposterous/request.rb",
"preposterous.gemspec"
]
s.homepage = %q{http://github.com/hernan43/preposterous}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{a wrapper library for the Posterous API}
s.test_files = [
"examples/post.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
else
end
else
end
end
Version bump
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = %q{preposterous}
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Ray Hernandez"]
s.date = %q{2010-05-08}
s.description = %q{This gem supports posting and listing sites on Posterous}
s.email = %q{hernan43@gmail.com}
s.extra_rdoc_files = [
"LICENSE",
"README.rdoc"
]
s.files = [
".gitignore",
"LICENSE",
"README.rdoc",
"Rakefile",
"VERSION.yml",
"examples/.gitignore",
"examples/post.rb",
"lib/preposterous.rb",
"lib/preposterous/base.rb",
"lib/preposterous/connection.rb",
"lib/preposterous/httpauth.rb",
"lib/preposterous/post.rb",
"lib/preposterous/request.rb",
"preposterous.gemspec"
]
s.homepage = %q{http://github.com/hernan43/preposterous}
s.rdoc_options = ["--charset=UTF-8"]
s.require_paths = ["lib"]
s.rubygems_version = %q{1.3.6}
s.summary = %q{a wrapper library for the Posterous API}
s.test_files = [
"examples/post.rb"
]
if s.respond_to? :specification_version then
current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
s.specification_version = 3
if Gem::Version.new(Gem::RubyGemsVersion) >= Gem::Version.new('1.2.0') then
else
end
else
end
end
|
require 'pry'
require 'pry-doc'
require 'pry-git'
require 'pry-remote'
require 'awesome_print'
require 'jazz_hands/hirb_ext'
# Enable pry-nav by default on MRI 1.9.3 only
require 'pry-nav' if RUBY_VERSION >= '1.9.3'
module JazzHands
class Railtie < Rails::Railtie
initializer 'jazz_hands.initialize' do |app|
silence_warnings do
::IRB = Pry # Replace IRB with Pry completely
# Rails 3.2 injects commands into IRB::ExtendCommandBundle. Make sure
# Pry is compatible enough so Rails boot works.
unless defined? IRB::ExtendCommandBundle # Latest Pry defines it
module IRB::ExtendCommandBundle; end
end
# Add Rails 3.2 console commands as Pry commands
if defined? Rails::ConsoleMethods
class Pry::RailsCommands
extend Rails::ConsoleMethods
end
Rails::ConsoleMethods.instance_methods.each do |name|
Pry::Commands.command(name.to_s) do
Pry::RailsCommands.send(name)
end
end
end
# We're managing the loading of plugins, especially pry-nav which
# shouldn't be loaded on 1.9.2. So don't let pry autoload them.
Pry.config.should_load_plugins = false
Pry.config.plugins.enabled = false
# Use awesome_print for output, but keep pry's pager. If Hirb is
# enabled, try printing with it first.
Pry.config.print = ->(output, value) do
return if JazzHands.hirb_output && Hirb::View.view_or_page_output(value)
pretty = value.ai(indent: 2)
Pry::Helpers::BaseHelpers.stagger_output("=> #{pretty}", output)
end
# Friendlier prompt - nesting levels look like directory paths
name = app.class.parent_name.underscore
colored_name = Pry::Helpers::Text.blue(name)
raquo = Pry::Helpers::Text.red("\u00BB")
line = ->(pry) { "[#{Pry::Helpers::Text.bold(pry.input_array.size)}] " }
target_string = ->(object, level) do
unless (string = Pry.view_clip(object)) == 'main'
"(#{'../' * level}#{string})"
else
''
end
end
Pry.config.prompt = [
->(object, level, pry) do
"#{line.(pry)}#{colored_name}#{target_string.(object, level)} #{raquo} "
end,
->(object, level, pry) do
spaces = ' ' * (
"[#{pry.input_array.size}] ".size + # Uncolored `line.(pry)`
name.size +
target_string.(object, level).size
)
"#{spaces} #{raquo} "
end
]
end
end
end
end
Use Railtie#console to properly initialize Pry commands
require 'pry'
require 'pry-doc'
require 'pry-git'
require 'pry-remote'
require 'awesome_print'
require 'jazz_hands/hirb_ext'
# Enable pry-nav by default on MRI 1.9.3 only
require 'pry-nav' if RUBY_VERSION >= '1.9.3'
module JazzHands
class Railtie < Rails::Railtie
initializer 'jazz_hands.initialize' do |app|
silence_warnings do
::IRB = Pry # Replace IRB with Pry completely
# Rails 3.2 injects commands into IRB::ExtendCommandBundle. Make sure
# Pry is compatible enough so Rails boot works.
unless defined? IRB::ExtendCommandBundle # Latest Pry defines it
module IRB::ExtendCommandBundle; end
end
# We're managing the loading of plugins, especially pry-nav which
# shouldn't be loaded on 1.9.2. So don't let pry autoload them.
Pry.config.should_load_plugins = false
Pry.config.plugins.enabled = false
# Use awesome_print for output, but keep pry's pager. If Hirb is
# enabled, try printing with it first.
Pry.config.print = ->(output, value) do
return if JazzHands.hirb_output && Hirb::View.view_or_page_output(value)
pretty = value.ai(indent: 2)
Pry::Helpers::BaseHelpers.stagger_output("=> #{pretty}", output)
end
# Friendlier prompt - nesting levels look like directory paths
name = app.class.parent_name.underscore
colored_name = Pry::Helpers::Text.blue(name)
raquo = Pry::Helpers::Text.red("\u00BB")
line = ->(pry) { "[#{Pry::Helpers::Text.bold(pry.input_array.size)}] " }
target_string = ->(object, level) do
unless (string = Pry.view_clip(object)) == 'main'
"(#{'../' * level}#{string})"
else
''
end
end
Pry.config.prompt = [
->(object, level, pry) do
"#{line.(pry)}#{colored_name}#{target_string.(object, level)} #{raquo} "
end,
->(object, level, pry) do
spaces = ' ' * (
"[#{pry.input_array.size}] ".size + # Uncolored `line.(pry)`
name.size +
target_string.(object, level).size
)
"#{spaces} #{raquo} "
end
]
end
end
console do
# Add Rails 3.2 console commands as Pry commands
if defined? Rails::ConsoleMethods
class Pry::RailsCommands
extend Rails::ConsoleMethods
end
Rails::ConsoleMethods.instance_methods.each do |name|
Pry::Commands.command(name.to_s) do
Pry::RailsCommands.send(name)
end
end
end
end
end
end
|
module Appium
module Core
module Ios
module Xcuitest
module SearchContext
# @!method ios_class_chain_find
# Only for XCUITest(WebDriverAgent)
# find_element/s can be used with a [class chain]( https://github.com/facebook/WebDriverAgent/wiki/Queries)
#
# @example
#
# # select the third child button of the first child window element
# find_elements :class_chain, 'XCUIElementTypeWindow/XCUIElementTypeButton[3]'
#
# # select all the children windows
# find_elements :class_chain, 'XCUIElementTypeWindow'
#
# # select the second last child of the second child window
# find_elements :class_chain, 'XCUIElementTypeWindow[2]/XCUIElementTypeAny[-2]'
#
# # matching predicate. <code>`</code> is the mark.
# find_elements :class_chain, 'XCUIElementTypeWindow[`visible = 1][`name = \"bla\"`]'
#
# # containing predicate. `$` is the mark.
# # Require appium-xcuitest-driver 2.54.0+
# # PR: https://github.com/facebook/WebDriverAgent/pull/707/files
# find_elements :class_chain, 'XCUIElementTypeWindow[$name = \"bla$$$bla\"$]'
# find_elements :class_chain, "**/XCUIElementTypeWindow[$name == 'Buttons'$]"
# find_elements :class_chain, "**/XCUIElementTypeStaticText[$name == 'Buttons'$]"
#
def self.extend
::Appium::Core::Base::SearchContext.add_finders(class_chain: '-ios class chain')
end
end
end # class << self
end # module Ios
end # module Core
end # module Appium
docs: add examples and tag_name (#702)
module Appium
module Core
module Ios
module Xcuitest
module SearchContext
# @!method ios_class_chain_find
# Only for XCUITest(WebDriverAgent)
# find_element/s can be used with a [class chain]( https://github.com/facebook/WebDriverAgent/wiki/Queries)
#
# @example
#
# # select the third child button of the first child window element
# find_elements :class_chain, 'XCUIElementTypeWindow/XCUIElementTypeButton[3]'
#
# # select all the children windows
# find_elements :class_chain, 'XCUIElementTypeWindow'
#
# # select the second last child of the second child window
# find_elements :class_chain, 'XCUIElementTypeWindow[2]/XCUIElementTypeAny[-2]'
#
# # matching predicate. <code>`</code> is the mark.
# find_elements :class_chain, 'XCUIElementTypeWindow[`visible = 1][`name = \"bla\"`]'
#
# # containing predicate. `$` is the mark.
# # Require appium-xcuitest-driver 2.54.0+
# # PR: https://github.com/facebook/WebDriverAgent/pull/707/files
# find_elements :class_chain, 'XCUIElementTypeWindow[$name = \"bla$$$bla\"$]'
# e = find_element :class_chain, "**/XCUIElementTypeWindow[$name == 'Buttons'$]"
# e.tag_name #=> "XCUIElementTypeWindow"
# e = find_element :class_chain, "**/XCUIElementTypeStaticText[$name == 'Buttons'$]"
# e.tag_name #=> "XCUIElementTypeStaticText"
#
def self.extend
::Appium::Core::Base::SearchContext.add_finders(class_chain: '-ios class chain')
end
end
end # class << self
end # module Ios
end # module Core
end # module Appium
|
module Jekyll
module S3
class Uploader
def self.run(site_dir, config, in_headless_mode = false)
puts "Deploying _site/* to #{config['s3_bucket']}"
s3 = AWS::S3.new(:access_key_id => config['s3_id'],
:secret_access_key => config['s3_secret'],
:s3_endpoint => Endpoint.new(config['s3_endpoint']).hostname )
new_files_count, changed_files_count, changed_files = upload_files(
s3, config, site_dir
)
deleted_files_count = remove_superfluous_files(
s3, config['s3_bucket'], site_dir, in_headless_mode)
print_done_report config
[new_files_count, changed_files_count, deleted_files_count, changed_files]
end
private
def self.print_done_report(config)
bucket_name = config['s3_bucket']
website_hostname_suffix = Endpoint.new(config['s3_endpoint']).website_hostname
website_hostname_with_bucket =
"%s.%s" % [bucket_name, website_hostname_suffix]
puts "Done! Go visit: http://#{website_hostname_with_bucket}/index.html"
end
def self.upload_files(s3, config, site_dir)
changed_files, new_files = DiffHelper.resolve_files_to_upload(
s3.buckets[config['s3_bucket']], site_dir)
to_upload = changed_files + new_files
if to_upload.empty?
puts "No new or changed files to upload"
else
pre_upload_report = []
pre_upload_report << "Uploading"
pre_upload_report << "#{new_files.length} new" if new_files.length > 0
pre_upload_report << "and" if changed_files.length > 0 and new_files.length > 0
pre_upload_report << "#{changed_files.length} changed" if changed_files.length > 0
pre_upload_report << "file(s)"
puts pre_upload_report.join(' ')
to_upload.each do |f|
upload_file(f, s3, config, site_dir)
end
end
[new_files.length, changed_files.length, changed_files]
end
def self.upload_file(file, s3, config, site_dir)
Retry.run_with_retry do
upload = Upload.new(file, s3, config, site_dir)
if upload.perform!
puts "Upload #{upload.details}: Success!"
else
puts "Upload #{upload.details}: FAILURE!"
end
end
end
def self.remove_superfluous_files(s3, s3_bucket_name, site_dir, in_headless_mode)
remote_files = s3.buckets[s3_bucket_name].objects.map { |f| f.key }
local_files = load_all_local_files(site_dir)
files_to_delete = remote_files - local_files
deleted_files_count = 0
if in_headless_mode
files_to_delete.each { |s3_object_key|
delete_s3_object s3, s3_bucket_name, s3_object_key
deleted_files_count += 1
}
else
Keyboard.if_user_confirms_delete(files_to_delete) { |s3_object_key|
delete_s3_object s3, s3_bucket_name, s3_object_key
deleted_files_count += 1
}
end
deleted_files_count
end
def self.delete_s3_object(s3, s3_bucket_name, s3_object_key)
Retry.run_with_retry do
s3.buckets[s3_bucket_name].objects[s3_object_key].delete
puts("Delete #{s3_object_key}: Success!")
end
end
# Lists every regular file under site_dir (including dotfiles), with
# paths made relative to site_dir.
#
# BUGFIX: the '{*,.*}' glob does not reliably pick up dotfiles in
# subdirectories on Ruby 2.0.0; File::FNM_DOTMATCH makes '**/*' match
# them on all supported Rubies. Directory entries (including the '.'
# entries FNM_DOTMATCH produces) are filtered out afterwards.
def self.load_all_local_files(site_dir)
  Dir.glob(site_dir + '/**/*', File::FNM_DOTMATCH).
    delete_if { |f| File.directory?(f) }.
    map { |f| f.gsub(site_dir + '/', '') }
end
end
end
end
Load dotfiles also with Ruby 2.0.0
module Jekyll
module S3
class Uploader
def self.run(site_dir, config, in_headless_mode = false)
puts "Deploying _site/* to #{config['s3_bucket']}"
s3 = AWS::S3.new(:access_key_id => config['s3_id'],
:secret_access_key => config['s3_secret'],
:s3_endpoint => Endpoint.new(config['s3_endpoint']).hostname )
new_files_count, changed_files_count, changed_files = upload_files(
s3, config, site_dir
)
deleted_files_count = remove_superfluous_files(
s3, config['s3_bucket'], site_dir, in_headless_mode)
print_done_report config
[new_files_count, changed_files_count, deleted_files_count, changed_files]
end
private
def self.print_done_report(config)
bucket_name = config['s3_bucket']
website_hostname_suffix = Endpoint.new(config['s3_endpoint']).website_hostname
website_hostname_with_bucket =
"%s.%s" % [bucket_name, website_hostname_suffix]
puts "Done! Go visit: http://#{website_hostname_with_bucket}/index.html"
end
def self.upload_files(s3, config, site_dir)
changed_files, new_files = DiffHelper.resolve_files_to_upload(
s3.buckets[config['s3_bucket']], site_dir)
to_upload = changed_files + new_files
if to_upload.empty?
puts "No new or changed files to upload"
else
pre_upload_report = []
pre_upload_report << "Uploading"
pre_upload_report << "#{new_files.length} new" if new_files.length > 0
pre_upload_report << "and" if changed_files.length > 0 and new_files.length > 0
pre_upload_report << "#{changed_files.length} changed" if changed_files.length > 0
pre_upload_report << "file(s)"
puts pre_upload_report.join(' ')
to_upload.each do |f|
upload_file(f, s3, config, site_dir)
end
end
[new_files.length, changed_files.length, changed_files]
end
def self.upload_file(file, s3, config, site_dir)
Retry.run_with_retry do
upload = Upload.new(file, s3, config, site_dir)
if upload.perform!
puts "Upload #{upload.details}: Success!"
else
puts "Upload #{upload.details}: FAILURE!"
end
end
end
def self.remove_superfluous_files(s3, s3_bucket_name, site_dir, in_headless_mode)
remote_files = s3.buckets[s3_bucket_name].objects.map { |f| f.key }
local_files = load_all_local_files(site_dir)
files_to_delete = remote_files - local_files
deleted_files_count = 0
if in_headless_mode
files_to_delete.each { |s3_object_key|
delete_s3_object s3, s3_bucket_name, s3_object_key
deleted_files_count += 1
}
else
Keyboard.if_user_confirms_delete(files_to_delete) { |s3_object_key|
delete_s3_object s3, s3_bucket_name, s3_object_key
deleted_files_count += 1
}
end
deleted_files_count
end
def self.delete_s3_object(s3, s3_bucket_name, s3_object_key)
Retry.run_with_retry do
s3.buckets[s3_bucket_name].objects[s3_object_key].delete
puts("Delete #{s3_object_key}: Success!")
end
end
def self.load_all_local_files(site_dir)
Dir.glob(site_dir + '/**/*', File::FNM_DOTMATCH).
delete_if { |f| File.directory?(f) }.
map { |f| f.gsub(site_dir + '/', '') }
end
end
end
end
|
class Jerakia
class Encryption
attr_reader :loaded
def initialize(provider=nil)
if provider.nil?
provider = config["provider"]
end
return nil if provider.nil?
begin
require "jerakia/encryption/#{provider}"
rescue LoadError => e
raise Jerakia::Error, "Failed to load encryption provider #{provider}"
end
begin
eval "extend Jerakia::Encryption::#{provider.capitalize}"
rescue NameError => e
raise Jerakia::Error, "Encryption provider #{provider} did not provide class"
end
@loaded = true
end
def loaded?
loaded
end
def features?(feature)
case feature
when :encrypt
respond_to?('encrypt')
when :decrypt
respond_to?('decrypt')
else
false
end
end
# Encryption settings from the Jerakia configuration.
#
# BUGFIX: return an empty hash when the :encryption section is absent
# from jerakia.yaml, so callers (e.g. config["provider"] in the
# constructor) can index safely instead of raising NoMethodError on nil.
def self.config
  Jerakia.config[:encryption] || {}
end

# Instance-level convenience accessor for the class-level config.
def config
  self.class.config
end
end
end
Bugfix: return empty hash for configuration when encryption not configured in jerakia.yaml
class Jerakia
class Encryption
attr_reader :loaded
def initialize(provider=nil)
if provider.nil?
provider = config["provider"]
end
return nil if provider.nil?
begin
require "jerakia/encryption/#{provider}"
rescue LoadError => e
raise Jerakia::Error, "Failed to load encryption provider #{provider}"
end
begin
eval "extend Jerakia::Encryption::#{provider.capitalize}"
rescue NameError => e
raise Jerakia::Error, "Encryption provider #{provider} did not provide class"
end
@loaded = true
end
def loaded?
loaded
end
def features?(feature)
case feature
when :encrypt
respond_to?('encrypt')
when :decrypt
respond_to?('decrypt')
else
false
end
end
def self.config
Jerakia.config[:encryption] || {}
end
def config
self.class.config
end
end
end
|
class Jobler::BaseJobler
attr_accessor :controller, :format
attr_reader :args, :job
def self.before_jobling(&blk)
@@before_jobling ||= [] # rubocop:disable Style/ClassVars
@@before_jobling << blk
end
def self.after_jobling(&blk)
@@after_jobling ||= [] # rubocop:disable Style/ClassVars
@@after_jobling << blk
end
# @param args [Hash] jobler-specific arguments for this run
# @param job  the job record this jobler belongs to and reports progress on
#
# BUGFIX: initialize the callback class variables here so that
# call_before_callbacks / call_after_callbacks don't raise NameError
# when no before_jobling / after_jobling block was ever registered
# (class variables are undefined until first assignment).
def initialize(args:, job:)
  @args = args
  @job = job
  @@before_jobling ||= [] # rubocop:disable Style/ClassVars
  @@after_jobling ||= [] # rubocop:disable Style/ClassVars
end
def call_before_callbacks
@@before_jobling&.each do |before_callback|
instance_eval(&before_callback)
end
end
def call_after_callbacks
@@after_jobling&.each do |after_callback|
instance_eval(&after_callback)
end
end
def create_result!(content: nil, name:, result: nil, temp_file: nil, save_in_database: false)
jobler_result = job.results.new(name: name)
if content && !temp_file
temp_file = Tempfile.new(name)
temp_file.write(content)
temp_file.close
end
if result
jobler_result.result = result
else
raise "No tempfile could be found" unless temp_file
handle_file(jobler_result: jobler_result, save_in_database: save_in_database, temp_file: temp_file)
end
jobler_result.save!
jobler_result
end
def execute!
raise NoMethodError, "You should define the 'execute!' method on #{self.class.name}"
end
def jobler_name
new_name = ""
parts = self.class.name.split("::")
parts.each do |part|
new_name << "/" unless new_name.empty?
new_name << part.underscore
end
new_name
end
def increment_progress!(value: 1.0)
@_progress_count ||= 0.0
@_progress_count += value.to_f
new_progress = @_progress_count / @_progress_total
if @_current_progress.nil?
update = true
else
progress_difference = new_progress - @_current_progress
update = true if progress_difference > 0.01
end
if update
job.update!(progress: new_progress)
@_current_progress = new_progress
end
end
def progress_total(new_total)
@_progress_total = new_total.to_f
end
def render(template_path, locals = {})
template_path = "joblers/#{jobler_name}/#{template_path}" if template_path.is_a?(Symbol)
request = ActionDispatch::Request.new(
"HTTP_HOST" => "#{job.host}:#{job.port}",
"HTTP_X_FORWARDED_PROTO" => job.protocol
)
controller = ::ApplicationJoblerController.new
controller.instance_variable_set(:@jobler, self)
controller.request = request
controller.response = ActionDispatch::Response.new
render_result = controller.render(template_path, layout: false, locals: {jobler: self}.merge(locals))
if render_result.is_a?(String)
# Rails 5 behaviour
render_result
else
# Rails 4 behaviour
render_result.join
end
end
def result
raise NoMethodError, "You should define the 'result' method on #{self.class.name}"
end
def temp_file_for_result(name:)
job_result = job.results.where(name: name).first
raise "No result by that name: #{name}" unless job_result
temp_file = ::Tempfile.new("jobler_tempfile")
temp_file.binmode
temp_file.write(job_result.result)
temp_file.close
temp_file
end
def url_for_result(name:)
job_result = job.results.where(name: name).first
raise "No result by that name: #{name}" unless job_result
Rails.application.routes.url_helpers.rails_blob_path(job_result.file.attachment, only_path: true)
end
private
def handle_file(jobler_result:, save_in_database:, temp_file:)
if save_in_database
temp_file.close unless temp_file.closed?
content = File.read(temp_file.path)
jobler_result.result = content
else
jobler_result.file.attach(
filename: File.basename(temp_file.path),
io: File.open(temp_file.path)
)
end
end
end
Fixed undefined vars
class Jobler::BaseJobler
attr_accessor :controller, :format
attr_reader :args, :job
def self.before_jobling(&blk)
@@before_jobling ||= [] # rubocop:disable Style/ClassVars
@@before_jobling << blk
end
def self.after_jobling(&blk)
@@after_jobling ||= [] # rubocop:disable Style/ClassVars
@@after_jobling << blk
end
def initialize(args:, job:)
@args = args
@job = job
@@before_jobling ||= []
@@after_jobling ||= []
end
def call_before_callbacks
@@before_jobling&.each do |before_callback|
instance_eval(&before_callback)
end
end
def call_after_callbacks
@@after_jobling&.each do |after_callback|
instance_eval(&after_callback)
end
end
def create_result!(content: nil, name:, result: nil, temp_file: nil, save_in_database: false)
jobler_result = job.results.new(name: name)
if content && !temp_file
temp_file = Tempfile.new(name)
temp_file.write(content)
temp_file.close
end
if result
jobler_result.result = result
else
raise "No tempfile could be found" unless temp_file
handle_file(jobler_result: jobler_result, save_in_database: save_in_database, temp_file: temp_file)
end
jobler_result.save!
jobler_result
end
def execute!
raise NoMethodError, "You should define the 'execute!' method on #{self.class.name}"
end
def jobler_name
new_name = ""
parts = self.class.name.split("::")
parts.each do |part|
new_name << "/" unless new_name.empty?
new_name << part.underscore
end
new_name
end
def increment_progress!(value: 1.0)
@_progress_count ||= 0.0
@_progress_count += value.to_f
new_progress = @_progress_count / @_progress_total
if @_current_progress.nil?
update = true
else
progress_difference = new_progress - @_current_progress
update = true if progress_difference > 0.01
end
if update
job.update!(progress: new_progress)
@_current_progress = new_progress
end
end
def progress_total(new_total)
@_progress_total = new_total.to_f
end
def render(template_path, locals = {})
template_path = "joblers/#{jobler_name}/#{template_path}" if template_path.is_a?(Symbol)
request = ActionDispatch::Request.new(
"HTTP_HOST" => "#{job.host}:#{job.port}",
"HTTP_X_FORWARDED_PROTO" => job.protocol
)
controller = ::ApplicationJoblerController.new
controller.instance_variable_set(:@jobler, self)
controller.request = request
controller.response = ActionDispatch::Response.new
render_result = controller.render(template_path, layout: false, locals: {jobler: self}.merge(locals))
if render_result.is_a?(String)
# Rails 5 behaviour
render_result
else
# Rails 4 behaviour
render_result.join
end
end
def result
raise NoMethodError, "You should define the 'result' method on #{self.class.name}"
end
def temp_file_for_result(name:)
job_result = job.results.where(name: name).first
raise "No result by that name: #{name}" unless job_result
temp_file = ::Tempfile.new("jobler_tempfile")
temp_file.binmode
temp_file.write(job_result.result)
temp_file.close
temp_file
end
def url_for_result(name:)
job_result = job.results.where(name: name).first
raise "No result by that name: #{name}" unless job_result
Rails.application.routes.url_helpers.rails_blob_path(job_result.file.attachment, only_path: true)
end
private
def handle_file(jobler_result:, save_in_database:, temp_file:)
if save_in_database
temp_file.close unless temp_file.closed?
content = File.read(temp_file.path)
jobler_result.result = content
else
jobler_result.file.attach(
filename: File.basename(temp_file.path),
io: File.open(temp_file.path)
)
end
end
end
|
module Judge
class FormBuilder < ActionView::Helpers::FormBuilder
%w{text_field text_area password_field}.each do |type|
helper = <<-END
def #{type}(method, options = {})
add_validate_attr!(self.object, method, options)
super
end
END
class_eval helper, __FILE__, __LINE__
end
def radio_button(method, tag_value, options = {})
add_validate_attr!(self.object, method, options)
super
end
def check_box(method, options = {}, checked_value = "1", unchecked_value = "0")
add_validate_attr!(self.object, method, options)
super
end
def select(method, choices, options = {}, html_options = {})
add_validate_attr!(self.object, method, options, html_options)
super
end
def collection_select(method, collection, value_method, text_method, options = {}, html_options = {})
add_validate_attr!(self.object, method, options, html_options)
super
end
def grouped_collection_select(method, collection, group_method, group_label_method, option_key_method, option_value_method, options = {}, html_options = {})
add_validate_attr!(self.object, method, options, html_options)
super
end
%w{date_select datetime_select time_select}.each do |type|
helper = <<-END
def #{type}(method, options = {}, html_options = {})
add_validate_attr!(self.object, method, options, html_options)
super
end
END
class_eval helper, __FILE__, __LINE__
end
# Tags html_options with client-side validation data, then delegates to
# Rails' time_zone_select via super.
#
# BUGFIX: do not reassign html_options from add_validate_attr!'s return
# value — that method mutates html_options in place and returns nil when
# :validate is not requested, which clobbered html_options before the
# implicit-argument super call.
def time_zone_select(method, priority_zones = nil, options = {}, html_options = {})
  add_validate_attr!(self.object, method, options, html_options)
  super
end
private
def add_validate_attr!(object, method, options, html_options = nil)
options_to_merge = html_options || options
if options.delete(:validate)
data = { "data-validate" => validators_for(object, method).to_json }
options_to_merge.merge!(data)
end
end
def validators_for(object, method)
Judge::ValidatorCollection.new(object, method)
end
end
end
Remove stray assignment in FormBuilder
It was left over from a previous refactoring.
module Judge
  # Form builder that augments the standard Rails helpers with client-side
  # validation metadata: when a field is built with `validate: true`, a
  # "data-validate" attribute holding the field's validators (serialized as
  # JSON) is merged into the rendered tag's options.
  class FormBuilder < ActionView::Helpers::FormBuilder
    # Simple inputs taking only an options hash. Defined via class_eval
    # strings so the zero-argument `super` still forwards the original
    # arguments to ActionView's helper (not possible with define_method).
    %w{text_field text_area password_field}.each do |type|
      helper = <<-END
        def #{type}(method, options = {})
          add_validate_attr!(self.object, method, options)
          super
        end
      END
      class_eval helper, __FILE__, __LINE__
    end

    def radio_button(method, tag_value, options = {})
      add_validate_attr!(self.object, method, options)
      super
    end

    def check_box(method, options = {}, checked_value = "1", unchecked_value = "0")
      add_validate_attr!(self.object, method, options)
      super
    end

    def select(method, choices, options = {}, html_options = {})
      add_validate_attr!(self.object, method, options, html_options)
      super
    end

    def collection_select(method, collection, value_method, text_method, options = {}, html_options = {})
      add_validate_attr!(self.object, method, options, html_options)
      super
    end

    def grouped_collection_select(method, collection, group_method, group_label_method, option_key_method, option_value_method, options = {}, html_options = {})
      add_validate_attr!(self.object, method, options, html_options)
      super
    end

    # Date/time inputs take both options and html_options.
    %w{date_select datetime_select time_select}.each do |type|
      helper = <<-END
        def #{type}(method, options = {}, html_options = {})
          add_validate_attr!(self.object, method, options, html_options)
          super
        end
      END
      class_eval helper, __FILE__, __LINE__
    end

    def time_zone_select(method, priority_zones = nil, options = {}, html_options = {})
      add_validate_attr!(self.object, method, options, html_options)
      super
    end

    private

    # Mutates the tag options in place: removes :validate from +options+ and,
    # when it was truthy, merges the serialized validators for +method+ into
    # +html_options+ (or +options+ when no separate html_options hash exists).
    def add_validate_attr!(object, method, options, html_options = nil)
      options_to_merge = html_options || options
      if options.delete(:validate)
        data = { "data-validate" => validators_for(object, method).to_json }
        options_to_merge.merge!(data)
      end
    end

    # Collects the model validators applying to +method+ on +object+.
    def validators_for(object, method)
      Judge::ValidatorCollection.new(object, method)
    end
  end
end
|
require 'uri'
require_relative './odbc'
module Carto
  class Connector
    # Connector provider importing Google BigQuery tables or queries through
    # the Simba ODBC driver. Example parameters:
    # {
    #   "provider": "bigquery",
    #   "billing_project": "cartodb-on-gcp-core-team",
    #   "dataset": "f1",
    #   "table": "circuits",
    #   "import_as": "my_circuits",
    #   "storage_api": true
    # }
    class BigQueryProvider < OdbcProvider
      metadata id: 'bigquery', name: 'Google BigQuery', public?: true

      odbc_attributes billing_project: :Catalog, storage_api: :EnableHTAPI, project: :AdditionalProjects, dataset: { DefaultDataset: nil }

      # Validates the parameters; :dataset is required unless :sql_query is given.
      def errors(only_for: nil)
        parameters_to_validate = @params.normalize_parameter_names(only_for)
        dataset_errors = []
        if parameters_to_validate.blank? || parameters_to_validate.include?(:dataset)
          # dataset is not optional if not using a query
          if !@params.normalized_names.include?(:dataset) && !@params.normalized_names.include?(:sql_query)
            dataset_errors << "The dataset parameter is needed for tables"
          end
        end
        super + dataset_errors
      end

      # BigQuery provider adds the list_projects and dry_run features
      def features_information
        super.merge(list_projects: true, dry_run: true)
      end

      # True when the user's OAuth token is present and still valid.
      def check_connection
        ok = false
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          ok = oauth_client.token_valid?
        end
        ok
      end

      # Returns the user's projects, or nil when no OAuth client is available.
      def list_projects
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_projects
        end
      end

      def list_project_datasets(project_id)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_datasets(project_id)
        end
      end

      def list_project_dataset_tables(project_id, dataset_id)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_tables(project_id, dataset_id)
        end
      end

      # The ODBC driver expects 1/0 instead of true/false.
      def parameters_to_odbc_attributes(params, optional_params, required_params)
        super(params, optional_params, required_params).map { |k, v|
          if v == true
            v = 1
          elsif v == false
            v = 0
          end
          [k, v]
        }
      end

      def table_options
        params = super
        # due to driver limitations (users need specific permissions in
        # their projects) table imports have to be imported as sql_query
        params[:sql_query] = table_query unless params[:sql_query].present?
        params
      end

      # SELECT-all query over the configured project/dataset/table.
      def table_query
        project = @params[:project] || @params[:billing_project]
        %{SELECT * FROM `#{project}.#{@params[:dataset]}.#{@params[:table]}`;}
      end

      # Lists tables across all of the user's projects and datasets, up to
      # limits[:max_listed_tables] entries (unbounded when no limit is given).
      def list_tables(limits: {})
        tables = []
        limit = limits[:max_listed_tables]
        oauth_client = @sync_oauth&.get_service_datasource
        projects = oauth_client&.list_projects || []
        projects.each do |project|
          project_id = project[:id]
          oauth_client.list_datasets(project_id).each do |dataset|
            dataset_id = dataset[:id]
            oauth_client.list_tables(project_id, dataset_id).each do |table|
              tables << {
                schema: dataset[:qualified_name],
                name: table[:id]
              }
              # FIX: guard against a missing limit; comparing size against nil raises.
              # NOTE(review): break only exits the innermost loop, so the result
              # may slightly exceed the limit (one extra table per remaining dataset).
              break if limit && tables.size >= limit
            end
          end
        end
        tables
      end

      # Runs the dry-run (as a side effect of building the ODBC attributes)
      # and returns its cached result.
      def dry_run
        fixed_odbc_attributes unless @dry_run_result
        @dry_run_result
      end

      private

      # Notes regarding IMPORT (external) schema and the DefaultDataset parameter:
      # * For tables DefaultDataset is unnecessary (but does not harm if present),
      #   the IMPORT (external) schema is necessary and the one which defines the dataset.
      # * For queries (sql_query), IMPORT (external) schema is ignored and
      #   the DefaultDataset is necessary when table names are not qualified with the dataset.
      server_attributes %I(
        Driver
        Catalog
        SQLDialect
        OAuthMechanism
        ClientId
        ClientSecret
        EnableHTAPI
        AllowLargeResults
        UseQueryCache
        HTAPI_MinActivationRatio
        HTAPI_MinResultsSize
        UseDefaultLargeResultsDataset
        LargeResultsDataSetId
        LargeResultsTempTableExpirationTime
        AdditionalProjects
      )
      user_attributes %I(RefreshToken)
      required_parameters %I(billing_project)
      optional_parameters %I(project location import_as dataset table sql_query storage_api)

      # Class constants
      DATASOURCE_NAME = id

      # Driver constants
      DRIVER_NAME = 'Simba ODBC Driver for Google BigQuery 64-bit'
      SQL_DIALECT = 1
      OAUTH_MECHANISM = 1
      ALLOW_LRESULTS = 0
      ENABLE_STORAGE_API = 0
      QUERY_CACHE = 1
      HTAPI_MIN_ACTIVATION_RATIO = 0
      HTAPI_MIN_RESULTS_SIZE = 100
      HTAPI_TEMP_DATASET = '_cartoimport_temp'
      HTAPI_TEMP_TABLE_EXP = 3600000

      def initialize(context, params)
        super
        @oauth_config = Cartodb.get_config(:oauth, DATASOURCE_NAME)
        @sync_oauth = context&.user&.oauths&.select(DATASOURCE_NAME)
        validate_config!(context)
      end

      def validate_config!(context)
        # If a user is not provided we omit validation, because the
        # instantiated provider can be used for operations that don't require
        # a connection such as obtaining metadata (list_tables?, features_information, etc.)
        return if !context || !context.user

        if @oauth_config.nil? || @oauth_config['client_id'].nil? || @oauth_config['client_secret'].nil?
          raise "Missing OAuth configuration for BigQuery: Client ID & Secret must be defined"
        end

        if @sync_oauth.blank?
          raise "Missing OAuth credentials for BigQuery: user must authorize"
        end
      end

      def token
        # We can get a validated token (having obtained a refreshed access token) with
        # @token ||= @sync_oauth&.get_service_datasource&.token
        # But since the ODBC driver takes care of obtaining a fresh access token
        # that's unnecessary.
        @token ||= @sync_oauth&.token
      end

      # Builds (and memoizes) the fixed ODBC server attributes, performing the
      # query dry-run unless disabled by configuration.
      def fixed_odbc_attributes
        return @server_conf if @server_conf.present?

        proxy_conf = create_proxy_conf

        @server_conf = {
          Driver: DRIVER_NAME,
          SQLDialect: SQL_DIALECT,
          OAuthMechanism: OAUTH_MECHANISM,
          RefreshToken: token,
          ClientId: @oauth_config['client_id'],
          ClientSecret: @oauth_config['client_secret'],
          AllowLargeResults: ALLOW_LRESULTS,
          HTAPI_MinActivationRatio: HTAPI_MIN_ACTIVATION_RATIO,
          EnableHTAPI: ENABLE_STORAGE_API,
          UseQueryCache: QUERY_CACHE,
          HTAPI_MinResultsSize: HTAPI_MIN_RESULTS_SIZE,
          LargeResultsTempTableExpirationTime: HTAPI_TEMP_TABLE_EXP
        }

        if @params[:storage_api] == true
          @server_conf = @server_conf.merge({
            UseDefaultLargeResultsDataset: 1
          })
          if @params[:location].present?
            @params[:location].upcase!
            @server_conf = @server_conf.merge({
              UseDefaultLargeResultsDataset: 0,
              LargeResultsDataSetId: create_temp_dataset(@params[:billing_project], @params[:location])
            })
          end
        end

        unless @oauth_config['no_dry_run']
          # Perform a dry-run of the query to catch errors (API permissions, SQL syntax, etc.)
          # Note that the import may still fail if using Storage API and needed permission is missing.
          sql = @params[:sql_query] || table_query
          result = perform_dry_run(@params[:billing_project], sql)
          @dry_run_result = result
          if result[:error]
            # TODO: avoid rescuing errors in dry_run? return our own exception here?
            raise result[:client_error]
          end
          # TODO: could we make result[:total_bytes_processed] available?
        end

        @server_conf = @server_conf.merge(proxy_conf) unless proxy_conf.nil?

        @server_conf
      end

      # Creates (idempotently) a per-location temp dataset for large results.
      def create_temp_dataset(project_id, location)
        temp_dataset_id = %{#{HTAPI_TEMP_DATASET}_#{location.downcase}}
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          begin
            oauth_client.create_dataset(project_id, temp_dataset_id, {
              :default_table_expiration_ms => HTAPI_TEMP_TABLE_EXP,
              :location => location
            })
          rescue Google::Apis::ClientError => error
            # if the dataset already exists (409 conflict) do nothing
            raise error unless error.status_code == 409
          end
        end
        temp_dataset_id
      end

      def perform_dry_run(project_id, sql)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.dry_run(project_id, sql)
        else
          {
            error: false
          }
        end
      end

      def remote_schema_name
        # Note that DefaultDataset may not be defined and not needed when IMPORT FOREIGN SCHEMA
        # is used with a query (sql_query). Since it is actually ignored in that case we'll use
        # an arbitrary name in that case.
        @params[:dataset] || 'unused'
      end

      # Proxy settings from the environment, or nil when no proxy is configured.
      def create_proxy_conf
        proxy = ENV['HTTP_PROXY'] || ENV['http_proxy']
        if !proxy.nil?
          proxy = URI.parse(proxy)
          {
            ProxyHost: proxy.host,
            ProxyPort: proxy.port
          }
        end
      end
    end
  end
end
Reduce conditional nesting
require 'uri'
require_relative './odbc'
module Carto
  class Connector
    # Connector provider importing Google BigQuery tables or queries through
    # the Simba ODBC driver. Example parameters:
    # {
    #   "provider": "bigquery",
    #   "billing_project": "cartodb-on-gcp-core-team",
    #   "dataset": "f1",
    #   "table": "circuits",
    #   "import_as": "my_circuits",
    #   "storage_api": true
    # }
    class BigQueryProvider < OdbcProvider
      metadata id: 'bigquery', name: 'Google BigQuery', public?: true

      odbc_attributes billing_project: :Catalog, storage_api: :EnableHTAPI, project: :AdditionalProjects, dataset: { DefaultDataset: nil }

      # Validates the parameters; :dataset is required unless :sql_query is given.
      def errors(only_for: nil)
        parameters_to_validate = @params.normalize_parameter_names(only_for)
        dataset_errors = []
        if parameters_to_validate.blank? || parameters_to_validate.include?(:dataset)
          # dataset is not optional if not using a query
          if !@params.normalized_names.include?(:dataset) && !@params.normalized_names.include?(:sql_query)
            dataset_errors << "The dataset parameter is needed for tables"
          end
        end
        super + dataset_errors
      end

      # BigQuery provider adds the list_projects and dry_run features
      def features_information
        super.merge(list_projects: true, dry_run: true)
      end

      # True when the user's OAuth token is present and still valid.
      def check_connection
        ok = false
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          ok = oauth_client.token_valid?
        end
        ok
      end

      # Returns the user's projects, or nil when no OAuth client is available.
      def list_projects
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_projects
        end
      end

      def list_project_datasets(project_id)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_datasets(project_id)
        end
      end

      def list_project_dataset_tables(project_id, dataset_id)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.list_tables(project_id, dataset_id)
        end
      end

      # The ODBC driver expects 1/0 instead of true/false.
      def parameters_to_odbc_attributes(params, optional_params, required_params)
        super(params, optional_params, required_params).map { |k, v|
          if v == true
            v = 1
          elsif v == false
            v = 0
          end
          [k, v]
        }
      end

      def table_options
        params = super
        # due to driver limitations (users need specific permissions in
        # their projects) table imports have to be imported as sql_query
        params[:sql_query] = table_query unless params[:sql_query].present?
        params
      end

      # SELECT-all query over the configured project/dataset/table.
      def table_query
        project = @params[:project] || @params[:billing_project]
        %{SELECT * FROM `#{project}.#{@params[:dataset]}.#{@params[:table]}`;}
      end

      # Lists tables across all of the user's projects and datasets, up to
      # limits[:max_listed_tables] entries (unbounded when no limit is given).
      def list_tables(limits: {})
        tables = []
        limit = limits[:max_listed_tables]
        oauth_client = @sync_oauth&.get_service_datasource
        projects = oauth_client&.list_projects || []
        projects.each do |project|
          project_id = project[:id]
          oauth_client.list_datasets(project_id).each do |dataset|
            dataset_id = dataset[:id]
            oauth_client.list_tables(project_id, dataset_id).each do |table|
              tables << {
                schema: dataset[:qualified_name],
                name: table[:id]
              }
              # FIX: guard against a missing limit; comparing size against nil raises.
              # NOTE(review): break only exits the innermost loop, so the result
              # may slightly exceed the limit (one extra table per remaining dataset).
              break if limit && tables.size >= limit
            end
          end
        end
        tables
      end

      # Runs the dry-run (as a side effect of building the ODBC attributes)
      # and returns its cached result.
      def dry_run
        fixed_odbc_attributes unless @dry_run_result
        @dry_run_result
      end

      private

      # Notes regarding IMPORT (external) schema and the DefaultDataset parameter:
      # * For tables DefaultDataset is unnecessary (but does not harm if present),
      #   the IMPORT (external) schema is necessary and the one which defines the dataset.
      # * For queries (sql_query), IMPORT (external) schema is ignored and
      #   the DefaultDataset is necessary when table names are not qualified with the dataset.
      server_attributes %I(
        Driver
        Catalog
        SQLDialect
        OAuthMechanism
        ClientId
        ClientSecret
        EnableHTAPI
        AllowLargeResults
        UseQueryCache
        HTAPI_MinActivationRatio
        HTAPI_MinResultsSize
        UseDefaultLargeResultsDataset
        LargeResultsDataSetId
        LargeResultsTempTableExpirationTime
        AdditionalProjects
      )
      user_attributes %I(RefreshToken)
      required_parameters %I(billing_project)
      optional_parameters %I(project location import_as dataset table sql_query storage_api)

      # Class constants
      DATASOURCE_NAME = id

      # Driver constants
      DRIVER_NAME = 'Simba ODBC Driver for Google BigQuery 64-bit'
      SQL_DIALECT = 1
      OAUTH_MECHANISM = 1
      ALLOW_LRESULTS = 0
      ENABLE_STORAGE_API = 0
      QUERY_CACHE = 1
      HTAPI_MIN_ACTIVATION_RATIO = 0
      HTAPI_MIN_RESULTS_SIZE = 100
      HTAPI_TEMP_DATASET = '_cartoimport_temp'
      HTAPI_TEMP_TABLE_EXP = 3600000

      def initialize(context, params)
        super
        @oauth_config = Cartodb.get_config(:oauth, DATASOURCE_NAME)
        @sync_oauth = context&.user&.oauths&.select(DATASOURCE_NAME)
        validate_config!(context)
      end

      def validate_config!(context)
        # If a user is not provided we omit validation, because the
        # instantiated provider can be used for operations that don't require
        # a connection such as obtaining metadata (list_tables?, features_information, etc.)
        return if !context || !context.user

        if @oauth_config.nil? || @oauth_config['client_id'].nil? || @oauth_config['client_secret'].nil?
          raise "Missing OAuth configuration for BigQuery: Client ID & Secret must be defined"
        end

        if @sync_oauth.blank?
          raise "Missing OAuth credentials for BigQuery: user must authorize"
        end
      end

      def token
        # We can get a validated token (having obtained a refreshed access token) with
        # @token ||= @sync_oauth&.get_service_datasource&.token
        # But since the ODBC driver takes care of obtaining a fresh access token
        # that's unnecessary.
        @token ||= @sync_oauth&.token
      end

      # Builds (and memoizes) the fixed ODBC server attributes, performing the
      # query dry-run unless disabled by configuration.
      def fixed_odbc_attributes
        return @server_conf if @server_conf.present?

        proxy_conf = create_proxy_conf

        @server_conf = {
          Driver: DRIVER_NAME,
          SQLDialect: SQL_DIALECT,
          OAuthMechanism: OAUTH_MECHANISM,
          RefreshToken: token,
          ClientId: @oauth_config['client_id'],
          ClientSecret: @oauth_config['client_secret'],
          AllowLargeResults: ALLOW_LRESULTS,
          HTAPI_MinActivationRatio: HTAPI_MIN_ACTIVATION_RATIO,
          EnableHTAPI: ENABLE_STORAGE_API,
          UseQueryCache: QUERY_CACHE,
          HTAPI_MinResultsSize: HTAPI_MIN_RESULTS_SIZE,
          LargeResultsTempTableExpirationTime: HTAPI_TEMP_TABLE_EXP
        }

        if @params[:storage_api] == true
          @server_conf = @server_conf.merge({
            UseDefaultLargeResultsDataset: 1
          })
          if @params[:location].present?
            @params[:location].upcase!
            @server_conf = @server_conf.merge({
              UseDefaultLargeResultsDataset: 0,
              LargeResultsDataSetId: create_temp_dataset(@params[:billing_project], @params[:location])
            })
          end
        end

        unless @oauth_config['no_dry_run']
          # Perform a dry-run of the query to catch errors (API permissions, SQL syntax, etc.)
          # Note that the import may still fail if using Storage API and needed permission is missing.
          sql = @params[:sql_query] || table_query
          result = perform_dry_run(@params[:billing_project], sql)
          @dry_run_result = result
          if result[:error]
            # TODO: avoid rescuing errors in dry_run? return our own exception here?
            raise result[:client_error]
          end
          # TODO: could we make result[:total_bytes_processed] available?
        end

        @server_conf = @server_conf.merge(proxy_conf) unless proxy_conf.nil?

        @server_conf
      end

      # Creates (idempotently) a per-location temp dataset for large results.
      def create_temp_dataset(project_id, location)
        temp_dataset_id = %{#{HTAPI_TEMP_DATASET}_#{location.downcase}}
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          begin
            oauth_client.create_dataset(project_id, temp_dataset_id, {
              :default_table_expiration_ms => HTAPI_TEMP_TABLE_EXP,
              :location => location
            })
          rescue Google::Apis::ClientError => error
            # if the dataset already exists (409 conflict) do nothing
            raise error unless error.status_code == 409
          end
        end
        temp_dataset_id
      end

      def perform_dry_run(project_id, sql)
        oauth_client = @sync_oauth&.get_service_datasource
        if oauth_client
          oauth_client.dry_run(project_id, sql)
        else
          {
            error: false
          }
        end
      end

      def remote_schema_name
        # Note that DefaultDataset may not be defined and not needed when IMPORT FOREIGN SCHEMA
        # is used with a query (sql_query). Since it is actually ignored in that case we'll use
        # an arbitrary name in that case.
        @params[:dataset] || 'unused'
      end

      # Proxy settings from the environment, or nil when no proxy is configured.
      def create_proxy_conf
        proxy = ENV['HTTP_PROXY'] || ENV['http_proxy']
        if !proxy.nil?
          proxy = URI.parse(proxy)
          {
            ProxyHost: proxy.host,
            ProxyPort: proxy.port
          }
        end
      end
    end
  end
end
|
module KumoTutum
  # Gem version string (bump on each release).
  VERSION = '0.0.5'
end
Time for another release
module KumoTutum
  # Gem version string (bump on each release).
  VERSION = '0.0.6'
end
|
# -*- coding: utf-8 -*-
# Copyright 2014 TIS Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module CloudConductor
  module Adapters
    # Adapter driving OpenStack Heat orchestration through Fog.
    class OpenStackAdapter < AbstractAdapter
      TYPE = :openstack

      def initialize
      end

      # Builds a Fog orchestration client from the connection options
      # (:entry_point, :key, :secret, :tenant_id).
      def create_orchestration(options)
        ::Fog::Orchestration.new(
          provider: :OpenStack,
          openstack_auth_url: options[:entry_point].to_s + 'v2.0/tokens',
          openstack_api_key: options[:secret],
          openstack_username: options[:key],
          openstack_tenant: options[:tenant_id]
        )
      end

      # Creates a Heat stack +name+ from a template and JSON-encoded parameters.
      def create_stack(name, template, parameters, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        stack_params = {
          template: template,
          parameters: JSON.parse(parameters)
        }
        orc.create_stack name, stack_params
      end

      # Returns the status of stack +name+ as a Symbol (e.g. :CREATE_COMPLETE).
      def get_stack_status(name, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        body = (orc.list_stacks)[:body].with_indifferent_access
        target_stack = body[:stacks].find { |stack| stack[:stack_name] == name }
        target_stack[:stack_status].to_sym
      end

      # Returns the outputs of stack +name+ as a Hash of output_key => output_value.
      def get_outputs(name, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        body = (orc.list_stacks)[:body].with_indifferent_access
        target_stack = body[:stacks].find { |stack| stack[:stack_name] == name }
        # FIX: follow the stack's own 'self' link instead of reconstructing the
        # URL from entry_point/tenant_id, which produced an incorrect path.
        target_link = target_stack[:links].find { |link| link[:rel] == 'self' }
        url = URI.parse target_link[:href]
        request = Net::HTTP::Get.new url.path
        request.content_type = 'application/json'
        request.add_field 'X-Auth-Token', orc.auth_token
        response = Net::HTTP.start url.host, url.port do |http|
          http.request request
        end
        response = (JSON.parse response.body).with_indifferent_access
        target_stack = response[:stack]
        target_stack[:outputs].each_with_object({}) do |output, outputs|
          outputs[output[:output_key]] = output[:output_value]
        end
      end
    end
  end
end
Correct stack URL
# -*- coding: utf-8 -*-
# Copyright 2014 TIS Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module CloudConductor
  module Adapters
    # Adapter driving OpenStack Heat orchestration through Fog.
    class OpenStackAdapter < AbstractAdapter
      TYPE = :openstack

      def initialize
      end

      # Builds a Fog orchestration client from the connection options
      # (:entry_point, :key, :secret, :tenant_id).
      def create_orchestration(options)
        ::Fog::Orchestration.new(
          provider: :OpenStack,
          openstack_auth_url: options[:entry_point].to_s + 'v2.0/tokens',
          openstack_api_key: options[:secret],
          openstack_username: options[:key],
          openstack_tenant: options[:tenant_id]
        )
      end

      # Creates a Heat stack +name+ from a template and JSON-encoded parameters.
      def create_stack(name, template, parameters, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        stack_params = {
          template: template,
          parameters: JSON.parse(parameters)
        }
        orc.create_stack name, stack_params
      end

      # Returns the status of stack +name+ as a Symbol (e.g. :CREATE_COMPLETE).
      def get_stack_status(name, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        body = (orc.list_stacks)[:body].with_indifferent_access
        target_stack = body[:stacks].find { |stack| stack[:stack_name] == name }
        target_stack[:stack_status].to_sym
      end

      # Returns the outputs of stack +name+ as a Hash of output_key => output_value.
      # Fetches the stack detail by following its 'self' link.
      def get_outputs(name, options = {})
        options = options.with_indifferent_access
        orc = create_orchestration options
        body = (orc.list_stacks)[:body].with_indifferent_access
        target_stack = body[:stacks].find { |stack| stack[:stack_name] == name }
        target_link = target_stack[:links].find { |link| link[:rel] == 'self' }
        url = URI.parse "#{target_link[:href]}"
        request = Net::HTTP::Get.new url.path
        request.content_type = 'application/json'
        request.add_field 'X-Auth-Token', orc.auth_token
        response = Net::HTTP.start url.host, url.port do |http|
          http.request request
        end
        response = (JSON.parse response.body).with_indifferent_access
        target_stack = response[:stack]
        outputs = {}
        target_stack[:outputs].each do |output|
          outputs[output[:output_key]] = output[:output_value]
        end
        outputs
      end
    end
  end
end
|
module KynetxAmApi
  #
  # Simple wrapper to allow access to the OAuth user information. This also holds some basic user data like
  # username, name and user id.
  #
  class User
    # OAuth Request Token
    attr_accessor :request_token
    # OAuth Request Secret
    attr_accessor :request_secret
    # OAuth Verifier
    attr_accessor :oauth_verifier
    # OAuth Access Token
    attr_accessor :access_token
    # OAuth Access Secret
    attr_accessor :access_secret
    # Kynetx User name
    attr_accessor :username
    # Kynetx User ID
    attr_accessor :userid
    # Full name of user
    attr_accessor :name
    # Current Application context.
    attr_reader :current_application

    #
    # Accepts a hash that has the following entries.
    # - :request_token
    # - :request_secret
    # - :oauth_verifier
    # - :access_token
    # - :access_secret
    # - :username
    # - :userid
    # - :name
    #
    def initialize(attributes)
      @request_token = attributes[:request_token]
      @request_secret = attributes[:request_secret]
      @oauth_verifier = attributes[:oauth_verifier]
      @access_token = attributes[:access_token]
      @access_secret = attributes[:access_secret]
      @username = attributes[:username]
      @userid = attributes[:userid]
      @name = attributes[:name]
      # FIX: was assigning @current_applicaion (typo), leaving the real ivar unset.
      @current_application = nil
    end

    #
    # Returns the direct api to the Kynetx Application Manager (memoized).
    #
    def api
      @api ||= KynetxAmApi::DirectApi.new({:access_token => @access_token, :access_secret => @access_secret})
      return @api
    end

    #
    # Read applications list
    #
    # - :offset => Start in list (not implemented)
    # - :size => Number of application to list (not implemented)
    #
    # Returns a hash with two keys
    # - "apps" => Array of Hashes with :appid, :role, :name, :created
    # - "valid" => true
    #
    def applications(options = {})
      @applications ||= api.get_applist
      @applications
    end

    #
    # - :application_id => application_id
    # - :version => Version of application to obtain (defaults to "development")
    #
    def find_application(options = {})
      options[:version] ||= "development"
      raise "Expecting :application_id" unless options[:application_id]
      puts "Creating a new Application object."
      if @current_application && @current_application.application_id != options[:application_id]
        @current_application = KynetxAmApi::Application.new(self, options[:application_id], options[:version])
      else
        @current_application ||= KynetxAmApi::Application.new(self, options[:application_id], options[:version])
      end
      # rst = api.get_app_source(options[:application_id],options[:version], :krl);
      # app.source = rst;
      return @current_application
    end

    # Creates a new application and makes it the current application.
    def create_application(name, description="")
      appid = api.get_appcreate["appid"]
      @current_application = KynetxAmApi::Application.new(self, appid).create_initial_app(name, description)
      return @current_application
    end

    # Creates a copy of an existing application (KRL source only).
    def duplicate_application(application_id)
      old_app = KynetxAmApi::Application.new(self, application_id)
      new_app = create_application(old_app.name, "")
      new_app.krl = old_app.krl
      return new_app
    end

    # True when this user owns the current application; false when no
    # current application is selected.
    def owns_current?
      # FIX: guard before any dereference — previously a debug puts called
      # @current_application.name first, raising NoMethodError when nil.
      return false unless @current_application
      puts "OWNER / CURRENT_APP: #{@current_application.name}"
      puts "ME: #{self.userid.to_i} OWNER: #{@current_application.owner["kynetxuserid"].to_i}"
      return @current_application.owner["kynetxuserid"].to_i == self.userid.to_i
    end
  end
end
more docs
module KynetxAmApi
  #
  # Simple wrapper to allow access to the OAuth user information. This also holds some basic user data like
  # username, name and user id.
  #
  class User
    # OAuth Request Token
    attr_accessor :request_token
    # OAuth Request Secret
    attr_accessor :request_secret
    # OAuth Verifier
    attr_accessor :oauth_verifier
    # OAuth Access Token
    attr_accessor :access_token
    # OAuth Access Secret
    attr_accessor :access_secret
    # Kynetx User name
    attr_accessor :username
    # Kynetx User ID
    attr_accessor :userid
    # Full name of user
    attr_accessor :name
    # Current Application context.
    attr_reader :current_application

    #
    # Accepts a hash that has the following entries.
    # - :request_token
    # - :request_secret
    # - :oauth_verifier
    # - :access_token
    # - :access_secret
    # - :username
    # - :userid
    # - :name
    #
    def initialize(attributes)
      @request_token = attributes[:request_token]
      @request_secret = attributes[:request_secret]
      @oauth_verifier = attributes[:oauth_verifier]
      @access_token = attributes[:access_token]
      @access_secret = attributes[:access_secret]
      @username = attributes[:username]
      @userid = attributes[:userid]
      @name = attributes[:name]
      # FIX: was assigning @current_applicaion (typo), leaving the real ivar unset.
      @current_application = nil
    end

    #
    # Returns the direct api to the Kynetx Application Manager (memoized).
    #
    def api
      @api ||= KynetxAmApi::DirectApi.new({:access_token => @access_token, :access_secret => @access_secret})
      return @api
    end

    #
    # Read applications list
    #
    # - :offset => Start in list (not implemented)
    # - :size => Number of application to list (not implemented)
    #
    # Returns a hash with two keys
    # - "apps" => Array of Hashes with :appid, :role, :name, :created
    # - "valid" => true
    #
    def applications(options = {})
      @applications ||= api.get_applist
      @applications
    end

    #
    # - :application_id => application_id
    # - :version => Version of application to obtain (defaults to "development")
    #
    def find_application(options = {})
      options[:version] ||= "development"
      raise "Expecting :application_id" unless options[:application_id]
      puts "Creating a new Application object."
      if @current_application && @current_application.application_id != options[:application_id]
        @current_application = KynetxAmApi::Application.new(self, options[:application_id], options[:version])
      else
        @current_application ||= KynetxAmApi::Application.new(self, options[:application_id], options[:version])
      end
      # rst = api.get_app_source(options[:application_id],options[:version], :krl);
      # app.source = rst;
      return @current_application
    end

    # Creates a new application and makes it the current application.
    def create_application(name, description="")
      appid = api.get_appcreate["appid"]
      @current_application = KynetxAmApi::Application.new(self, appid).create_initial_app(name, description)
      return @current_application
    end

    # Creates a copy of an existing application (KRL source only).
    def duplicate_application(application_id)
      old_app = KynetxAmApi::Application.new(self, application_id)
      new_app = create_application(old_app.name, "")
      new_app.krl = old_app.krl
      return new_app
    end

    # True when this user owns the current application; false when no
    # current application is selected.
    def owns_current?
      # FIX: guard before any dereference — previously a debug puts called
      # @current_application.name first, raising NoMethodError when nil.
      return false unless @current_application
      puts "OWNER / CURRENT_APP: #{@current_application.name}"
      puts "ME: #{self.userid.to_i} OWNER: #{@current_application.owner["kynetxuserid"].to_i}"
      return @current_application.owner["kynetxuserid"].to_i == self.userid.to_i
    end
  end
end
# Cucumber helper mixin resolving named test-data dependencies from a YAML
# file (path configurable through TEST_CONFIG["data.yml"]).
module KnowsAboutDataDependencies
  ::TEST_CONFIG ||= {}

  def self.extended(base)
    base.instance_eval do
      path = TEST_CONFIG["data.yml"] || "config/data.yml"
      raise "The data dependencies file does not exist at #{path}" unless File.exist?(path)
      @@data_dependencies = YAML.load_file(path)
    end
  end

  # Returns a data item for +object+ matching the condition +which+.
  # When the entry is a collection a random sample is taken; +but_isnt+
  # excludes one specific item (note: it is removed from the underlying
  # data, so it won't be offered again). Marks the scenario pending when
  # no matching data exists.
  def data_for_a(object, which: "is currently available for purchase", but_isnt: nil)
    entry = @@data_dependencies[object.to_s][which]
    if entry.respond_to?(:sample)
      entry.delete_if { |item| item == but_isnt } if but_isnt
      entry = entry.sample
    end
    raise unless entry
    entry
  rescue
    pending "Test error: There is no data dependency defined for a #{object} which #{which}"
  end
end

World(KnowsAboutDataDependencies)
Better default in data_for_a
# Cucumber helper mixin resolving named test-data dependencies from a YAML
# file (path configurable through TEST_CONFIG["data.yml"]).
module KnowsAboutDataDependencies
  ::TEST_CONFIG ||= {}

  def self.extended(base)
    base.instance_eval do
      path = TEST_CONFIG["data.yml"] || "config/data.yml"
      raise "The data dependencies file does not exist at #{path}" unless File.exist?(path)
      @@data_dependencies = YAML.load_file(path)
    end
  end

  # Returns a data item for +object+ matching the condition +which+ (required).
  # When the entry is a collection a random sample is taken; +but_isnt+
  # excludes one specific item (removed from the underlying data).
  # Marks the scenario pending when no matching data exists.
  def data_for_a(object, which: nil, but_isnt: nil)
    raise ArgumentError, "Please specify a condition using `which:`" if which.nil?
    data = @@data_dependencies[object.to_s][which]
    if data.respond_to? :sample
      data.delete_if { |item| item == but_isnt } if but_isnt
      data = data.sample
    end
    raise unless data
    data
  rescue ArgumentError
    # FIX: a missing `which:` is a caller bug, not missing data — previously
    # the bare rescue below swallowed the ArgumentError and converted it into
    # a pending scenario with a nil condition. Re-raise it instead.
    raise
  rescue
    pending "Test error: There is no data dependency defined for a #{object} which #{which}"
  end
end

World(KnowsAboutDataDependencies)
require 'omf-web/theme/abstract_page'
require 'labwiki/theme/column_renderer'
module OMF::Web::Theme
  # Top-level LabWiki page: renders the top bar (brand, project/experiment
  # warning, user, logout) and the three-column slider (Plan/Prepare/Execute).
  class Page < OMF::Web::Theme::AbstractPage

    depends_on :css, "/resource/vendor/bootstrap/css/bootstrap.css"
    depends_on :css, '/resource/theme/bright/css/reset-fonts-grids.css'
    depends_on :css, "/resource/theme/bright/css/bright.css"
    depends_on :css, "/resource/theme/labwiki/css/kaiten.css"
    depends_on :css, "/resource/theme/labwiki/css/labwiki.css"
    # depends_on :js, '/resource/vendor/jquery/jquery.periodicalupdater.js'
    # depends_on :js, "/resource/vendor/jquery-ui/js/jquery-ui.min.js"
    #depends_on :js, "/resource/vendor/jquery-ui/js/jquery.ui.autocomplete.js"
    depends_on :js, "/resource/theme/labwiki/js/column_controller.js"
    depends_on :js, "/resource/theme/labwiki/js/content_selector_widget.js"
    #depends_on :js, "/resource/theme/labwiki/js/execute_col_controller.js"
    depends_on :js, "/resource/theme/labwiki/js/labwiki.js"

    # Builds one ColumnRenderer per column, keeping the fixed
    # Plan/Prepare/Execute order (index is the column's position).
    def initialize(widget, opts)
      super
      @title = "LabWiki"
      index = -1
      @col_renderers = [:plan, :prepare, :execute].map do |name|
        index += 1
        ColumnRenderer.new(name.to_s.capitalize, @widget.column_widget(name), name, index)
      end
    end

    # Emits the page body: bootstrap JavaScript globals/session id first,
    # then the top bar and the column slider markup.
    def content
      javascript %{
        if (typeof(LW) == "undefined") LW = {};
        if (typeof(LW.plugin) == "undefined") LW.plugin = {};
        LW.session_id = OML.session_id = '#{OMF::Web::SessionStore.session_id}';

        L.provide('jquery', ['/resource/vendor/jquery/jquery.js']);
        L.provide('jquery.periodicalupdater', ['/resource/vendor/jquery/jquery.periodicalupdater.js']);
        L.provide('jquery.ui', ['/resource/vendor/jquery-ui/js/jquery-ui.min.js']);

        X = null;
        /*
        $(document).ready(function() {
          X = $;
        });
        */
      }
      div :id => "container", :style => "position: relative; height: 100%;" do
        div :id => "k-window" do
          div :id => "k-topbar" do
            span 'LabWiki', :class => 'brand'
            ul :class => 'secondary-nav' do
              # Warn when the session has no associated projects/experiments.
              if OMF::Web::SessionStore[:exps, :gimi].nil?
                li :style => "padding-top: 6px; margin-right: 10px;" do
                  span :class => 'label label-warning' do
                    text "You don't have any projects or experiments associated, certain features might not function properly."
                  end
                end
              end
              li do
                a :href => '#', :class => 'user' do
                  i :class => "icon-user icon-white"
                  text OMF::Web::SessionStore[:id, :user] || 'Unknown'
                end
              end
              li do
                a :href => '/logout', :class => 'logout' do
                  i :class => "icon-off icon-white"
                  text 'Log out'
                end
              end
            end
          end
          div :id => "k-slider", :style => "height: 500px;" do
            @col_renderers.each do |renderer|
              rawtext renderer.to_html
            end
          end
        end
      end
    end

  end # class Page
end # OMF::Web::Theme
Temporarily disable the "no projects or experiments associated" warning
require 'omf-web/theme/abstract_page'
require 'labwiki/theme/column_renderer'
module OMF::Web::Theme
  # Page chrome for the LabWiki theme: declares the CSS/JS assets it needs
  # via the `depends_on` class macro, then renders a top bar plus the
  # three LabWiki columns (Plan | Prepare | Execute).
  class Page < OMF::Web::Theme::AbstractPage
    depends_on :css, "/resource/vendor/bootstrap/css/bootstrap.css"
    depends_on :css, '/resource/theme/bright/css/reset-fonts-grids.css'
    depends_on :css, "/resource/theme/bright/css/bright.css"
    depends_on :css, "/resource/theme/labwiki/css/kaiten.css"
    depends_on :css, "/resource/theme/labwiki/css/labwiki.css"
    # depends_on :js, '/resource/vendor/jquery/jquery.periodicalupdater.js'
    # depends_on :js, "/resource/vendor/jquery-ui/js/jquery-ui.min.js"
    #depends_on :js, "/resource/vendor/jquery-ui/js/jquery.ui.autocomplete.js"
    depends_on :js, "/resource/theme/labwiki/js/column_controller.js"
    depends_on :js, "/resource/theme/labwiki/js/content_selector_widget.js"
    #depends_on :js, "/resource/theme/labwiki/js/execute_col_controller.js"
    depends_on :js, "/resource/theme/labwiki/js/labwiki.js"

    # Sets the browser title and builds one ColumnRenderer per column,
    # numbered left to right starting at 0.
    def initialize(widget, opts)
      super
      @title = "LabWiki"
      index = -1
      @col_renderers = [:plan, :prepare, :execute].map do |name|
        index += 1
        ColumnRenderer.new(name.to_s.capitalize, @widget.column_widget(name), name, index)
      end
    end

    # Renders the page body with the Erector-style markup DSL.
    def content
      # Client-side bootstrap; the %{} body is JavaScript sent verbatim to
      # the browser (only #{} is interpolated by Ruby), hence flush left.
      javascript %{
if (typeof(LW) == "undefined") LW = {};
if (typeof(LW.plugin) == "undefined") LW.plugin = {};
LW.session_id = OML.session_id = '#{OMF::Web::SessionStore.session_id}';
L.provide('jquery', ['/resource/vendor/jquery/jquery.js']);
L.provide('jquery.periodicalupdater', ['/resource/vendor/jquery/jquery.periodicalupdater.js']);
L.provide('jquery.ui', ['/resource/vendor/jquery-ui/js/jquery-ui.min.js']);
X = null;
/*
$(document).ready(function() {
X = $;
});
*/
}
      div :id => "container", :style => "position: relative; height: 100%;" do
        div :id => "k-window" do
          div :id => "k-topbar" do
            span 'LabWiki', :class => 'brand'
            ul :class => 'secondary-nav' do
              # "No projects/experiments" warning banner, temporarily
              # disabled (see commit note).
              #if OMF::Web::SessionStore[:exps, :gimi].nil?
              # li :style => "padding-top: 6px; margin-right: 10px;" do
              # span :class => 'label label-warning' do
              # text "You don't have any projects or experiments associated, certain features might not function properly."
              # end
              # end
              #end
              li do
                a :href => '#', :class => 'user' do
                  i :class => "icon-user icon-white"
                  # Falls back to 'Unknown' when no user id is in session.
                  text OMF::Web::SessionStore[:id, :user] || 'Unknown'
                end
              end
              li do
                a :href => '/logout', :class => 'logout' do
                  i :class => "icon-off icon-white"
                  text 'Log out'
                end
              end
            end
          end
          # Column area: emit the pre-built Plan/Prepare/Execute renderers.
          div :id => "k-slider", :style => "height: 500px;" do
            @col_renderers.each do |renderer|
              rawtext renderer.to_html
            end
          end
        end
      end
    end
  end # class Page
end # OMF::Web::Theme
|
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/helpers/node_installer"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
LIBYAML_VERSION = "0.1.6"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.7.11"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
LATEST_JVM_VERSION = "openjdk7-latest"
LEGACY_JVM_VERSION = "openjdk1.7.0_25"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
NODE_BP_PATH = "vendor/node/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true when a Gemfile exists in the current directory
def self.use?
  instrument "ruby.use" do
    File.exist?("Gemfile")
  end
end
# Memoized, class-level BundlerWrapper (installed on first access) so the
# expensive install happens once and is shared across instances.
# @return [LanguagePack::Helpers::BundlerWrapper]
def self.bundler
  @bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
# Instance-side accessor for the shared class-level bundler wrapper.
def bundler
  self.class.bundler
end
# @param build_path [String] directory of the app being compiled
# @param cache_path [String, nil] directory used to persist the build cache
def initialize(build_path, cache_path=nil)
  super(build_path, cache_path)
  # Fetchers for vendored artifacts; MRI and Rubinius binaries are
  # stack-specific, the JVM tarballs are not.
  @fetchers[:mri]  = LanguagePack::Fetcher.new(VENDOR_URL, @stack)
  @fetchers[:jvm]  = LanguagePack::Fetcher.new(JVM_BASE_URL)
  @fetchers[:rbx]  = LanguagePack::Fetcher.new(RBX_BASE_URL, @stack)
  @node_installer  = LanguagePack::NodeInstaller.new(@stack)
end
# Human-readable name of this language pack.
# @return [String]
def name
  "Ruby"
end
# Addons to provision by default.
# @return [Array] dev database addon when the pg gem is in the bundle
def default_addons
  instrument "ruby.default_addons" do
    add_dev_database_addon
  end
end
# Default config vars for the compiled app.
# @return [Hash] always sets LANG; JRuby apps additionally get
#   JAVA_OPTS / JRUBY_OPTS / JAVA_TOOL_OPTIONS defaults
def default_config_vars
  instrument "ruby.default_config_vars" do
    vars = {
      "LANG" => env("LANG") || "en_US.UTF-8"
    }

    ruby_version.jruby? ? vars.merge({
      "JAVA_OPTS" => default_java_opts,
      "JRUBY_OPTS" => default_jruby_opts,
      "JAVA_TOOL_OPTIONS" => default_java_tool_options
    }) : vars
  end
end
# Process types every Ruby app gets even without a Procfile.
# @return [Hash] rake and console entry points, run through bundler
def default_process_types
  instrument "ruby.default_process_types" do
    types = {}
    types["rake"]    = "bundle exec rake"
    types["console"] = "bundle exec irb"
    types
  end
end
# Build entry point: installs ruby (and a JVM for JRuby), prepares the
# build environment and profile.d script, bundles the gems, writes
# database.yml, installs extra binaries and precompiles assets.
# Step order matters: each step relies on the environment set up by the
# previous ones.
def compile
  instrument 'ruby.compile' do
    # check for new app at the beginning of the compile
    new_app?
    Dir.chdir(build_path)
    remove_vendor_bundle
    install_ruby
    install_jvm
    setup_language_pack_environment
    setup_profiled
    # GIT_DIR is cleared while bundler & friends run (see #allow_git)
    allow_git do
      install_bundler_in_app
      build_bundler
      post_bundler
      create_database_yml
      install_binaries
      run_assets_precompile_rake_task
    end
    super
  end
end
private
# The base PATH environment variable for the build, binstubs first.
# @return [String] the resulting colon-joined PATH
def default_path
  # Drop the repo's own bin/ from the binstub list: those binstubs link to
  # the wrong --prefix ruby and break require (Ruby 1.9.2 and 1.8.7 only).
  safe_binstubs = binstubs_relative_paths - ["bin"]
  parts = [safe_binstubs]
  parts << "#{slug_vendor_jvm}/bin" if ruby_version.jruby?
  parts << ENV["PATH"] << "bin" << system_paths
  # Array#join flattens the nested binstub list.
  parts.join(":")
end
# Relative directories that contain binstubs, in lookup order.
# @return [Array<String>]
def binstubs_relative_paths
  ["bin", bundler_binstubs_path, "#{slug_vendor_base}/bin"]
end
# System fallback PATH entries appended after the app-specific ones.
# @return [String]
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# the relative path to the bundler directory of gems
# Memoized. For 1.8.7 the path is fixed; otherwise it is derived by asking
# ruby for its engine and ABI version.
# @return [String] resulting path
def slug_vendor_base
  instrument 'ruby.slug_vendor_base' do
    if @slug_vendor_base
      @slug_vendor_base
    elsif ruby_version.ruby_version == "1.8.7"
      @slug_vendor_base = "vendor/bundle/1.8"
    else
      # Shells out so the ruby first on PATH (the vendored one once
      # installed) reports RUBY_ENGINE / RbConfig ruby_version.
      @slug_vendor_base = run_no_pipe(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
      error "Problem detecting bundler vendor directory: #{@slug_vendor_base}" unless $?.success?
      @slug_vendor_base
    end
  end
end
# the relative path to the vendored ruby directory inside the slug
# @return [String] resulting path, e.g. "vendor/ruby-2.0.0"
def slug_vendor_ruby
  "vendor/#{ruby_version.version_without_patchlevel}"
end
# Relative directory the vendored JVM is unpacked into.
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# the absolute path of the build ruby to use during the buildpack
# (kept out of the slug under /tmp)
# @return [String] resulting path
def build_ruby_path
  "/tmp/#{ruby_version.version_without_patchlevel}"
end
# fetch the ruby version from bundler
# Memoized. Threads through whether this is a first build and which ruby
# the previous build used so RubyVersion can apply legacy defaults.
# Reuses the existing #new_app? helper instead of duplicating the
# `!File.exist?("vendor/heroku")` probe inline.
# @return [LanguagePack::RubyVersion, nil]
def ruby_version
  instrument 'ruby.ruby_version' do
    return @ruby_version if @ruby_version

    last_version_file = "buildpack_ruby_version"
    last_version      = nil
    last_version      = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)

    @ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
      is_new:       new_app?,
      last_version: last_version)
    return @ruby_version
  end
end
# Default JAVA_OPTS for JRuby apps.
# @return [String] space-separated JVM flags
def default_java_opts
  %w[-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8].join(" ")
end
# default JRUBY_OPTS
# @return [String] string of JRUBY_OPTS
def default_jruby_opts
  "-Xcompile.invokedynamic=false"
end
# default JAVA_TOOL_OPTIONS
# @return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
  "-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions, downloaded from the buildpack
# bucket (ruby_versions.yml)
# @note the value is memoized; the download happens in a throw-away temp
#   dir so nothing leaks into the build dir
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
  return @ruby_versions if @ruby_versions

  Dir.mktmpdir("ruby_versions-") do |tmpdir|
    Dir.chdir(tmpdir) do
      @fetchers[:buildpack].fetch("ruby_versions.yml")
      @ruby_versions = YAML::load_file("ruby_versions.yml")
    end
  end

  @ruby_versions
end
# sets up the environment variables for the build process
# (mutates ENV in place; the final PATH assignment must come last since
# default_path reads helpers that depend on the earlier settings)
def setup_language_pack_environment
  instrument 'ruby.setup_language_pack_environment' do
    ENV["PATH"] += ":bin" if ruby_version.jruby?
    setup_ruby_install_env
    ENV["PATH"] += ":#{node_bp_bin_path}" if node_js_installed?

    # TODO when buildpack-env-args rolls out, we can get rid of
    # ||= and the manual setting below
    config_vars = default_config_vars.each do |key, value|
      ENV[key] ||= value
    end
    ENV["GEM_PATH"] = slug_vendor_base
    ENV["GEM_HOME"] = slug_vendor_base
    ENV["PATH"] = default_path
  end
end
# sets up the profile.d script for this buildpack so runtime dynos get the
# same GEM_PATH/LANG/PATH (and JVM options on JRuby) as the build
def setup_profiled
  instrument 'setup_profiled' do
    set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
    set_env_default  "LANG",     "en_US.UTF-8"
    set_env_override "PATH",     binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":") + ":$PATH"

    if ruby_version.jruby?
      set_env_default "JAVA_OPTS", default_java_opts
      set_env_default "JRUBY_OPTS", default_jruby_opts
      set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
    end
  end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  instrument 'ruby.install_ruby' do
    return false unless ruby_version

    invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version.version}
Valid versions: #{ruby_versions.join(", ")}
ERROR

    # "ruby-X.Y.Z-build" tarballs run the build itself; they are unpacked
    # into /tmp so they never end up in the slug.
    if ruby_version.build?
      FileUtils.mkdir_p(build_ruby_path)
      Dir.chdir(build_ruby_path) do
        ruby_vm = "ruby"
        instrument "ruby.fetch_build_ruby" do
          @fetchers[:mri].fetch_untar("#{ruby_version.version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
        end
      end
      error invalid_ruby_version_message unless $?.success?
    end

    # The runtime ruby goes into the slug under vendor/.
    FileUtils.mkdir_p(slug_vendor_ruby)
    Dir.chdir(slug_vendor_ruby) do
      instrument "ruby.fetch_ruby" do
        if ruby_version.rbx?
          # Rubinius binaries ship with a .sha1 companion file; verify the
          # checksum before unpacking.
          file = "#{ruby_version.version}.tar.bz2"
          sha_file = "#{file}.sha1"
          @fetchers[:rbx].fetch(file)
          @fetchers[:rbx].fetch(sha_file)

          expected_checksum = File.read(sha_file).chomp
          actual_checksum = Digest::SHA1.file(file).hexdigest

          error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
ERROR_MSG

          run("tar jxf #{file}")
          FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
          FileUtils.rm_rf("app")
          FileUtils.rm(file)
          FileUtils.rm(sha_file)
        else
          @fetchers[:mri].fetch_untar("#{ruby_version.version}.tgz")
        end
      end
    end
    error invalid_ruby_version_message unless $?.success?

    # Expose the vendored ruby through app-level bin/ symlinks.
    # NOTE(review): the ruby.exe link is presumably for apps whose config
    # references `ruby.exe` (Windows-generated) -- confirm before removing.
    app_bin_dir = "bin"
    FileUtils.mkdir_p app_bin_dir
    run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
    Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
      run("ln -s ../#{vendor_bin} #{app_bin_dir}")
    end
    @metadata.write("buildpack_ruby_version", ruby_version.version)

    topic "Using Ruby version: #{ruby_version.version}"
    if !ruby_version.set
      warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information.
WARNING
    end
  end

  true
end
# True when this is the first build of the app (no vendor/heroku metadata
# directory left by a previous build).
# Memoized with a defined? guard: the previous `||=` silently re-ran the
# filesystem check on every call whenever the answer was false.
# @return [Boolean]
def new_app?
  @new_app = !File.exist?("vendor/heroku") unless defined?(@new_app)
  @new_app
end
# vendors JVM into the slug for JRuby (or, when forced, for non-JRuby apps
# that need Java at runtime -- see #post_bundler)
# @param forced [Boolean] install even when the app is not JRuby
def install_jvm(forced = false)
  instrument 'ruby.install_jvm' do
    if ruby_version.jruby? || forced
      # JRuby engines >= 1.7.4 (and forced installs) get the latest JDK;
      # older engines stay on the pinned legacy build.
      jvm_version =
        if forced || Gem::Version.new(ruby_version.engine_version) >= Gem::Version.new("1.7.4")
          LATEST_JVM_VERSION
        else
          LEGACY_JVM_VERSION
        end

      topic "Installing JVM: #{jvm_version}"

      FileUtils.mkdir_p(slug_vendor_jvm)
      Dir.chdir(slug_vendor_jvm) do
        @fetchers[:jvm].fetch_untar("#{jvm_version}.tar.gz")
      end

      # Expose the JVM binaries through app-level bin/ symlinks.
      bin_dir = "bin"
      FileUtils.mkdir_p bin_dir
      Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
        run("ln -s ../#{bin} #{bin_dir}")
      end
    end
  end
end
# Finds the ruby install path for its binstubs during build.
# Memoized; the cached value is truthy even when it is "" -- the guard
# only skips recomputation once a non-nil value is stored.
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
  return @ruby_install_binstub_path if @ruby_install_binstub_path

  @ruby_install_binstub_path =
    if ruby_version.build?
      "#{build_ruby_path}/bin"
    elsif ruby_version
      "#{slug_vendor_ruby}/bin"
    else
      ""
    end
end
# setup the environment so we can use the vendored ruby: its binstub dir
# goes first on PATH; JRuby additionally gets JAVA_OPTS for the build
def setup_ruby_install_env
  instrument 'ruby.setup_ruby_install_env' do
    ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"

    if ruby_version.jruby?
      ENV['JAVA_OPTS'] = default_java_opts
    end
  end
end
# installs vendored gems into the slug by copying the pre-installed
# bundler gem tree into the app's gem dir
# NOTE(review): the instrument key still reads "install_language_pack_gems"
# -- appears to be a historical name, kept for metric continuity.
def install_bundler_in_app
  instrument 'ruby.install_language_pack_gems' do
    FileUtils.mkdir_p(slug_vendor_base)
    Dir.chdir(slug_vendor_base) do |dir|
      `cp -R #{bundler.bundler_path}/. .`
    end
  end
end
# default set of binaries to install
# @return [Array] the node binary path when the bundle needs a JS runtime,
#   otherwise an empty Array
def binaries
  add_node_js_binary
end
# vendors binaries into the slug and marks everything in bin/ executable
def install_binaries
  instrument 'ruby.install_binaries' do
    binaries.each {|binary| install_binary(binary) }
    Dir["bin/*"].each {|path| run("chmod +x #{path}") }
  end
end
# vendors individual binary into the slug
# @param name [String] name of the binary package from S3.
#   Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir.chdir(bin_dir) do |dir|
    # node has a dedicated installer; everything else is a plain tarball
    if name.match(/^node\-/)
      @node_installer.install
    else
      @fetchers[:buildpack].fetch_untar("#{name}.tgz")
    end
  end
end
# Removes a binary from the slug's bin/ directory.
# A no-op when the file is absent (:force suppresses the error).
# @param path [String] relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  FileUtils.rm(target, :force => true)
end
# whether to seed the default bundler cache: only for brand-new apps on
# the default ruby version
# @return [Boolean]
def load_default_cache?
  new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
# NOTE(review): currently disabled outright via the hard-coded `if false`;
# the intended condition survives in the trailing comment.
def load_default_cache
  instrument "ruby.load_default_cache" do
    if false # load_default_cache?
      puts "New app detected loading default bundler cache"
      patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
      cache_name  = "#{DEFAULT_RUBY_VERSION}-p#{patchlevel}-default-cache"
      @fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
    end
  end
end
# install libyaml into the LP to be referenced for psych compilation
# @param dir [String] tmpdir to store the libyaml files
def install_libyaml(dir)
  instrument 'ruby.install_libyaml' do
    FileUtils.mkdir_p dir
    Dir.chdir(dir) do |dir|
      @fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
    end
  end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
# Uses File.exist? -- File.exists? is deprecated and removed in Ruby 3.2.
def remove_vendor_bundle
  if File.exist?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Directory bundler writes app binstubs into during `bundle install`.
# @return [String]
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# runs bundler to install the dependencies
# (command assembly -> cache restore -> bundle install inside a libyaml
# compile environment -> cache store or error reporting)
def build_bundler
  instrument 'ruby.build_bundler' do
    log("bundle") do
      bundle_without = env("BUNDLE_WITHOUT") || "development:test"
      bundle_bin     = "bundle"
      bundle_command = "#{bundle_bin} _#{BUNDLER_VERSION}_ install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
      bundle_command << " -j4"

      # A Windows-generated lockfile cannot be trusted (see warning text):
      # drop it and let bundler fully re-resolve, which rules out
      # --deployment mode.
      if bundler.windows_gemfile_lock?
        warn(<<WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
https://devcenter.heroku.com/articles/bundler-windows-gemfile
WARNING

        log("bundle", "has_windows_gemfile_lock")
        File.unlink("Gemfile.lock")
      else
        # using --deployment is preferred if we can
        bundle_command += " --deployment"
        cache.load ".bundle"
      end

      topic("Installing dependencies using #{bundler.version}")
      load_bundler_cache

      bundler_output = ""
      bundle_time    = nil
      Dir.mktmpdir("libyaml-") do |tmpdir|
        libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
        install_libyaml(libyaml_dir)

        # need to setup compile environment for the psych gem
        yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
        yaml_lib     = File.expand_path("#{libyaml_dir}/lib").shellescape
        pwd          = Dir.pwd
        bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
        # we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
        # codon since it uses bundler.
        env_vars = {
          "BUNDLE_GEMFILE"                => "#{pwd}/Gemfile",
          "BUNDLE_CONFIG"                 => "#{pwd}/.bundle/config",
          "CPATH"                         => noshellescape("#{yaml_include}:$CPATH"),
          "CPPATH"                        => noshellescape("#{yaml_include}:$CPPATH"),
          "LIBRARY_PATH"                  => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
          "RUBYOPT"                       => syck_hack,
          "NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true"
        }
        env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
        puts "Running: #{bundle_command}"
        instrument "ruby.bundle_install" do
          bundle_time = Benchmark.realtime do
            bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
          end
        end
      end

      if $?.success?
        puts "Bundle completed (#{"%.2f" % bundle_time}s)"
        log "bundle", :status => "success"
        puts "Cleaning up the bundler cache."
        instrument "ruby.bundle_clean" do
          # Only show bundle clean output when not using default cache
          if load_default_cache?
            run "bundle clean > /dev/null"
          else
            pipe("#{bundle_bin} clean", out: "2> /dev/null")
          end
        end
        cache.store ".bundle"
        @bundler_cache.store

        # Keep gem cache out of the slug
        FileUtils.rm_rf("#{slug_vendor_base}/cache")
      else
        log "bundle", :status => "failure"
        error_message = "Failed to install gems via Bundler."
        puts "Bundler Output: #{bundler_output}"
        # sqlite3 native builds fail on Heroku; add a doc pointer when the
        # output matches.
        if bundler_output.match(/An error occurred while installing sqlite3/)
          error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
        end

        error error_message
      end
    end
  end
end
# post-bundle fixups: yui-compressor needs a Java runtime, so a JVM is
# force-installed even for non-JRuby apps that bundle it
def post_bundler
  if bundler.has_gem?('yui-compressor') && !ruby_version.jruby?
    install_jvm(true)
    ENV["PATH"] += ":bin"
  end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
  instrument "ruby.syck_hack" do
    syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
    # shells out so the ruby first on PATH reports its version
    rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
    # < 1.9.3 includes syck, so we need to use the syck hack
    if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
      "-r#{syck_hack_file}"
    else
      ""
    end
  end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
# No-op when the app has no config/ directory.
def create_database_yml
  instrument 'ruby.create_database_yml' do
    log("create_database_yml") do
      return unless File.directory?("config")
      topic("Writing config/database.yml to read from DATABASE_URL")
      File.open("config/database.yml", "w") do |file|
        # The heredoc is an ERB *template* evaluated by the app at boot,
        # not by the buildpack. The \#{...} escapes keep those
        # interpolations out of this method's own string interpolation;
        # its lines are kept verbatim so the generated file is unchanged.
        file.puts <<-DATABASE_YML
<%
 require 'cgi'
 require 'uri'
 begin
 uri = URI.parse(ENV["DATABASE_URL"])
 rescue URI::InvalidURIError
 raise "Invalid DATABASE_URL"
 end
 raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
 def attribute(name, value, force_string = false)
 if value
 value_string =
 if force_string
 '"' + value + '"'
 else
 value
 end
 "\#{name}: \#{value_string}"
 else
 ""
 end
 end
 adapter = uri.scheme
 adapter = "postgresql" if adapter == "postgres"
 database = (uri.path || "").split("/")[1]
 username = uri.user
 password = uri.password
 host = uri.host
 port = uri.port
 params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
 <%= attribute "adapter", adapter %>
 <%= attribute "database", database %>
 <%= attribute "username", username %>
 <%= attribute "password", password, true %>
 <%= attribute "host", host %>
 <%= attribute "port", port %>
<% params.each do |key, value| %>
 <%= key %>: <%= value.first %>
<% end %>
        DATABASE_YML
      end
    end
  end
end
# Lazily builds the rake task runner. Rake tasks are loadable when the app
# bundles rake or the vendored ruby ships it.
def rake
  @rake ||= LanguagePack::Helpers::RakeRunner.new(
              bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
            ).load_rake_tasks!(env: rake_env)
end
# Environment handed to rake invocations: DATABASE_URL (when present),
# with user-supplied env vars taking precedence via merge.
def rake_env
  base = {}
  base["DATABASE_URL"] = database_url if database_url
  base.merge(user_env_hash)
end
# DATABASE_URL from the build environment, or nil when unset/blank-falsy.
# Reads env once instead of the previous `env(...) if env(...)` double call.
def database_url
  url = env("DATABASE_URL")
  url if url
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# Restores the previous value even when the block raises (the original
# implementation lost GIT_DIR on any exception).
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # assigning nil removes the key again, matching the original behavior
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
  bundler.has_gem?("pg") ? ['heroku-postgresql:hobby-dev'] : []
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it (execjs bundled
#   and node not already provided by another buildpack) or an empty Array
def add_node_js_binary
  bundler.has_gem?('execjs') && !node_js_installed? ? [@node_installer.binary_path] : []
end
# absolute path of the bin dir the official node.js buildpack installs to
# @return [String]
def node_bp_bin_path
  "#{Dir.pwd}/#{NODE_BP_PATH}"
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# Memoized with a defined? guard: the previous `||=` re-ran the node probe
# on every call whenever node was absent (a false result was never cached).
# @return [Boolean] true if it's detected and false if it isn't
def node_js_installed?
  unless defined?(@node_js_installed)
    @node_js_installed = run("#{node_bp_bin_path}/node -v") && $?.success?
  end
  @node_js_installed
end
# runs the app's assets:precompile rake task when one is defined; reports
# timing on success and raises a build error via #precompile_fail otherwise
def run_assets_precompile_rake_task
  instrument 'ruby.run_assets_precompile_rake_task' do
    precompile = rake.task("assets:precompile")
    return true unless precompile.is_defined?

    topic "Precompiling assets"
    precompile.invoke(env: rake_env)
    if precompile.success?
      puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
    else
      precompile_fail(precompile.output)
    end
  end
end
# logs and raises a build error for a failed asset precompile; adds a
# pointer to the database docs when the output looks like an attempt to
# reach a local/unprovisioned database
# @param output [String] captured output of the rake task
def precompile_fail(output)
  log "assets_precompile", :status => "failure"
  msg = "Precompiling assets failed.\n"
  if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
    msg << "Attempted to access a nonexistent database:\n"
    msg << "https://devcenter.heroku.com/articles/pre-provision-database\n"
  end
  error msg
end
# Location of the cached bundle inside the build directory.
# @return [String]
def bundler_cache
  File.join("vendor", "bundle")
end
# Restores the cached vendor/bundle from a previous build, purging it
# whenever stack/ruby/rubygems/buildpack changes would poison compiled
# gems, then records the current versions into vendor/heroku metadata.
# Fix: File.exists? (deprecated, removed in Ruby 3.2) -> File.exist?;
# corrected the mislabeled "nokogiri" comment on the psych/CVE branch.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"

    full_ruby_version       = run_stdout(%q(ruby -v)).chomp
    rubygems_version        = run_stdout(%q(gem -v)).chomp
    heroku_metadata         = "vendor/heroku"
    old_rubygems_version    = nil
    ruby_version_cache      = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    bundler_version_cache   = "bundler_version"
    rubygems_version_cache  = "rubygems_version"
    stack_cache             = "stack"

    # NOTE(review): reads the *ruby* version cache into
    # old_rubygems_version -- pre-existing naming slip, kept as-is since
    # the value is only compared against "ruby 2.0.0p0" era entries below.
    old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
    old_stack = @metadata.read(stack_cache).chomp if @metadata.exists?(stack_cache)
    old_stack ||= DEFAULT_LEGACY_STACK

    stack_change  = old_stack != @stack
    convert_stack = @bundler_cache.old?
    @bundler_cache.convert_stack(stack_change) if convert_stack
    if !new_app? && stack_change
      puts "Purging Cache. Changing stack from #{old_stack} to #{@stack}"
      purge_bundler_cache(old_stack)
    elsif !new_app? && !convert_stack
      @bundler_cache.load
    end

    # fix bug from v37 deploy
    if File.exist?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
      # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    elsif (@bundler_cache.exists? || @bundler_cache.old?) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end

    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exist?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
        (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
        @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end

    # recompile psych to use new libyaml
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
      puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
      puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
      purge_bundler_cache
    end

    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.write(stack_cache, @stack, false)
    @metadata.save
  end
end
# clears the cached gems and re-installs bundler (which lives inside the
# directory being purged)
# @param stack [String, nil] stack whose bundler cache should be cleared
def purge_bundler_cache(stack = nil)
  instrument "ruby.purge_bundler_cache" do
    @bundler_cache.clear(stack)
    # need to reinstall language pack gems
    install_bundler_in_app
  end
end
end
Revert "Upgrade to bundler 1.7.11"
This reverts commit fe06ad15631d06effdaf8d22e624364614fd99b3.
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/helpers/node_installer"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
LIBYAML_VERSION = "0.1.6"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.6.3"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
LATEST_JVM_VERSION = "openjdk7-latest"
LEGACY_JVM_VERSION = "openjdk1.7.0_25"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
NODE_BP_PATH = "vendor/node/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
instrument "ruby.use" do
File.exist?("Gemfile")
end
end
def self.bundler
@bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
def bundler
self.class.bundler
end
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
@fetchers[:mri] = LanguagePack::Fetcher.new(VENDOR_URL, @stack)
@fetchers[:jvm] = LanguagePack::Fetcher.new(JVM_BASE_URL)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL, @stack)
@node_installer = LanguagePack::NodeInstaller.new(@stack)
end
def name
"Ruby"
end
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
def default_config_vars
instrument "ruby.default_config_vars" do
vars = {
"LANG" => env("LANG") || "en_US.UTF-8"
}
ruby_version.jruby? ? vars.merge({
"JAVA_OPTS" => default_java_opts,
"JRUBY_OPTS" => default_jruby_opts,
"JAVA_TOOL_OPTIONS" => default_java_tool_options
}) : vars
end
end
# Process types every Ruby app gets even without a Procfile.
# @return [Hash] rake and console entry points, run through bundler
def default_process_types
  instrument "ruby.default_process_types" do
    types = {}
    types["rake"]    = "bundle exec rake"
    types["console"] = "bundle exec irb"
    types
  end
end
def compile
instrument 'ruby.compile' do
# check for new app at the beginning of the compile
new_app?
Dir.chdir(build_path)
remove_vendor_bundle
install_ruby
install_jvm
setup_language_pack_environment
setup_profiled
allow_git do
install_bundler_in_app
build_bundler
post_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
super
end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
# need to remove bin/ folder since it links
# to the wrong --prefix ruby binstubs
# breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
safe_binstubs = binstubs_relative_paths - ["bin"]
paths = [
ENV["PATH"],
"bin",
system_paths,
]
paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
paths.unshift(safe_binstubs)
paths.join(":")
end
# Relative directories that contain binstubs, in lookup order.
# @return [Array<String>]
def binstubs_relative_paths
  ["bin", bundler_binstubs_path, "#{slug_vendor_base}/bin"]
end
# System fallback PATH entries appended after the app-specific ones.
# @return [String]
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# the relative path to the bundler directory of gems
# @return [String] resulting path
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
if @slug_vendor_base
@slug_vendor_base
elsif ruby_version.ruby_version == "1.8.7"
@slug_vendor_base = "vendor/bundle/1.8"
else
@slug_vendor_base = run_no_pipe(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
error "Problem detecting bundler vendor directory: #{@slug_vendor_base}" unless $?.success?
@slug_vendor_base
end
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version.version_without_patchlevel}"
end
# Relative directory the vendored JVM is unpacked into.
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version.version_without_patchlevel}"
end
# fetch the ruby version from bundler
# Memoized. Threads through whether this is a first build and which ruby
# the previous build used so RubyVersion can apply legacy defaults.
# Reuses the existing #new_app? helper instead of duplicating the
# `!File.exist?("vendor/heroku")` probe inline.
# @return [LanguagePack::RubyVersion, nil]
def ruby_version
  instrument 'ruby.ruby_version' do
    return @ruby_version if @ruby_version

    last_version_file = "buildpack_ruby_version"
    last_version      = nil
    last_version      = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)

    @ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
      is_new:       new_app?,
      last_version: last_version)
    return @ruby_version
  end
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
"-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
"-Xcompile.invokedynamic=false"
end
# default JAVA_TOOL_OPTIONS
# return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
"-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
return @ruby_versions if @ruby_versions
Dir.mktmpdir("ruby_versions-") do |tmpdir|
Dir.chdir(tmpdir) do
@fetchers[:buildpack].fetch("ruby_versions.yml")
@ruby_versions = YAML::load_file("ruby_versions.yml")
end
end
@ruby_versions
end
# sets up the environment variables for the build process
def setup_language_pack_environment
instrument 'ruby.setup_language_pack_environment' do
ENV["PATH"] += ":bin" if ruby_version.jruby?
setup_ruby_install_env
ENV["PATH"] += ":#{node_bp_bin_path}" if node_js_installed?
# TODO when buildpack-env-args rolls out, we can get rid of
# ||= and the manual setting below
config_vars = default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_PATH"] = slug_vendor_base
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = default_path
end
end
# sets up the profile.d script for this buildpack
def setup_profiled
instrument 'setup_profiled' do
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "PATH", binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":") + ":$PATH"
if ruby_version.jruby?
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
end
end
end
# Installs the vendored ruby into the slug and symlinks its binstubs
# into the app's bin/ directory.
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  instrument 'ruby.install_ruby' do
    return false unless ruby_version
    invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version.version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
    # a "build ruby" is a separate ruby used only while building
    if ruby_version.build?
      FileUtils.mkdir_p(build_ruby_path)
      Dir.chdir(build_ruby_path) do
        ruby_vm = "ruby"
        instrument "ruby.fetch_build_ruby" do
          @fetchers[:mri].fetch_untar("#{ruby_version.version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
        end
      end
      error invalid_ruby_version_message unless $?.success?
    end

    FileUtils.mkdir_p(slug_vendor_ruby)
    Dir.chdir(slug_vendor_ruby) do
      instrument "ruby.fetch_ruby" do
        if ruby_version.rbx?
          # Rubinius tarballs ship a SHA1 checksum file; verify before untarring
          file = "#{ruby_version.version}.tar.bz2"
          sha_file = "#{file}.sha1"
          @fetchers[:rbx].fetch(file)
          @fetchers[:rbx].fetch(sha_file)

          expected_checksum = File.read(sha_file).chomp
          actual_checksum = Digest::SHA1.file(file).hexdigest

          error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
ERROR_MSG

          run("tar jxf #{file}")
          FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
          FileUtils.rm_rf("app")
          FileUtils.rm(file)
          FileUtils.rm(sha_file)
        else
          @fetchers[:mri].fetch_untar("#{ruby_version.version}.tgz")
        end
      end
    end
    error invalid_ruby_version_message unless $?.success?

    # expose the vendored ruby's binstubs in the app's bin/
    app_bin_dir = "bin"
    FileUtils.mkdir_p app_bin_dir
    # ruby.exe alias — presumably for Gemfiles generated on Windows; TODO confirm
    run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
    Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
      run("ln -s ../#{vendor_bin} #{app_bin_dir}")
    end
    @metadata.write("buildpack_ruby_version", ruby_version.version)

    topic "Using Ruby version: #{ruby_version.version}"
    if !ruby_version.set
      warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information.
WARNING
    end
  end
  true
end
# Whether this is a brand-new app, detected by the absence of the
# vendor/heroku metadata directory left behind by previous builds.
# @return [Boolean]
def new_app?
  @new_app ||= File.exist?("vendor/heroku") ? false : true
end
# Vendors the JVM into the slug for JRuby, or when forced (used by
# post_bundler for gems such as yui-compressor that need Java on MRI).
# @param [Boolean] forced install even when the ruby is not JRuby
def install_jvm(forced = false)
  instrument 'ruby.install_jvm' do
    if ruby_version.jruby? || forced
      jvm_version =
        if forced || Gem::Version.new(ruby_version.engine_version) >= Gem::Version.new("1.7.4")
          LATEST_JVM_VERSION
        else
          LEGACY_JVM_VERSION
        end

      topic "Installing JVM: #{jvm_version}"

      FileUtils.mkdir_p(slug_vendor_jvm)
      Dir.chdir(slug_vendor_jvm) do
        @fetchers[:jvm].fetch_untar("#{jvm_version}.tar.gz")
      end

      # expose JVM binaries on the app's bin/ via relative symlinks
      bin_dir = "bin"
      FileUtils.mkdir_p bin_dir
      Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
        run("ln -s ../#{bin} #{bin_dir}")
      end
    end
  end
end
# Finds the ruby install path for its binstubs during build.
# @note memoized in @ruby_install_binstub_path
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
  @ruby_install_binstub_path ||=
    if ruby_version.build?
      "#{build_ruby_path}/bin"    # dedicated build ruby
    elsif ruby_version
      "#{slug_vendor_ruby}/bin"   # the vendored runtime ruby
    else
      ""                          # no vendored ruby at all
    end
end
# Sets up the environment so the vendored ruby is used during build
# (prepends its binstub dir to PATH; JRuby also gets JAVA_OPTS).
def setup_ruby_install_env
  instrument 'ruby.setup_ruby_install_env' do
    ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"

    if ruby_version.jruby?
      ENV['JAVA_OPTS'] = default_java_opts
    end
  end
end
# Installs the language pack's vendored bundler copy into the slug's
# gem directory so `bundle` is runnable inside the app.
def install_bundler_in_app
  instrument 'ruby.install_language_pack_gems' do
    FileUtils.mkdir_p(slug_vendor_base)
    Dir.chdir(slug_vendor_base) do |dir|
      `cp -R #{bundler.bundler_path}/. .`
    end
  end
end
# Default set of binaries to install (currently only node, when needed).
# @return [Array] resulting list
def binaries
  add_node_js_binary
end
# Vendors every required binary into the slug and marks bin/* executable.
def install_binaries
  instrument 'ruby.install_binaries' do
    binaries.each {|binary| install_binary(binary) }
    Dir["bin/*"].each {|path| run("chmod +x #{path}") }
  end
end
# Vendors an individual binary into the slug's bin/ directory.
# @param [String] name of the binary package from S3.
#   Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir.chdir(bin_dir) do |dir|
    # node has a dedicated installer; everything else is untarred as-is
    if name.match(/^node\-/)
      @node_installer.install
    else
      @fetchers[:buildpack].fetch_untar("#{name}.tgz")
    end
  end
end
# Removes a vendored binary from the slug's bin/ directory.
# No-op when the file is absent (force: true).
# @param [String] path relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  FileUtils.rm(target, :force => true)
end
# Whether the default bundler cache should be loaded: only for brand
# new apps running the default ruby version.
def load_default_cache?
  new_app? && ruby_version.default?
end
# Loads a default bundler cache for new apps to speed up initial bundle installs.
# NOTE: currently disabled via the hard-coded `if false` guard below.
def load_default_cache
  instrument "ruby.load_default_cache" do
    if false # load_default_cache?
      puts "New app detected loading default bundler cache"
      patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
      cache_name = "#{DEFAULT_RUBY_VERSION}-p#{patchlevel}-default-cache"
      @fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
    end
  end
end
# Installs libyaml into the LP to be referenced for psych compilation.
# @param [String] dir directory to store the libyaml files
def install_libyaml(dir)
  instrument 'ruby.install_libyaml' do
    FileUtils.mkdir_p dir
    Dir.chdir(dir) do |dir|
      @fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
    end
  end
end
# Removes a `vendor/bundle` directory that was checked into the git repo,
# in case it contains native extensions built for the wrong platform.
# Users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?
  if File.exist?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Relative path where bundler writes the generated binstubs.
# @return [String]
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# Runs bundler to install the app's dependencies, loading/storing the
# bundler cache and setting up the compile environment (libyaml, syck
# hack) needed by native extensions such as psych.
def build_bundler
  instrument 'ruby.build_bundler' do
    log("bundle") do
      bundle_without = env("BUNDLE_WITHOUT") || "development:test"
      bundle_bin     = "bundle"
      bundle_command = "#{bundle_bin} _#{BUNDLER_VERSION}_ install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
      bundle_command << " -j4"

      if bundler.windows_gemfile_lock?
        # Windows lockfiles can't be trusted for native gems; drop and re-resolve
        warn(<<WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
https://devcenter.heroku.com/articles/bundler-windows-gemfile
WARNING
        log("bundle", "has_windows_gemfile_lock")
        File.unlink("Gemfile.lock")
      else
        # using --deployment is preferred if we can
        bundle_command += " --deployment"
        cache.load ".bundle"
      end

      topic("Installing dependencies using #{bundler.version}")
      load_bundler_cache

      bundler_output = ""
      bundle_time    = nil
      Dir.mktmpdir("libyaml-") do |tmpdir|
        libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
        install_libyaml(libyaml_dir)

        # need to setup compile environment for the psych gem
        yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
        yaml_lib     = File.expand_path("#{libyaml_dir}/lib").shellescape
        pwd          = Dir.pwd
        bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
        # we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
        # codon since it uses bundler.
        env_vars = {
          "BUNDLE_GEMFILE"                => "#{pwd}/Gemfile",
          "BUNDLE_CONFIG"                 => "#{pwd}/.bundle/config",
          "CPATH"                         => noshellescape("#{yaml_include}:$CPATH"),
          "CPPATH"                        => noshellescape("#{yaml_include}:$CPPATH"),
          "LIBRARY_PATH"                  => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
          "RUBYOPT"                       => syck_hack,
          "NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true"
        }
        env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"

        puts "Running: #{bundle_command}"
        instrument "ruby.bundle_install" do
          bundle_time = Benchmark.realtime do
            bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
          end
        end
      end

      if $?.success?
        puts "Bundle completed (#{"%.2f" % bundle_time}s)"
        log "bundle", :status => "success"
        puts "Cleaning up the bundler cache."
        instrument "ruby.bundle_clean" do
          # Only show bundle clean output when not using default cache
          if load_default_cache?
            run "bundle clean > /dev/null"
          else
            pipe("#{bundle_bin} clean", out: "2> /dev/null")
          end
        end
        cache.store ".bundle"
        @bundler_cache.store

        # Keep gem cache out of the slug
        FileUtils.rm_rf("#{slug_vendor_base}/cache")
      else
        log "bundle", :status => "failure"
        error_message = "Failed to install gems via Bundler."
        puts "Bundler Output: #{bundler_output}"
        if bundler_output.match(/An error occurred while installing sqlite3/)
          error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
        end

        error error_message
      end
    end
  end
end
# Post-bundle hook: yui-compressor needs a Java runtime even on MRI,
# so force-install the JVM and expose its bin dir on the PATH.
def post_bundler
  if bundler.has_gem?('yui-compressor') && !ruby_version.jruby?
    install_jvm(true)
    ENV["PATH"] += ":bin"
  end
end
# RUBYOPT line that requires the syck_hack shim file for old rubies.
# @return [String] require string if needed or else an empty string
def syck_hack
  instrument "ruby.syck_hack" do
    syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
    # ask the ruby on PATH (the vendored one) for its version
    rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
    # < 1.9.3 includes syck, so we need to use the syck hack
    if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
      "-r#{syck_hack_file}"
    else
      ""
    end
  end
end
# Writes an ERB based database.yml for Rails. The database.yml reads
# DATABASE_URL from the environment at runtime and expands it into
# adapter/database/username/password/host/port attributes.
def create_database_yml
  instrument 'ruby.create_database_yml' do
    log("create_database_yml") do
      # only Rails-style apps with a config/ directory get one
      return unless File.directory?("config")
      topic("Writing config/database.yml to read from DATABASE_URL")
      File.open("config/database.yml", "w") do |file|
        file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
      end
    end
  end
end
# Lazily builds the rake runner used to inspect and invoke rake tasks.
# Rake is considered loadable when the app bundles it or the vendored
# ruby ships it.
def rake
  @rake ||= begin
    LanguagePack::Helpers::RakeRunner.new(
      bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
    ).load_rake_tasks!(env: rake_env)
  end
end
# Environment hash passed to rake invocations: DATABASE_URL (when
# present) overlaid with the user's own config vars.
def rake_env
  if database_url
    { "DATABASE_URL" => database_url }
  else
    {}
  end.merge(user_env_hash)
end
# The app's DATABASE_URL from the (possibly user-provided) environment.
# @return [String, nil] the value when present, otherwise nil
def database_url
  # read the variable once instead of calling env twice
  url = env("DATABASE_URL")
  url if url
end
# Executes the block with the GIT_DIR environment variable removed,
# since GIT_DIR can mess with the working directory git thinks it's in.
# The previous value is restored even if the block raises.
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # restore (or keep unset, when it was nil) even on error
  ENV["GIT_DIR"] = git_dir
end
# Decides if we need to enable the dev database addon.
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
  bundler.has_gem?("pg") ? ['heroku-postgresql:hobby-dev'] : []
end
# Decides if we need to install the node.js binary.
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
  bundler.has_gem?('execjs') && !node_js_installed? ? [@node_installer.binary_path] : []
end
# Absolute path of the node bin dir installed by heroku-buildpack-nodejs.
# @return [String]
def node_bp_bin_path
  File.join(Dir.pwd, NODE_BP_PATH)
end
# Checks if node.js is installed via the official heroku-buildpack-nodejs
# using multibuildpack, by running `node -v`.
# @return [Boolean] true if it's detected and false if it isn't
def node_js_installed?
  # memoize with defined? so a false result is cached too;
  # `||=` would re-run the external command on every call when node is absent
  return @node_js_installed if defined?(@node_js_installed)
  @node_js_installed = run("#{node_bp_bin_path}/node -v") && $?.success?
end
# Runs `rake assets:precompile` when the app defines that task,
# aborting the build with a helpful message when it fails.
def run_assets_precompile_rake_task
  instrument 'ruby.run_assets_precompile_rake_task' do
    precompile = rake.task("assets:precompile")
    # apps without the task are fine; nothing to do
    return true unless precompile.is_defined?

    topic "Precompiling assets"
    precompile.invoke(env: rake_env)
    if precompile.success?
      puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
    else
      precompile_fail(precompile.output)
    end
  end
end
# Aborts the build after a failed asset precompile, pointing at the
# most common cause: touching a database that isn't provisioned yet.
# @param [String] output captured output of the precompile task
def precompile_fail(output)
  log "assets_precompile", :status => "failure"
  msg = "Precompiling assets failed.\n"
  # 127.0.0.1 / org.postgresql.util in the output implies a DB connection attempt
  if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
    msg << "Attempted to access a nonexistent database:\n"
    msg << "https://devcenter.heroku.com/articles/pre-provision-database\n"
  end
  error msg
end
# Location of the bundler gem cache inside the slug.
# @return [String]
def bundler_cache
  File.join("vendor", "bundle")
end
# Loads the bundler cache from the previous build, purging it when it is
# stale or known-broken (stack change, ruby version change, or caches
# produced by buggy buildpack versions), then records fresh metadata
# for the next build.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"

    full_ruby_version       = run_stdout(%q(ruby -v)).chomp
    rubygems_version        = run_stdout(%q(gem -v)).chomp
    heroku_metadata         = "vendor/heroku"
    old_rubygems_version    = nil
    ruby_version_cache      = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    bundler_version_cache   = "bundler_version"
    rubygems_version_cache  = "rubygems_version"
    stack_cache             = "stack"

    # read the rubygems version recorded by the previous build
    # (previously this erroneously read the ruby_version key, which made
    # the "2.0.0" comparison below unreachable)
    old_rubygems_version = @metadata.read(rubygems_version_cache).chomp if @metadata.exists?(rubygems_version_cache)
    old_stack = @metadata.read(stack_cache).chomp if @metadata.exists?(stack_cache)
    old_stack ||= DEFAULT_LEGACY_STACK

    stack_change  = old_stack != @stack
    convert_stack = @bundler_cache.old?
    @bundler_cache.convert_stack(stack_change) if convert_stack
    if !new_app? && stack_change
      puts "Purging Cache. Changing stack from #{old_stack} to #{@stack}"
      purge_bundler_cache(old_stack)
    elsif !new_app? && !convert_stack
      @bundler_cache.load
    end

    # fix bug from v37 deploy
    # (File.exists? is deprecated; File.exist? is the supported form)
    if File.exist?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
      # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    elsif (@bundler_cache.exists? || @bundler_cache.old?) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end

    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exist?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
        (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
        @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end

    # recompile psych against the new libyaml (CVE-2013-6393)
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
      puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
      puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
      purge_bundler_cache
    end

    # record metadata for the next build to compare against
    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.write(stack_cache, @stack, false)
    @metadata.save
  end
end
# Clears the bundler cache and reinstalls the language pack's bundler copy.
# @param [String, nil] stack old stack name whose cache should be cleared
def purge_bundler_cache(stack = nil)
  instrument "ruby.purge_bundler_cache" do
    @bundler_cache.clear(stack)
    # need to reinstall language pack gems
    install_bundler_in_app
  end
end
end
|
# -*- coding: utf-8 -*-
#require 'delayer/deferred/error'
#require 'thread'
module Delayer::Deferred::Deferredable
  # State machine describing the life cycle of a Deferredable node.
  # Each state is a Sequence instance wired together below; transitions
  # are named "flows", and invalid flows raise (SequenceError by default).
  module NodeSequence
    # A single state: its name plus the transitions allowed out of it.
    class Sequence
      attr_reader :name

      def initialize(name)
        @name = name.to_sym
        @map = {}
        # exception class raised per invalid flow (default: SequenceError)
        @exceptions = Hash.new(Delayer::Deferred::SequenceError)
      end

      # Registers a transition: following +flow+ leads to state +seq+.
      # The flow name defaults to the destination state's name.
      def add(seq, flow = seq.name)
        @map[flow] = seq
        self
      end

      # Registers a specific exception class raised for an invalid +flow+.
      def exception(exc, flow)
        @exceptions[flow] = exc
        self
      end

      # Returns the successor state for +flow+, or raises the registered
      # exception when the flow is not allowed from this state.
      def pull(flow)
        if @map.has_key?(flow.to_sym)
          @map[flow.to_sym]
        else
          raise @exceptions[flow.to_sym], "Invalid sequence flow `#{name}' to `#{flow}'."
        end
      end

      def inspect
        "#<#{self.class}: #{name}>"
      end
    end

    FRESH = Sequence.new(:fresh)
    CONNECTED = Sequence.new(:connected) # has a child, not yet executed
    RESERVED = Sequence.new(:reserved) # waiting in the run queue
    RESERVED_C= Sequence.new(:reserved) # waiting in the run queue (has a child)
    RUN = Sequence.new(:run) # running
    RUN_C = Sequence.new(:run) # running (has a child)
    PASS = Sequence.new(:pass) # passing
    PASS_C = Sequence.new(:pass) # passing
    AWAIT = Sequence.new(:await) # awaiting
    AWAIT_C = Sequence.new(:await) # awaiting (has a child)
    GRAFT = Sequence.new(:graft) # when the return value is Awaitable
    GRAFT_C = Sequence.new(:graft) # when the return value is Awaitable (has a child)
    CALL_CHILD= Sequence.new(:call_child) # completed, has a child
    STOP = Sequence.new(:stop) # completed, no child
    WAIT = Sequence.new(:wait) # completed, observer registered
    BURST_OUT = Sequence.new(:burst_out) # completed, observer registered, child added
    ROTTEN = Sequence.new(:rotten).freeze # finished
    GENOCIDE = Sequence.new(:genocide).freeze# it is said a massacre once took place in this land.

    # Transition table. Note every state may be torn down via :genocide.
    FRESH
      .add(CONNECTED, :get_child)
      .add(RESERVED, :reserve)
      .add(GENOCIDE).freeze
    CONNECTED
      .add(RESERVED_C, :reserve)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    RESERVED
      .add(RUN, :activate)
      .add(RESERVED_C, :get_child)
      .add(GENOCIDE).freeze
    RESERVED_C
      .add(RUN_C, :activate)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    RUN
      .add(RUN_C, :get_child)
      .add(PASS)
      .add(AWAIT, :await)
      .add(STOP, :complete)
      .add(GENOCIDE).freeze
    RUN_C
      .add(PASS_C)
      .add(AWAIT_C, :await)
      .add(CALL_CHILD, :complete)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    PASS
      .add(PASS_C, :get_child)
      .add(RUN, :resume)
      .add(GENOCIDE).freeze
    PASS_C
      .add(RUN_C, :resume)
      .add(GENOCIDE).freeze
    AWAIT
      .add(RUN, :resume)
      .add(AWAIT_C, :get_child)
      .add(GENOCIDE).freeze
    AWAIT_C
      .add(RUN_C, :resume)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    CALL_CHILD
      .add(GRAFT_C, :await)
      .add(ROTTEN, :called)
      .add(GENOCIDE).freeze
    GRAFT
      .add(STOP, :resume)
      .add(GRAFT_C, :get_child)
      .add(GENOCIDE).freeze
    GRAFT_C
      .add(CALL_CHILD, :resume)
      .add(GENOCIDE).freeze
    STOP
      .add(GRAFT, :await)
      .add(WAIT, :gaze)
      .add(GENOCIDE).freeze
    WAIT
      .add(BURST_OUT, :get_child)
      .add(GENOCIDE).freeze
    BURST_OUT
      .add(ROTTEN, :called)
      .add(GENOCIDE).freeze

    # NOTE(review): relies on Monitor already being required elsewhere.
    SEQUENCE_LOCK = Monitor.new

    # Current state; nodes start out FRESH.
    def sequence
      @sequence ||= FRESH
    end

    # This method is thread-safe.
    # Moves this node along +flow+. When a block is given it runs between
    # the state swap and the on_sequence_changed notification and its
    # result is returned; otherwise nil is returned. Every transition is
    # appended to the @seq_logger history.
    def change_sequence(flow, &block)
      SEQUENCE_LOCK.synchronize do
        old_seq = sequence
        new_seq = @sequence = sequence.pull(flow)
        (@seq_logger ||= [old_seq]) << new_seq
        if block
          result = block.()
          on_sequence_changed(old_seq, flow, new_seq)
          result
        else
          on_sequence_changed(old_seq, flow, new_seq)
          nil
        end
      end
    end

    # Hook invoked after every state change; no-op by default.
    def on_sequence_changed(old_seq, flow, new_seq)
    end

    # True for any state except FRESH, CONNECTED, RUN and RUN_C.
    def activated?
      ![FRESH, CONNECTED, RUN, RUN_C].include?(sequence)
    end

    # True when the node reached a terminal state (ROTTEN or GENOCIDE).
    def spoiled?
      sequence == ROTTEN || sequence == GENOCIDE
    end
  end
end
Monitorが無くても動くようにした (Made it work even when Monitor is not available)
# -*- coding: utf-8 -*-
#require 'delayer/deferred/error'
#require 'thread'
module Delayer::Deferred::Deferredable
  # State machine describing the life cycle of a Deferredable node.
  # Each state is a Sequence instance wired together below; transitions
  # are named "flows", and invalid flows raise (SequenceError by default).
  module NodeSequence
    # A single state: its name plus the transitions allowed out of it.
    class Sequence
      attr_reader :name

      def initialize(name)
        @name = name.to_sym
        @map = {}
        # exception class raised per invalid flow (default: SequenceError)
        @exceptions = Hash.new(Delayer::Deferred::SequenceError)
      end

      # Registers a transition: following +flow+ leads to state +seq+.
      # The flow name defaults to the destination state's name.
      def add(seq, flow = seq.name)
        @map[flow] = seq
        self
      end

      # Registers a specific exception class raised for an invalid +flow+.
      def exception(exc, flow)
        @exceptions[flow] = exc
        self
      end

      # Returns the successor state for +flow+, or raises the registered
      # exception when the flow is not allowed from this state.
      def pull(flow)
        if @map.has_key?(flow.to_sym)
          @map[flow.to_sym]
        else
          raise @exceptions[flow.to_sym], "Invalid sequence flow `#{name}' to `#{flow}'."
        end
      end

      def inspect
        "#<#{self.class}: #{name}>"
      end
    end

    FRESH = Sequence.new(:fresh)
    CONNECTED = Sequence.new(:connected) # has a child, not yet executed
    RESERVED = Sequence.new(:reserved) # waiting in the run queue
    RESERVED_C= Sequence.new(:reserved) # waiting in the run queue (has a child)
    RUN = Sequence.new(:run) # running
    RUN_C = Sequence.new(:run) # running (has a child)
    PASS = Sequence.new(:pass) # passing
    PASS_C = Sequence.new(:pass) # passing
    AWAIT = Sequence.new(:await) # awaiting
    AWAIT_C = Sequence.new(:await) # awaiting (has a child)
    GRAFT = Sequence.new(:graft) # when the return value is Awaitable
    GRAFT_C = Sequence.new(:graft) # when the return value is Awaitable (has a child)
    CALL_CHILD= Sequence.new(:call_child) # completed, has a child
    STOP = Sequence.new(:stop) # completed, no child
    WAIT = Sequence.new(:wait) # completed, observer registered
    BURST_OUT = Sequence.new(:burst_out) # completed, observer registered, child added
    ROTTEN = Sequence.new(:rotten).freeze # finished
    GENOCIDE = Sequence.new(:genocide).freeze# it is said a massacre once took place in this land.

    # Transition table. Note every state may be torn down via :genocide.
    FRESH
      .add(CONNECTED, :get_child)
      .add(RESERVED, :reserve)
      .add(GENOCIDE).freeze
    CONNECTED
      .add(RESERVED_C, :reserve)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    RESERVED
      .add(RUN, :activate)
      .add(RESERVED_C, :get_child)
      .add(GENOCIDE).freeze
    RESERVED_C
      .add(RUN_C, :activate)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    RUN
      .add(RUN_C, :get_child)
      .add(PASS)
      .add(AWAIT, :await)
      .add(STOP, :complete)
      .add(GENOCIDE).freeze
    RUN_C
      .add(PASS_C)
      .add(AWAIT_C, :await)
      .add(CALL_CHILD, :complete)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    PASS
      .add(PASS_C, :get_child)
      .add(RUN, :resume)
      .add(GENOCIDE).freeze
    PASS_C
      .add(RUN_C, :resume)
      .add(GENOCIDE).freeze
    AWAIT
      .add(RUN, :resume)
      .add(AWAIT_C, :get_child)
      .add(GENOCIDE).freeze
    AWAIT_C
      .add(RUN_C, :resume)
      .exception(Delayer::Deferred::MultipleAssignmentError, :get_child)
      .add(GENOCIDE).freeze
    CALL_CHILD
      .add(GRAFT_C, :await)
      .add(ROTTEN, :called)
      .add(GENOCIDE).freeze
    GRAFT
      .add(STOP, :resume)
      .add(GRAFT_C, :get_child)
      .add(GENOCIDE).freeze
    GRAFT_C
      .add(CALL_CHILD, :resume)
      .add(GENOCIDE).freeze
    STOP
      .add(GRAFT, :await)
      .add(WAIT, :gaze)
      .add(GENOCIDE).freeze
    WAIT
      .add(BURST_OUT, :get_child)
      .add(GENOCIDE).freeze
    BURST_OUT
      .add(ROTTEN, :called)
      .add(GENOCIDE).freeze

    # Use Monitor when it is available; otherwise fall back to a no-op
    # "lock" so this module also works without the monitor library.
    if const_defined?(:Monitor)
      SEQUENCE_LOCK = Monitor.new
    else
      SEQUENCE_LOCK = Class.new do
        def synchronize
          yield
        end
      end.new
    end

    # Current state; nodes start out FRESH.
    def sequence
      @sequence ||= FRESH
    end

    # This method is thread-safe (only when Monitor is available; the
    # fallback lock above does not actually synchronize).
    # Moves this node along +flow+. When a block is given it runs between
    # the state swap and the on_sequence_changed notification and its
    # result is returned; otherwise nil is returned. Every transition is
    # appended to the @seq_logger history.
    def change_sequence(flow, &block)
      SEQUENCE_LOCK.synchronize do
        old_seq = sequence
        new_seq = @sequence = sequence.pull(flow)
        (@seq_logger ||= [old_seq]) << new_seq
        if block
          result = block.()
          on_sequence_changed(old_seq, flow, new_seq)
          result
        else
          on_sequence_changed(old_seq, flow, new_seq)
          nil
        end
      end
    end

    # Hook invoked after every state change; no-op by default.
    def on_sequence_changed(old_seq, flow, new_seq)
    end

    # True for any state except FRESH, CONNECTED, RUN and RUN_C.
    def activated?
      ![FRESH, CONNECTED, RUN, RUN_C].include?(sequence)
    end

    # True when the node reached a terminal state (ROTTEN or GENOCIDE).
    def spoiled?
      sequence == ROTTEN || sequence == GENOCIDE
    end
  end
end
|
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
# libyaml is vendored so psych can compile against it
LIBYAML_VERSION = "0.1.6"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.6.3"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
# node is vendored as a JS runtime (for execjs)
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
LATEST_JVM_VERSION = "openjdk7-latest"
LEGACY_JVM_VERSION = "openjdk1.7.0_25"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
# bin dir used by the official heroku-buildpack-nodejs (multibuildpack)
NODE_BP_PATH = "vendor/node/bin"
# prebuilt cmake vendored for native extensions that need it at build time
CMAKE_BASE_URL = "http://www.cmake.org/files/v2.8"
CMAKE_VERSION = "2.8.12.2"
CMAKE_PATH = "vendor/cmake/bin"
# Detects if this is a valid Ruby app by the presence of a Gemfile.
# @return [Boolean] true if it's a Ruby app
def self.use?
  instrument "ruby.use" do
    File.exist?("Gemfile")
  end
end
# Memoized, installed bundler wrapper shared across the class.
def self.bundler
  @bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
# Instance-level accessor delegating to the class-wide bundler wrapper.
def bundler
  self.class.bundler
end
# @param [String] build_path path of the app being built
# @param [String, nil] cache_path path of the build cache
def initialize(build_path, cache_path=nil)
  super(build_path, cache_path)
  # fetchers for the extra artifacts this pack vendors
  @fetchers[:jvm]   = LanguagePack::Fetcher.new(JVM_BASE_URL)
  @fetchers[:rbx]   = LanguagePack::Fetcher.new(RBX_BASE_URL)
  @fetchers[:cmake] = LanguagePack::Fetcher.new(CMAKE_BASE_URL)
end
# Human-readable name of this language pack.
# @return [String]
def name
  'Ruby'
end
# Addons enabled by default (a dev database when the pg gem is bundled).
def default_addons
  instrument "ruby.default_addons" do
    add_dev_database_addon
  end
end
# Default config vars for the app; JRuby additionally gets JVM tuning vars.
def default_config_vars
  instrument "ruby.default_config_vars" do
    vars = {
      "LANG" => "en_US.UTF-8",
    }

    ruby_version.jruby? ? vars.merge({
      "JAVA_OPTS" => default_java_opts,
      "JRUBY_OPTS" => default_jruby_opts,
      "JAVA_TOOL_OPTIONS" => default_java_tool_options
    }) : vars
  end
end
# Fallback process types used when the app declares none.
def default_process_types
  instrument "ruby.default_process_types" do
    {
      "rake" => "bundle exec rake",
      "console" => "bundle exec irb"
    }
  end
end
# Main compile pipeline for a Ruby app: vendor ruby/cmake/jvm, set up
# the build env, bundle, write database.yml, install binaries and
# precompile assets. Order matters; see inline notes.
def compile
  instrument 'ruby.compile' do
    # check for new app at the beginning of the compile
    new_app?
    Dir.chdir(build_path)
    remove_vendor_bundle
    install_ruby
    install_cmake
    install_jvm
    setup_language_pack_environment
    setup_profiled
    # bundler/git misbehave with GIT_DIR set, so drop it for this part
    allow_git do
      install_bundler_in_app
      build_bundler
      create_database_yml
      install_binaries
      run_assets_precompile_rake_task
    end
    super
  end
end
private
# The base PATH environment variable to be used during the build.
# @return [String] the resulting PATH
def default_path
  # need to remove bin/ folder since it links
  # to the wrong --prefix ruby binstubs
  # breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
  safe_binstubs = binstubs_relative_paths - ["bin"]
  paths = [
    ENV["PATH"],
    "bin",
    system_paths,
  ]
  paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
  # vendored cmake is only installed (and needed) for non-JRuby builds
  paths.unshift(File.expand_path("#{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin")) unless ruby_version.jruby?
  paths.unshift(safe_binstubs)

  paths.join(":")
end
# Relative paths expected to contain binstubs, in PATH priority order.
def binstubs_relative_paths
  [
    "bin",
    bundler_binstubs_path,
    "#{slug_vendor_base}/bin"
  ]
end
# System-level PATH entries appended after the app's own paths.
# @return [String]
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# The relative path to the bundler directory of gems.
# @note memoized in @slug_vendor_base
# @return [String] resulting path
def slug_vendor_base
  instrument 'ruby.slug_vendor_base' do
    if @slug_vendor_base
      @slug_vendor_base
    elsif ruby_version.ruby_version == "1.8.7"
      # 1.8.7 has no RUBY_ENGINE, so the path is fixed
      @slug_vendor_base = "vendor/bundle/1.8"
    else
      # ask the vendored ruby itself for its engine/ABI-specific dir
      @slug_vendor_base = run_no_pipe(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
      error "Problem detecting bundler vendor directory: #{@slug_vendor_base}" unless $?.success?
      @slug_vendor_base
    end
  end
end
# The relative path to the vendored ruby directory.
# @return [String] resulting path
def slug_vendor_ruby
  "vendor/#{ruby_version.version_without_patchlevel}"
end
# The relative path to the vendored JVM inside the slug.
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# The absolute path of the build ruby to use during the buildpack.
# @return [String] resulting path
def build_ruby_path
  "/tmp/#{ruby_version.version_without_patchlevel}"
end
# Fetches the ruby version from bundler, passing along whether the app
# is new and the version recorded by the previous build.
# @note memoized in @ruby_version
# @return [LanguagePack::RubyVersion]
def ruby_version
  instrument 'ruby.ruby_version' do
    return @ruby_version if @ruby_version
    new_app           = !File.exist?("vendor/heroku")
    last_version_file = "buildpack_ruby_version"
    last_version      = nil
    last_version      = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)

    @ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
      is_new:       new_app,
      last_version: last_version)
    return @ruby_version
  end
end
# Default JVM options applied to JRuby apps.
# @return [String] value for JAVA_OPTS
def default_java_opts
  ["-Xmx384m", "-Xss512k", "-XX:+UseCompressedOops", "-Dfile.encoding=UTF-8"].join(" ")
end
# Default JRuby interpreter options.
# @return [String] value for JRUBY_OPTS
def default_jruby_opts
  '-Xcompile.invokedynamic=false'
end
# Default JAVA_TOOL_OPTIONS picked up by any JVM started in the dyno.
# @return [String] value for JAVA_TOOL_OPTIONS
def default_java_tool_options
  '-Djava.rmi.server.useCodebaseOnly=true'
end
# Lists the available valid ruby versions by downloading and parsing
# the buildpack's ruby_versions.yml manifest in a temp dir.
# @note the value is memoized in @ruby_versions
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
  return @ruby_versions if @ruby_versions

  Dir.mktmpdir("ruby_versions-") do |tmpdir|
    Dir.chdir(tmpdir) do
      @fetchers[:buildpack].fetch("ruby_versions.yml")
      @ruby_versions = YAML::load_file("ruby_versions.yml")
    end
  end

  @ruby_versions
end
# Sets up the environment variables for the build process.
# Mutates ENV in place; ordering matters since default_path reads ENV["PATH"].
def setup_language_pack_environment
  instrument 'ruby.setup_language_pack_environment' do
    setup_ruby_install_env
    # add node when it was installed by heroku-buildpack-nodejs (multibuildpack)
    ENV["PATH"] += ":#{node_bp_bin_path}" if node_js_installed?

    # TODO when buildpack-env-args rolls out, we can get rid of
    # ||= and the manual setting below
    config_vars = default_config_vars.each do |key, value|
      ENV[key] ||= value
    end
    ENV["GEM_PATH"] = slug_vendor_base
    ENV["GEM_HOME"] = slug_vendor_base
    ENV["PATH"] = default_path
  end
end
# Sets up the profile.d script for this buildpack: the env vars the app
# sees at runtime (GEM_PATH, LANG, PATH and JVM tuning for JRuby).
def setup_profiled
  instrument 'setup_profiled' do
    set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
    set_env_default  "LANG", "en_US.UTF-8"
    set_env_override "PATH", binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":") + ":$PATH"

    if ruby_version.jruby?
      set_env_default "JAVA_OPTS", default_java_opts
      set_env_default "JRUBY_OPTS", default_jruby_opts
      set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
    end
  end
end
# Installs the vendored ruby into the slug and symlinks its binstubs
# into the app's bin/ directory.
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  instrument 'ruby.install_ruby' do
    return false unless ruby_version
    invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version.version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
    # a "build ruby" is a separate ruby used only while building
    if ruby_version.build?
      FileUtils.mkdir_p(build_ruby_path)
      Dir.chdir(build_ruby_path) do
        ruby_vm = "ruby"
        instrument "ruby.fetch_build_ruby" do
          @fetchers[:buildpack].fetch_untar("#{ruby_version.version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
        end
      end
      error invalid_ruby_version_message unless $?.success?
    end

    FileUtils.mkdir_p(slug_vendor_ruby)
    Dir.chdir(slug_vendor_ruby) do
      instrument "ruby.fetch_ruby" do
        if ruby_version.rbx?
          # Rubinius tarballs ship a SHA1 checksum file; verify before untarring
          file = "#{ruby_version.version}.tar.bz2"
          sha_file = "#{file}.sha1"
          @fetchers[:rbx].fetch(file)
          @fetchers[:rbx].fetch(sha_file)

          expected_checksum = File.read(sha_file).chomp
          actual_checksum = Digest::SHA1.file(file).hexdigest

          error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
ERROR_MSG

          run("tar jxf #{file}")
          FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
          FileUtils.rm_rf("app")
          FileUtils.rm(file)
          FileUtils.rm(sha_file)
        else
          @fetchers[:buildpack].fetch_untar("#{ruby_version.version}.tgz")
        end
      end
    end
    error invalid_ruby_version_message unless $?.success?

    # expose the vendored ruby's binstubs in the app's bin/
    app_bin_dir = "bin"
    FileUtils.mkdir_p app_bin_dir
    # ruby.exe alias — presumably for Gemfiles generated on Windows; TODO confirm
    run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
    Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
      run("ln -s ../#{vendor_bin} #{app_bin_dir}")
    end
    @metadata.write("buildpack_ruby_version", ruby_version.version)

    topic "Using Ruby version: #{ruby_version.version}"
    if !ruby_version.set
      warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information.
WARNING
    end
  end
  true
end
# Whether this is a brand-new app, detected by the absence of the
# vendor/heroku metadata directory left behind by previous builds.
# @return [Boolean]
def new_app?
  @new_app ||= File.exist?("vendor/heroku") ? false : true
end
# Vendors a prebuilt cmake binary into the slug (skipped for JRuby);
# some gems' native extensions require cmake at build time.
def install_cmake
  instrument 'ruby.install_cmake' do
    return if ruby_version.jruby?
    # TODO check for cached build output
    topic "Fetching cmake binary (#{CMAKE_VERSION})"
    FileUtils.mkdir_p(slug_vendor_cmake)
    Dir.chdir(slug_vendor_cmake) do
      instrument "ruby.fetch_cmake" do
        @fetchers[:cmake].fetch_untar("cmake-#{CMAKE_VERSION}-Linux-i386.tar.gz")
        system("chmod +x cmake-#{CMAKE_VERSION}-Linux-i386/bin/*")
      end
    end
    error "Couldn't fetch cmake (cmake-#{CMAKE_VERSION}-Linux-i386.tar.gz)!" unless $?.success?
    # diagnostic output to confirm the install worked
    out = `ls -l #{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin`
    topic "Done! Cmake path: #{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin\n #{out}"
    out = `#{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin/cmake --version`
    topic "CMake version: #{out}"
    # TODO cache the build output
  end
end
# relative path where the cmake binaries are vendored
def slug_vendor_cmake
"vendor/cmake"
end
# vendors JVM into the slug for JRuby
def install_jvm
instrument 'ruby.install_jvm' do
if ruby_version.jruby?
# the JVM build chosen depends on the JRuby engine version
jvm_version =
if Gem::Version.new(ruby_version.engine_version) >= Gem::Version.new("1.7.4")
LATEST_JVM_VERSION
else
LEGACY_JVM_VERSION
end
topic "Installing JVM: #{jvm_version}"
FileUtils.mkdir_p(slug_vendor_jvm)
Dir.chdir(slug_vendor_jvm) do
@fetchers[:jvm].fetch_untar("#{jvm_version}.tar.gz")
end
# expose the JVM executables through the app's bin/ directory
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if ruby_version.build?
# the dedicated "build ruby" lives outside the slug under /tmp
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version.jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# installs vendored gems into the slug
def install_bundler_in_app
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
# copy the buildpack's own bundler installation into the app's gem dir
`cp -R #{bundler.bundler_path}/. .`
end
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
# everything dropped into bin/ must be executable
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
# Removes a vendored binary from the slug's bin/ directory.
# Missing files are ignored (force), so this is safe to call unconditionally.
# @param [String] path relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join("bin", File.basename(path))
  FileUtils.rm(target, force: true)
end
# only brand new apps on the default ruby can reuse the prebuilt bundler cache
def load_default_cache?
new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
def load_default_cache
instrument "ruby.load_default_cache" do
# NOTE(review): deliberately disabled via `if false` — the default-cache
# feature is switched off; re-enable or delete intentionally
if false # load_default_cache?
puts "New app detected loading default bundler cache"
patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
cache_name = "#{DEFAULT_RUBY_VERSION}-p#{patchlevel}-default-cache"
@fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
end
end
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2)
  if File.exist?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Relative path where bundler drops generated binstubs.
# @return [String] resulting path
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# runs bundler to install the dependencies
def build_bundler
instrument 'ruby.build_bundler' do
log("bundle") do
# groups excluded from install; users can override via BUNDLE_WITHOUT
bundle_without = env("BUNDLE_WITHOUT") || "development:test"
bundle_bin = "bundle"
bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
# -j4: run the install with four parallel jobs
bundle_command << " -j4"
if bundler.windows_gemfile_lock?
warn(<<WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
https://devcenter.heroku.com/articles/bundler-windows-gemfile
WARNING
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache.load ".bundle"
end
topic("Installing dependencies using #{bundler.version}")
load_bundler_cache
bundler_output = ""
bundle_time = nil
# libyaml is unpacked into a throwaway tmpdir so psych can compile against it
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
yaml_lib = File.expand_path("#{libyaml_dir}/lib").shellescape
pwd = Dir.pwd
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = {
"BUNDLE_GEMFILE" => "#{pwd}/Gemfile",
"BUNDLE_CONFIG" => "#{pwd}/.bundle/config",
"CPATH" => noshellescape("#{yaml_include}:$CPATH"),
"CPPATH" => noshellescape("#{yaml_include}:$CPPATH"),
"LIBRARY_PATH" => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
"RUBYOPT" => syck_hack,
"NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true"
}
# 1.8.7 needs the vendored bundler on the load path explicitly
env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
puts "Running: #{bundle_command}"
instrument "ruby.bundle_install" do
bundle_time = Benchmark.realtime do
bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
end
end
end
# NOTE(review): relies on $? set by the pipe call above — confirm pipe sets it
if $?.success?
puts "Bundle completed (#{"%.2f" % bundle_time}s)"
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
instrument "ruby.bundle_clean" do
# Only show bundle clean output when not using default cache
if load_default_cache?
run "bundle clean > /dev/null"
else
pipe("#{bundle_bin} clean", out: "2> /dev/null")
end
end
cache.store ".bundle"
cache.store "vendor/bundle"
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
puts "Bundler Output: #{bundler_output}"
# sqlite3 cannot persist on Heroku's ephemeral filesystem — targeted message
if bundler_output.match(/An error occurred while installing sqlite3/)
error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
end
error error_message
end
end
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
instrument "ruby.syck_hack" do
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
# ask the vendored ruby (not the system one) for its version
rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
instrument 'ruby.create_database_yml' do
log("create_database_yml") do
# only apps with a config/ directory (Rails-style) get a database.yml
return unless File.directory?("config")
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
# the heredoc below is the literal ERB template written into the app;
# the \#{...} escapes defer those interpolations to ERB render time
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
end
# Memoized rake runner; loads the app's rake tasks once.
# Rake is usable when the app bundles the gem or the ruby version vendors it.
def rake
  @rake ||= LanguagePack::Helpers::RakeRunner.new(
    bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
  ).load_rake_tasks!(env: rake_env)
end

# Environment rake tasks run under: DATABASE_URL (when set) plus user env vars.
def rake_env
  base = database_url ? { "DATABASE_URL" => database_url } : {}
  base.merge(user_env_hash)
end

# @return [String, nil] the app's DATABASE_URL config var, nil when unset
def database_url
  url = env("DATABASE_URL")
  url if url
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# Fix: restore GIT_DIR in an `ensure` so it survives exceptions raised by the
# block (previously an error left GIT_DIR permanently deleted).
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # assigning nil removes the variable again when it was originally unset
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
bundler.has_gem?("pg") ? ['heroku-postgresql:hobby-dev'] : []
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
bundler.has_gem?('execjs') && !node_js_installed? ? [NODE_JS_BINARY_PATH] : []
end
# absolute path to the node buildpack's bin dir (multi-buildpack setups)
def node_bp_bin_path
"#{Dir.pwd}/#{NODE_BP_PATH}"
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# Fix: the original `||=` never memoized a false result, so `node -v` was
# re-executed on every call when node was absent; `defined?` caches both outcomes.
# @return [Boolean] true if it's detected and false if it isn't
def node_js_installed?
  return @node_js_installed if defined?(@node_js_installed)
  @node_js_installed = run("#{node_bp_bin_path}/node -v") && $?.success?
end
# runs `rake assets:precompile` when the app defines that task
def run_assets_precompile_rake_task
instrument 'ruby.run_assets_precompile_rake_task' do
precompile = rake.task("assets:precompile")
return true unless precompile.is_defined?
topic "Precompiling assets"
precompile.invoke(env: rake_env)
if precompile.success?
puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
else
precompile_fail(precompile.output)
end
end
end
# logs the failure and aborts the build with a helpful message
def precompile_fail(output)
log "assets_precompile", :status => "failure"
msg = "Precompiling assets failed.\n"
# a localhost or JDBC postgres error usually means the task needed a database
if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
msg << "Attempted to access a nonexistent database:\n"
msg << "https://devcenter.heroku.com/articles/pre-provision-database\n"
end
error msg
end
# Relative path of the on-disk bundler cache inside the build dir.
def bundler_cache
  File.join("vendor", "bundle")
end
# loads the cached bundle and invalidates it when the ruby, rubygems, bundler,
# or buildpack version it was built against has changed.
# Fix: deprecated File.exists? -> File.exist? (removed in Ruby 3.2).
def load_bundler_cache
instrument "ruby.load_bundler_cache" do
cache.load "vendor"
full_ruby_version = run_stdout(%q(ruby -v)).chomp
rubygems_version = run_stdout(%q(gem -v)).chomp
heroku_metadata = "vendor/heroku"
old_rubygems_version = nil
ruby_version_cache = "ruby_version"
buildpack_version_cache = "buildpack_version"
bundler_version_cache = "bundler_version"
rubygems_version_cache = "rubygems_version"
old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
load_default_cache
# fix bug from v37 deploy
if File.exist?("vendor/ruby_version")
puts "Broken cache detected. Purging build cache."
cache.clear("vendor")
FileUtils.rm_rf("vendor/ruby_version")
purge_bundler_cache
# fix bug introduced in v38
elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
puts "Broken cache detected. Purging build cache."
purge_bundler_cache
elsif cache.exists?(bundler_cache) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
puts "Ruby version change detected. Clearing bundler cache."
puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
puts "New: #{full_ruby_version}"
purge_bundler_cache
end
# fix git gemspec bug from Bundler 1.3.0+ upgrade
if File.exist?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
puts "Old bundler cache detected. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
if (!@metadata.exists?(rubygems_version_cache) ||
(old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/sparklemotion/nokogiri/issues/923
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
puts "Fixing nokogiri install. Clearing bundler cache."
puts "See https://github.com/sparklemotion/nokogiri/issues/923."
purge_bundler_cache
end
# recompile nokogiri to use new libyaml
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
purge_bundler_cache
end
# record current versions so the next build can compare against them
FileUtils.mkdir_p(heroku_metadata)
@metadata.write(ruby_version_cache, full_ruby_version, false)
@metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
@metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
@metadata.write(rubygems_version_cache, rubygems_version, false)
@metadata.save
end
end
# wipes the bundler cache (on disk and in the build cache) and reinstalls
# the language pack's own gems so later steps still have bundler available
def purge_bundler_cache
instrument "ruby.purge_bundler_cache" do
FileUtils.rm_rf(bundler_cache)
cache.clear bundler_cache
# need to reinstall language pack gems
install_bundler_in_app
end
end
end
(install_cmake): use an absolute path for the vendored cmake bin directory when listing it and checking `cmake --version`
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
# libyaml is vendored so the psych gem can compile against it
LIBYAML_VERSION = "0.1.6"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.6.3"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
# node is vendored for apps that need a JS runtime (execjs)
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
LATEST_JVM_VERSION = "openjdk7-latest"
LEGACY_JVM_VERSION = "openjdk1.7.0_25"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
# bin dir provided by the official node.js buildpack in multi-buildpack setups
NODE_BP_PATH = "vendor/node/bin"
CMAKE_BASE_URL = "http://www.cmake.org/files/v2.8"
CMAKE_VERSION = "2.8.12.2"
CMAKE_PATH = "vendor/cmake/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
instrument "ruby.use" do
# any app with a Gemfile is treated as a Ruby app
File.exist?("Gemfile")
end
end
# memoized at class level: install the bundler wrapper once per process
def self.bundler
@bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
# instance-level convenience accessor for the class-level bundler wrapper
def bundler
self.class.bundler
end
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
# extra artifact sources this pack downloads from, beyond the base fetchers
@fetchers[:jvm] = LanguagePack::Fetcher.new(JVM_BASE_URL)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL)
@fetchers[:cmake] = LanguagePack::Fetcher.new(CMAKE_BASE_URL)
end
# human readable name of this language pack
def name
"Ruby"
end
# addons automatically provisioned for new apps
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
# config vars set on the app; JRuby apps additionally get JVM defaults
def default_config_vars
instrument "ruby.default_config_vars" do
vars = {
"LANG" => "en_US.UTF-8",
}
ruby_version.jruby? ? vars.merge({
"JAVA_OPTS" => default_java_opts,
"JRUBY_OPTS" => default_jruby_opts,
"JAVA_TOOL_OPTIONS" => default_java_tool_options
}) : vars
end
end
# process types available to every Ruby app (a Procfile overrides these)
def default_process_types
instrument "ruby.default_process_types" do
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
end
# main entry point: vendors ruby/cmake/jvm into the build dir, installs
# gems and binaries, and precompiles assets
def compile
instrument 'ruby.compile' do
# check for new app at the beginning of the compile
new_app?
Dir.chdir(build_path)
remove_vendor_bundle
install_ruby
install_cmake
install_jvm
setup_language_pack_environment
setup_profiled
# a leaked GIT_DIR confuses bundler's git operations
allow_git do
install_bundler_in_app
build_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
super
end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
# need to remove bin/ folder since it links
# to the wrong --prefix ruby binstubs
# breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
safe_binstubs = binstubs_relative_paths - ["bin"]
paths = [
ENV["PATH"],
"bin",
system_paths,
]
paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
paths.unshift(File.expand_path("#{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin")) unless ruby_version.jruby?
# safe_binstubs is an Array; Array#join flattens it into the PATH string
paths.unshift(safe_binstubs)
paths.join(":")
end
# relative paths that may contain binstubs, in lookup order
def binstubs_relative_paths
[
"bin",
bundler_binstubs_path,
"#{slug_vendor_base}/bin"
]
end
# Standard system executable locations appended to PATH.
# @return [String] colon-separated list
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# the relative path to the bundler directory of gems
# @return [String] resulting path
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
if @slug_vendor_base
@slug_vendor_base
elsif ruby_version.ruby_version == "1.8.7"
# 1.8.7 has no RUBY_ENGINE constant, so the path is hard coded
@slug_vendor_base = "vendor/bundle/1.8"
else
# ask the vendored ruby itself where bundler will install gems
@slug_vendor_base = run_no_pipe(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
error "Problem detecting bundler vendor directory: #{@slug_vendor_base}" unless $?.success?
@slug_vendor_base
end
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version.version_without_patchlevel}"
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
"vendor/jvm"
end
# the absolute path of the build ruby to use during the buildpack
# lives outside the slug under /tmp; only used while compiling
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version.version_without_patchlevel}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
instrument 'ruby.ruby_version' do
return @ruby_version if @ruby_version
# a missing vendor/heroku marker dir means this is the app's first deploy
new_app = !File.exist?("vendor/heroku")
last_version_file = "buildpack_ruby_version"
last_version = nil
last_version = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)
@ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
is_new: new_app,
last_version: last_version)
return @ruby_version
end
end
# default JAVA_OPTS
# NOTE(review): heap/stack sizing appears tuned for small dynos — confirm
# return [String] string of JAVA_OPTS
def default_java_opts
"-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
"-Xcompile.invokedynamic=false"
end
# default JAVA_TOOL_OPTIONS
# return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
"-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
return @ruby_versions if @ruby_versions
# fetched into a throwaway tmpdir so nothing is left in the build dir
Dir.mktmpdir("ruby_versions-") do |tmpdir|
Dir.chdir(tmpdir) do
@fetchers[:buildpack].fetch("ruby_versions.yml")
@ruby_versions = YAML::load_file("ruby_versions.yml")
end
end
@ruby_versions
end
# sets up the environment variables for the build process
# Fix: removed the unused local `config_vars` — `Hash#each` returns its
# receiver, so the assignment served no purpose.
def setup_language_pack_environment
  instrument 'ruby.setup_language_pack_environment' do
    setup_ruby_install_env
    ENV["PATH"] += ":#{node_bp_bin_path}" if node_js_installed?

    # TODO when buildpack-env-args rolls out, we can get rid of
    # ||= and the manual setting below
    default_config_vars.each do |key, value|
      ENV[key] ||= value
    end

    ENV["GEM_PATH"] = slug_vendor_base
    ENV["GEM_HOME"] = slug_vendor_base
    ENV["PATH"] = default_path
  end
end
# sets up the profile.d script for this buildpack (the dyno's runtime env)
def setup_profiled
instrument 'setup_profiled' do
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "PATH", binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":") + ":$PATH"
# defaults only — users can still override the JVM settings at runtime
if ruby_version.jruby?
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
end
end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
instrument 'ruby.install_ruby' do
# nothing to do when no ruby version could be detected
return false unless ruby_version
invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version.version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
# some versions need a separate "build ruby" binary used only while compiling
if ruby_version.build?
FileUtils.mkdir_p(build_ruby_path)
Dir.chdir(build_ruby_path) do
ruby_vm = "ruby"
instrument "ruby.fetch_build_ruby" do
@fetchers[:buildpack].fetch_untar("#{ruby_version.version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
end
end
error invalid_ruby_version_message unless $?.success?
end
FileUtils.mkdir_p(slug_vendor_ruby)
Dir.chdir(slug_vendor_ruby) do
instrument "ruby.fetch_ruby" do
if ruby_version.rbx?
# Rubinius ships a SHA1 alongside the tarball; verify before unpacking
file = "#{ruby_version.version}.tar.bz2"
sha_file = "#{file}.sha1"
@fetchers[:rbx].fetch(file)
@fetchers[:rbx].fetch(sha_file)
expected_checksum = File.read(sha_file).chomp
actual_checksum = Digest::SHA1.file(file).hexdigest
error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
ERROR_MSG
run("tar jxf #{file}")
FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
FileUtils.rm_rf("app")
FileUtils.rm(file)
FileUtils.rm(sha_file)
else
@fetchers[:buildpack].fetch_untar("#{ruby_version.version}.tgz")
end
end
end
error invalid_ruby_version_message unless $?.success?
# link the vendored ruby's executables into the app's bin/ directory
app_bin_dir = "bin"
FileUtils.mkdir_p app_bin_dir
run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
run("ln -s ../#{vendor_bin} #{app_bin_dir}")
end
@metadata.write("buildpack_ruby_version", ruby_version.version)
topic "Using Ruby version: #{ruby_version.version}"
if !ruby_version.set
warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information.
WARNING
end
end
true
end
# true when this is the app's first deploy (no vendor/heroku metadata dir yet)
# NOTE: `||=` re-evaluates when the cached value is false; harmless here, it
# just re-checks the filesystem
def new_app?
@new_app ||= !File.exist?("vendor/heroku")
end
# vendors the cmake binaries into the slug (skipped on JRuby)
def install_cmake
instrument 'ruby.install_cmake' do
return if ruby_version.jruby?
# TODO check for cached build output
topic "Fetching cmake binary (#{CMAKE_VERSION})"
FileUtils.mkdir_p(slug_vendor_cmake)
Dir.chdir(slug_vendor_cmake) do
instrument "ruby.fetch_cmake" do
@fetchers[:cmake].fetch_untar("cmake-#{CMAKE_VERSION}-Linux-i386.tar.gz")
system("chmod ug+x cmake-#{CMAKE_VERSION}-Linux-i386/bin/*")
end
end
# NOTE(review): $? here reflects the chmod `system` call above, not the fetch — confirm intended
error "Couldn't fetch cmake (cmake-#{CMAKE_VERSION}-Linux-i386.tar.gz)!" unless $?.success?
# absolute path so the diagnostics below work from any cwd
path = File.expand_path("#{slug_vendor_cmake}/cmake-#{CMAKE_VERSION}-Linux-i386/bin")
out = `ls -l #{path}`
topic "Done! Cmake path: #{path}:\n #{out}"
out = `#{path}/cmake --version`
topic "CMake version: #{out}"
# TODO cache the build output
end
end
# relative path where the cmake binaries are vendored
def slug_vendor_cmake
"vendor/cmake"
end
# vendors JVM into the slug for JRuby
def install_jvm
instrument 'ruby.install_jvm' do
if ruby_version.jruby?
# the JVM build chosen depends on the JRuby engine version
jvm_version =
if Gem::Version.new(ruby_version.engine_version) >= Gem::Version.new("1.7.4")
LATEST_JVM_VERSION
else
LEGACY_JVM_VERSION
end
topic "Installing JVM: #{jvm_version}"
FileUtils.mkdir_p(slug_vendor_jvm)
Dir.chdir(slug_vendor_jvm) do
@fetchers[:jvm].fetch_untar("#{jvm_version}.tar.gz")
end
# expose the JVM executables through the app's bin/ directory
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if ruby_version.build?
# the dedicated "build ruby" lives outside the slug under /tmp
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version.jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# installs vendored gems into the slug
def install_bundler_in_app
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
# copy the buildpack's own bundler installation into the app's gem dir
`cp -R #{bundler.bundler_path}/. .`
end
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
# everything dropped into bin/ must be executable
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
FileUtils.rm File.join('bin', File.basename(path)), :force => true
end
# only brand new apps on the default ruby can reuse the prebuilt bundler cache
def load_default_cache?
new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
def load_default_cache
instrument "ruby.load_default_cache" do
# NOTE(review): deliberately disabled via `if false` — the default-cache
# feature is switched off; re-enable or delete intentionally
if false # load_default_cache?
puts "New app detected loading default bundler cache"
patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
cache_name = "#{DEFAULT_RUBY_VERSION}-p#{patchlevel}-default-cache"
@fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
end
end
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2)
  if File.exist?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Relative path where bundler drops generated binstubs.
# @return [String] resulting path
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# runs bundler to install the dependencies
def build_bundler
instrument 'ruby.build_bundler' do
log("bundle") do
# groups excluded from install; users can override via BUNDLE_WITHOUT
bundle_without = env("BUNDLE_WITHOUT") || "development:test"
bundle_bin = "bundle"
bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
# -j4: run the install with four parallel jobs
bundle_command << " -j4"
if bundler.windows_gemfile_lock?
warn(<<WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
https://devcenter.heroku.com/articles/bundler-windows-gemfile
WARNING
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache.load ".bundle"
end
topic("Installing dependencies using #{bundler.version}")
load_bundler_cache
bundler_output = ""
bundle_time = nil
# libyaml is unpacked into a throwaway tmpdir so psych can compile against it
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
yaml_lib = File.expand_path("#{libyaml_dir}/lib").shellescape
pwd = Dir.pwd
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = {
"BUNDLE_GEMFILE" => "#{pwd}/Gemfile",
"BUNDLE_CONFIG" => "#{pwd}/.bundle/config",
"CPATH" => noshellescape("#{yaml_include}:$CPATH"),
"CPPATH" => noshellescape("#{yaml_include}:$CPPATH"),
"LIBRARY_PATH" => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
"RUBYOPT" => syck_hack,
"NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true"
}
# 1.8.7 needs the vendored bundler on the load path explicitly
env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
puts "Running: #{bundle_command}"
instrument "ruby.bundle_install" do
bundle_time = Benchmark.realtime do
bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
end
end
end
# NOTE(review): relies on $? set by the pipe call above — confirm pipe sets it
if $?.success?
puts "Bundle completed (#{"%.2f" % bundle_time}s)"
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
instrument "ruby.bundle_clean" do
# Only show bundle clean output when not using default cache
if load_default_cache?
run "bundle clean > /dev/null"
else
pipe("#{bundle_bin} clean", out: "2> /dev/null")
end
end
cache.store ".bundle"
cache.store "vendor/bundle"
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
puts "Bundler Output: #{bundler_output}"
# sqlite3 cannot persist on Heroku's ephemeral filesystem — targeted message
if bundler_output.match(/An error occurred while installing sqlite3/)
error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
end
error error_message
end
end
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
instrument "ruby.syck_hack" do
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
# ask the vendored ruby (not the system one) for its version
rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
instrument 'ruby.create_database_yml' do
log("create_database_yml") do
# only apps with a config/ directory (Rails-style) get a database.yml
return unless File.directory?("config")
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
# the heredoc below is the literal ERB template written into the app;
# the \#{...} escapes defer those interpolations to ERB render time
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
end
# Memoized rake-task runner, preloaded with the app's rake tasks.
def rake
  @rake ||=
    begin
      rake_available = bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
      runner = LanguagePack::Helpers::RakeRunner.new(rake_available)
      runner.load_rake_tasks!(env: rake_env)
    end
end
# Environment hash passed to rake invocations: DATABASE_URL (when the
# app has one) overlaid with the user's own config vars.
def rake_env
  base = database_url ? { "DATABASE_URL" => database_url } : {}
  base.merge(user_env_hash)
end
# @return [String, nil] the app's DATABASE_URL config var, if set
def database_url
  value = env("DATABASE_URL")
  value if value
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# @param [block] block to be executed in the GIT_DIR free context
# Executes the block with the GIT_DIR environment variable removed,
# since it can mess with the working directory git thinks it's in.
# The previous value is restored even when the block raises (the old
# implementation leaked the deleted variable on exception).
# @param [block] blk block to be executed in the GIT_DIR-free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # assigning nil removes the variable, matching the pre-call state
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
# Decides if we need to provision the dev database addon.
# @return [Array] the addon slug when the pg gem is detected, else []
def add_dev_database_addon
  return [] unless bundler.has_gem?("pg")
  ['heroku-postgresql:hobby-dev']
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
# Decides if we need to vendor the node.js binary: execjs blows up when
# no JS runtime is present, unless one was already installed by the
# node buildpack.
# @return [Array] the node.js binary path if we need it, else []
def add_node_js_binary
  needs_node = bundler.has_gem?('execjs') && !node_js_installed?
  needs_node ? [NODE_JS_BINARY_PATH] : []
end
# Absolute path to the node buildpack's bin directory inside the
# current build directory.
def node_bp_bin_path
  File.join(Dir.pwd, NODE_BP_PATH)
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# @return [Boolean] true if it's detected and false if it isn't
# Checks if node.js was installed via the official heroku-buildpack-nodejs
# using multibuildpack. The result is memoized including a negative
# answer: the previous `||=` re-ran the shell command on every call
# whenever node was absent.
# @return [Boolean] true if it's detected and false if it isn't
def node_js_installed?
  return @node_js_installed if defined?(@node_js_installed)
  @node_js_installed = run("#{node_bp_bin_path}/node -v") && $?.success?
end
# Runs `rake assets:precompile` when the app defines it, reporting the
# elapsed time on success and failing the build otherwise.
def run_assets_precompile_rake_task
  instrument 'ruby.run_assets_precompile_rake_task' do
    task = rake.task("assets:precompile")
    return true unless task.is_defined?

    topic "Precompiling assets"
    task.invoke(env: rake_env)
    if task.success?
      puts "Asset precompilation completed (#{"%.2f" % task.time}s)"
    else
      precompile_fail(task.output)
    end
  end
end
# Logs the failed asset precompile and aborts the build, appending a
# database hint when the output looks like a missing-database failure.
def precompile_fail(output)
  log "assets_precompile", :status => "failure"
  message_parts = ["Precompiling assets failed.\n"]
  if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
    message_parts << "Attempted to access a nonexistent database:\n"
    message_parts << "https://devcenter.heroku.com/articles/pre-provision-database\n"
  end
  error message_parts.join
end
# Relative directory that holds the installed bundle (and is cached
# between builds).
def bundler_cache
  File.join("vendor", "bundle")
end
# Restores "vendor" from the build cache, then runs a chain of cache
# invalidation checks accumulated over buildpack history — each purges
# the bundler cache when a known-broken or stale state is detected.
# Finally records the current ruby/buildpack/bundler/rubygems versions
# into vendor/heroku so the next build can repeat these checks.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"
    full_ruby_version = run_stdout(%q(ruby -v)).chomp
    rubygems_version = run_stdout(%q(gem -v)).chomp
    heroku_metadata = "vendor/heroku"
    old_rubygems_version = nil
    ruby_version_cache = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    bundler_version_cache = "bundler_version"
    rubygems_version_cache = "rubygems_version"
    old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
    load_default_cache
    # fix bug from v37 deploy
    if File.exists?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
    # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    # a changed ruby version invalidates every compiled native gem
    elsif cache.exists?(bundler_cache) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end
    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exists?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
        (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
        @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end
    # recompile nokogiri to use new libyaml
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
      puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
      puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
      purge_bundler_cache
    end
    # stamp the current versions for the next build's checks above
    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.save
  end
end
# Drops the bundler cache both on disk and from the build cache, then
# re-vendors the buildpack's own gems (they are wiped along with it —
# see install_bundler_in_app).
def purge_bundler_cache
  instrument "ruby.purge_bundler_cache" do
    FileUtils.rm_rf(bundler_cache)
    cache.clear bundler_cache
    # need to reinstall language pack gems
    install_bundler_in_app
  end
end
end
|
require "tmpdir"
require "rubygems"
require "language_pack"
require "language_pack/base"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.2.0.pre"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
File.exist?("Gemfile")
end
def name
"Ruby"
end
def default_addons
add_shared_database_addon
end
def default_config_vars
vars = {
"LANG" => "en_US.UTF-8",
"PATH" => default_path,
"GEM_PATH" => slug_vendor_base,
}
ruby_version_jruby? ? vars.merge("JAVA_OPTS" => default_java_opts) : vars
end
def default_process_types
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
def compile
Dir.chdir(build_path)
install_ruby
setup_language_pack_environment
allow_git do
install_language_pack_gems
build_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
"bin:#{slug_vendor_base}/bin:/usr/local/bin:/usr/bin:/bin"
end
# the relative path to the bundler directory of gems
# @return [String] resulting path
def slug_vendor_base
@slug_vendor_base ||= run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version}"
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
return @ruby_version if @ruby_version
bootstrap_bundler do |bundler_path|
old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
@ruby_version = run_stdout("env PATH=#{old_system_path}:#{bundler_path}/bin GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
end
if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
# for backwards compatibility.
# this will go away in the future
@ruby_version = ENV['RUBY_VERSION']
@ruby_version_env_var = true
elsif @ruby_version == "No ruby version specified"
@ruby_version = nil
else
@ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
@ruby_version_env_var = false
end
@ruby_version
end
# bootstraps bundler so we can pull the ruby version
def bootstrap_bundler(&block)
Dir.mktmpdir("bundler-") do |tmpdir|
Dir.chdir(tmpdir) do
run("curl #{VENDOR_URL}/#{BUNDLER_GEM_PATH}.tgz -s -o - | tar xzf -")
end
yield tmpdir
end
end
# determine if we're using rbx
# @return [Boolean] true if we are and false if we aren't
def ruby_version_rbx?
ruby_version ? ruby_version.match(/^rbx-/) : false
end
# determine if we're using jruby
# @return [Boolean] true if we are and false if we aren't
def ruby_version_jruby?
ruby_version ? ruby_version.match(/^jruby-/) : false
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
"-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
return @ruby_versions if @ruby_versions
Dir.mktmpdir("ruby_versions-") do |tmpdir|
Dir.chdir(tmpdir) do
run("curl -O #{VENDOR_URL}/ruby_versions.yml")
@ruby_versions = YAML::load_file("ruby_versions.yml")
end
end
@ruby_versions
end
# sets up the environment variables for the build process
def setup_language_pack_environment
setup_ruby_install_env
default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = "#{ruby_install_binstub_path}:#{default_config_vars["PATH"]}"
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build_ruby?
!ruby_version_jruby? && ruby_version != "ruby-1.9.3"
end
# install the vendored ruby
# @note this only installs if we detect RUBY_VERSION in the environment
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
return false unless ruby_version
invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
if build_ruby?
FileUtils.mkdir_p(build_ruby_path)
Dir.chdir(build_ruby_path) do
ruby_vm = ruby_version_rbx? ? "rbx" : "ruby"
run("curl #{VENDOR_URL}/#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz -s -o - | tar zxf -")
end
error invalid_ruby_version_message unless $?.success?
end
FileUtils.mkdir_p(slug_vendor_ruby)
Dir.chdir(slug_vendor_ruby) do
run("curl #{VENDOR_URL}/#{ruby_version}.tgz -s -o - | tar zxf -")
end
error invalid_ruby_version_message unless $?.success?
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_ruby}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
if !@ruby_version_env_var
topic "Using Ruby version: #{ruby_version}"
else
topic "Using RUBY_VERSION: #{ruby_version}"
puts "WARNING: ENV['RUBY_VERSION'] has been deprecated. Please use Gemfile specification instead."
puts "See https://devcenter.heroku.com/articles/ruby-versions"
end
true
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
if build_ruby?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version_jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
# list of default gems to vendor into the slug
# @return [Array] resluting list of gems
def gems
[BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug
def install_language_pack_gems
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
gems.each do |gem|
run("curl #{VENDOR_URL}/#{gem}.tgz -s -o - | tar xzf -")
end
Dir["bin/*"].each {|path| run("chmod 755 #{path}") }
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
binaries.each {|binary| install_binary(binary) }
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
run("curl #{VENDOR_URL}/#{name}.tgz -s -o - | tar xzf -")
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
FileUtils.rm File.join('bin', File.basename(path)), :force => true
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
run("curl #{VENDOR_URL}/#{LIBYAML_PATH}.tgz -s -o - | tar xzf -")
end
end
# runs bundler to install the dependencies
def build_bundler
log("bundle") do
bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
bundle_command = "bundle install --without #{bundle_without} --path vendor/bundle --binstubs bin/"
unless File.exist?("Gemfile.lock")
error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
end
if has_windows_gemfile_lock?
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache_load ".bundle"
end
cache_load "vendor/bundle"
version = run("env RUBYOPT=\"#{syck_hack}\" bundle version").strip
topic("Installing dependencies using #{version}")
bundler_output = ""
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include")
yaml_lib = File.expand_path("#{libyaml_dir}/lib")
pwd = run("pwd").chomp
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:$CPATH CPPATH=#{yaml_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\""
puts "Running: #{bundle_command}"
bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
end
if $?.success?
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
run "bundle clean"
cache_store ".bundle"
cache_store "vendor/bundle"
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
if bundler_output.match(/Installing sqlite3 \([\w.]+\) with native extensions Unfortunately/)
error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
http://devcenter.heroku.com/articles/how-do-i-use-sqlite3-for-development
ERROR
end
error error_message
end
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
# RUBYOPT line that requires the vendored syck compatibility shim for
# rubies older than 1.9.3 (which still bundled syck).
# The local holding the app's ruby version is renamed so it no longer
# shadows the #ruby_version method defined on this class.
# @return [String] require string if needed or else an empty string
def syck_hack
  syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
  app_ruby_version = run('ruby -e "puts RUBY_VERSION"').chomp
  # < 1.9.3 includes syck, so we need to use the syck hack
  if Gem::Version.new(app_ruby_version) < Gem::Version.new("1.9.3")
    "-r #{syck_hack_file}"
  else
    ""
  end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
log("create_database_yml") do
return unless File.directory?("config")
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
# add bundler to the load path
# @note it sets a flag, so the path can only be loaded once
def add_bundler_to_load_path
return if @bundler_loadpath
$: << File.expand_path(Dir["#{slug_vendor_base}/gems/bundler*/lib"].first)
@bundler_loadpath = true
end
# detects whether the Gemfile.lock contains the Windows platform
# @return [Boolean] true if the Gemfile.lock was created on Windows
def has_windows_gemfile_lock?
lockfile_parser.platforms.detect do |platform|
/mingw|mswin/.match(platform.os) if platform.is_a?(Gem::Platform)
end
end
# detects if a gem is in the bundle.
# @param [String] name of the gem in question
# @return [String, nil] if it finds the gem, it will return the line from bundle show or nil if nothing is found.
def gem_is_bundled?(gem)
@bundler_gems ||= lockfile_parser.specs.map(&:name)
@bundler_gems.include?(gem)
end
# setup the lockfile parser
# @return [Bundler::LockfileParser] a Bundler::LockfileParser
def lockfile_parser
add_bundler_to_load_path
require "bundler"
@lockfile_parser ||= Bundler::LockfileParser.new(File.read("Gemfile.lock"))
end
# detects if a rake task is defined in the app
# @param [String] the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
run("env PATH=$PATH bundle exec rake #{task} --dry-run") && $?.success?
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# param [block] block to be executed in the GIT_DIR free context
# Executes the block with GIT_DIR removed from the environment (it can
# mess with the working directory git thinks it's in), restoring the
# previous value even when the block raises — the old implementation
# leaked the deleted variable on exception.
# @param [block] blk block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # assigning nil removes the variable, matching the pre-call state
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the shared database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_shared_database_addon
gem_is_bundled?("pg") ? ['shared-database:5mb'] : []
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
gem_is_bundled?('execjs') ? [NODE_JS_BINARY_PATH] : []
end
def run_assets_precompile_rake_task
if rake_task_defined?("assets:precompile")
topic "Running: rake assets:precompile"
pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1")
end
end
end
Reduce slug size
* Keep the gem cache (including Bundler's git cache) out of the slug.
require "tmpdir"
require "rubygems"
require "language_pack"
require "language_pack/base"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.2.0.pre"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
File.exist?("Gemfile")
end
def name
"Ruby"
end
def default_addons
add_shared_database_addon
end
def default_config_vars
vars = {
"LANG" => "en_US.UTF-8",
"PATH" => default_path,
"GEM_PATH" => slug_vendor_base,
}
ruby_version_jruby? ? vars.merge("JAVA_OPTS" => default_java_opts) : vars
end
def default_process_types
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
def compile
Dir.chdir(build_path)
install_ruby
setup_language_pack_environment
allow_git do
install_language_pack_gems
build_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
"bin:#{slug_vendor_base}/bin:/usr/local/bin:/usr/bin:/bin"
end
# the relative path to the bundler directory of gems
# @return [String] resulting path
def slug_vendor_base
@slug_vendor_base ||= run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version}"
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
return @ruby_version if @ruby_version
bootstrap_bundler do |bundler_path|
old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
@ruby_version = run_stdout("env PATH=#{old_system_path}:#{bundler_path}/bin GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
end
if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
# for backwards compatibility.
# this will go away in the future
@ruby_version = ENV['RUBY_VERSION']
@ruby_version_env_var = true
elsif @ruby_version == "No ruby version specified"
@ruby_version = nil
else
@ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
@ruby_version_env_var = false
end
@ruby_version
end
# bootstraps bundler so we can pull the ruby version
def bootstrap_bundler(&block)
Dir.mktmpdir("bundler-") do |tmpdir|
Dir.chdir(tmpdir) do
run("curl #{VENDOR_URL}/#{BUNDLER_GEM_PATH}.tgz -s -o - | tar xzf -")
end
yield tmpdir
end
end
# determine if we're using rbx
# @return [Boolean] true if we are and false if we aren't
def ruby_version_rbx?
ruby_version ? ruby_version.match(/^rbx-/) : false
end
# determine if we're using jruby
# @return [Boolean] true if we are and false if we aren't
def ruby_version_jruby?
ruby_version ? ruby_version.match(/^jruby-/) : false
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
"-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
return @ruby_versions if @ruby_versions
Dir.mktmpdir("ruby_versions-") do |tmpdir|
Dir.chdir(tmpdir) do
run("curl -O #{VENDOR_URL}/ruby_versions.yml")
@ruby_versions = YAML::load_file("ruby_versions.yml")
end
end
@ruby_versions
end
# sets up the environment variables for the build process
def setup_language_pack_environment
setup_ruby_install_env
default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = "#{ruby_install_binstub_path}:#{default_config_vars["PATH"]}"
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build_ruby?
!ruby_version_jruby? && ruby_version != "ruby-1.9.3"
end
# install the vendored ruby
# @note this only installs if we detect RUBY_VERSION in the environment
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
return false unless ruby_version
invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
if build_ruby?
FileUtils.mkdir_p(build_ruby_path)
Dir.chdir(build_ruby_path) do
ruby_vm = ruby_version_rbx? ? "rbx" : "ruby"
run("curl #{VENDOR_URL}/#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz -s -o - | tar zxf -")
end
error invalid_ruby_version_message unless $?.success?
end
FileUtils.mkdir_p(slug_vendor_ruby)
Dir.chdir(slug_vendor_ruby) do
run("curl #{VENDOR_URL}/#{ruby_version}.tgz -s -o - | tar zxf -")
end
error invalid_ruby_version_message unless $?.success?
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_ruby}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
if !@ruby_version_env_var
topic "Using Ruby version: #{ruby_version}"
else
topic "Using RUBY_VERSION: #{ruby_version}"
puts "WARNING: ENV['RUBY_VERSION'] has been deprecated. Please use Gemfile specification instead."
puts "See https://devcenter.heroku.com/articles/ruby-versions"
end
true
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
if build_ruby?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version_jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
# list of default gems to vendor into the slug
# @return [Array] resluting list of gems
def gems
[BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug
def install_language_pack_gems
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
gems.each do |gem|
run("curl #{VENDOR_URL}/#{gem}.tgz -s -o - | tar xzf -")
end
Dir["bin/*"].each {|path| run("chmod 755 #{path}") }
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
binaries.each {|binary| install_binary(binary) }
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
run("curl #{VENDOR_URL}/#{name}.tgz -s -o - | tar xzf -")
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
FileUtils.rm File.join('bin', File.basename(path)), :force => true
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
run("curl #{VENDOR_URL}/#{LIBYAML_PATH}.tgz -s -o - | tar xzf -")
end
end
# runs bundler to install the dependencies
def build_bundler
log("bundle") do
bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
bundle_command = "bundle install --without #{bundle_without} --path vendor/bundle --binstubs bin/"
unless File.exist?("Gemfile.lock")
error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
end
if has_windows_gemfile_lock?
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache_load ".bundle"
end
cache_load "vendor/bundle"
version = run("env RUBYOPT=\"#{syck_hack}\" bundle version").strip
topic("Installing dependencies using #{version}")
bundler_output = ""
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include")
yaml_lib = File.expand_path("#{libyaml_dir}/lib")
pwd = run("pwd").chomp
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:$CPATH CPPATH=#{yaml_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\""
puts "Running: #{bundle_command}"
bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
end
if $?.success?
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
run "bundle clean"
cache_store ".bundle"
cache_store "vendor/bundle"
# Keep gem cache out of the slug
FileUtils.rm_rf("vendor/bundle/ruby/1.9.1/cache")
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
if bundler_output.match(/Installing sqlite3 \([\w.]+\) with native extensions Unfortunately/)
error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
http://devcenter.heroku.com/articles/how-do-i-use-sqlite3-for-development
ERROR
end
error error_message
end
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
# RUBYOPT line that requires the vendored syck compatibility shim for
# rubies older than 1.9.3 (which still bundled syck).
# The local holding the app's ruby version is renamed so it no longer
# shadows the #ruby_version method defined on this class.
# @return [String] require string if needed or else an empty string
def syck_hack
  syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
  app_ruby_version = run('ruby -e "puts RUBY_VERSION"').chomp
  # < 1.9.3 includes syck, so we need to use the syck hack
  if Gem::Version.new(app_ruby_version) < Gem::Version.new("1.9.3")
    "-r #{syck_hack_file}"
  else
    ""
  end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
log("create_database_yml") do
return unless File.directory?("config")
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
# add bundler to the load path
# @note it sets a flag, so the path can only be loaded once
def add_bundler_to_load_path
return if @bundler_loadpath
$: << File.expand_path(Dir["#{slug_vendor_base}/gems/bundler*/lib"].first)
@bundler_loadpath = true
end
# detects whether the Gemfile.lock contains the Windows platform
# @return [Boolean] true if the Gemfile.lock was created on Windows
def has_windows_gemfile_lock?
lockfile_parser.platforms.detect do |platform|
/mingw|mswin/.match(platform.os) if platform.is_a?(Gem::Platform)
end
end
# detects if a gem is in the bundle.
# @param [String] name of the gem in question
# @return [String, nil] if it finds the gem, it will return the line from bundle show or nil if nothing is found.
def gem_is_bundled?(gem)
@bundler_gems ||= lockfile_parser.specs.map(&:name)
@bundler_gems.include?(gem)
end
# setup the lockfile parser
# @return [Bundler::LockfileParser] a Bundler::LockfileParser
def lockfile_parser
add_bundler_to_load_path
require "bundler"
@lockfile_parser ||= Bundler::LockfileParser.new(File.read("Gemfile.lock"))
end
# detects if a rake task is defined in the app
# @param [String] the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
run("env PATH=$PATH bundle exec rake #{task} --dry-run") && $?.success?
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# param [block] block to be executed in the GIT_DIR free context
# Executes the block with GIT_DIR removed from the environment (it can
# mess with the working directory git thinks it's in), restoring the
# previous value even when the block raises — the old implementation
# leaked the deleted variable on exception.
# @param [block] blk block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # assigning nil removes the variable, matching the pre-call state
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the shared database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_shared_database_addon
  if gem_is_bundled?("pg")
    ['shared-database:5mb']
  else
    []
  end
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
  if gem_is_bundled?('execjs')
    [NODE_JS_BINARY_PATH]
  else
    []
  end
end
# runs `rake assets:precompile` when the app defines that task
def run_assets_precompile_rake_task
  return unless rake_task_defined?("assets:precompile")
  topic "Running: rake assets:precompile"
  pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1")
end
end
|
require 'findit'
require 'findit/feature/flat-data-set'
module FindIt
module Feature
module Travis_CO_TX_US
class VotingPlaceFactory
  # Build a concrete voting place class for a given election,
  # wiring in the database handle, feature type, map markers and dataset.
  # @param db database handle used for precinct lookups
  # @param [String] election election identifier for the CSV dataset
  # @return [Class] a subclass of AbstractVotingPlace
  def self.create(db, election, options = {})
    place_class = Class.new(AbstractVotingPlace)
    place_class.instance_variable_set(:@db, db)
    place_class.instance_variable_set(:@type, :VOTING_PLACE)
    marker = FindIt::MapMarker.new(
      "/mapicons/vote_icon.png",
      :height => 32, :width => 32)
    shadow = FindIt::MapMarker.new(
      "/mapicons/vote_icon_shadow.png",
      :height => 32, :width => 59)
    place_class.instance_variable_set(:@marker, marker)
    place_class.instance_variable_set(:@marker_shadow, shadow)
    place_class.set_election(election)
    place_class
  end
end
class AbstractVotingPlace < FindIt::BaseFeature
  # class instance variables will be initialized by factory method
  @type = nil
  @marker = nil
  @marker_shadow = nil
  @db = nil
  @voting_places = nil
  # load the voting place dataset for the given election, indexed by precinct
  # @param [String] election identifier used to locate "Voting_Places_<election>.csv"
  def self.set_election(election)
    @voting_places = FindIt::Feature::FlatDataSet.load(__FILE__, "voting-places", "Voting_Places_#{election}.csv", :index => :precinct) do |row|
      #
      # Example Row:
      #
      # <CSV::Row
      # "precinct":"360"
      # "name":"Bowie High School"
      # "street":"4103 West Slaughter Ln"
      # "city":"Austin"
      # "state":"TX"
      # "geo_longitude":"-97.8573487400007"
      # "geo_latitude":"30.1889148140537"
      # "geo_accuracy":"house"
      # "notes":nil>
      #
      lng = row["geo_longitude"].to_f
      lat = row["geo_latitude"].to_f
      pct = row["precinct"].to_i
      note = "precinct #{pct}"
      note += " - #{row["notes"]}" if row["notes"]
      {
        :precinct => pct,
        :name => row["name"],
        :street => row["street"],
        :city => row["city"],
        :state => row["state"],
        :note => note,
        :location => FindIt::Location.new(lat, lng, :DEG),
      }
    end # load_csv_data_set_with_location
  end # self.set_election
  # find the voting place for the precinct containing the origin point
  # @param origin location with #lat/#lng accessors
  # @return [AbstractVotingPlace, nil] nil when the origin is outside all
  #   precincts or the precinct has no known voting place
  def self.closest(origin)
    sth = @db.execute(%q{SELECT * FROM travis_co_tx_us_voting_districts
      WHERE ST_Contains(the_geom, ST_Transform(ST_SetSRID(ST_Point(?, ?), 4326), 3081))},
      origin.lng, origin.lat)
    ds = sth.fetch_all
    sth.finish
    case ds.count
    when 0
      return nil
    when 1
      rec = ds.first
    else
      # BUG FIX: previously interpolated bare `lat`/`lng`, which are not
      # defined here and raised NameError instead of this diagnostic.
      raise "overlapping precincts at location lat=#{origin.lat}, lng=#{origin.lng}"
    end
    precinct = rec[:p_vtd].to_i
    rec = @voting_places[precinct]
    return nil unless rec
    new(rec[:location],
      :title => "Your voting place",
      :name => rec[:name],
      :address => rec[:street],
      :city => rec[:city],
      :state => rec[:state],
      :note => rec[:note],
      :origin => origin
    )
  end
end
end
end
end
Added note that voting place was from the May 12 election.
require 'findit'
require 'findit/feature/flat-data-set'
module FindIt
module Feature
module Travis_CO_TX_US
class VotingPlaceFactory
  # Build a concrete voting place class for a given election,
  # wiring in the database handle, feature type, map markers and dataset.
  # @param db database handle used for precinct lookups
  # @param [String] election election identifier for the CSV dataset
  # @return [Class] a subclass of AbstractVotingPlace
  def self.create(db, election, options = {})
    place_class = Class.new(AbstractVotingPlace)
    place_class.instance_variable_set(:@db, db)
    place_class.instance_variable_set(:@type, :VOTING_PLACE)
    marker = FindIt::MapMarker.new(
      "/mapicons/vote_icon.png",
      :height => 32, :width => 32)
    shadow = FindIt::MapMarker.new(
      "/mapicons/vote_icon_shadow.png",
      :height => 32, :width => 59)
    place_class.instance_variable_set(:@marker, marker)
    place_class.instance_variable_set(:@marker_shadow, shadow)
    place_class.set_election(election)
    place_class
  end
end
class AbstractVotingPlace < FindIt::BaseFeature
  # class instance variables will be initialized by factory method
  @type = nil
  @marker = nil
  @marker_shadow = nil
  @db = nil
  @voting_places = nil
  # load the voting place dataset for the given election, indexed by precinct
  # @param [String] election identifier used to locate "Voting_Places_<election>.csv"
  def self.set_election(election)
    @voting_places = FindIt::Feature::FlatDataSet.load(__FILE__, "voting-places", "Voting_Places_#{election}.csv", :index => :precinct) do |row|
      #
      # Example Row:
      #
      # <CSV::Row
      # "precinct":"360"
      # "name":"Bowie High School"
      # "street":"4103 West Slaughter Ln"
      # "city":"Austin"
      # "state":"TX"
      # "geo_longitude":"-97.8573487400007"
      # "geo_latitude":"30.1889148140537"
      # "geo_accuracy":"house"
      # "notes":nil>
      #
      lng = row["geo_longitude"].to_f
      lat = row["geo_latitude"].to_f
      pct = row["precinct"].to_i
      # FIXME - need a more general note
      note = "IMPORTANT: This location was for the May 12, 2012 election.\nIt may change in future elections.\n"
      note += "precinct #{pct}"
      note += " - #{row["notes"]}" if row["notes"]
      {
        :precinct => pct,
        :name => row["name"],
        :street => row["street"],
        :city => row["city"],
        :state => row["state"],
        :note => note,
        :location => FindIt::Location.new(lat, lng, :DEG),
      }
    end # load_csv_data_set_with_location
  end # self.set_election
  # find the voting place for the precinct containing the origin point
  # @param origin location with #lat/#lng accessors
  # @return [AbstractVotingPlace, nil] nil when the origin is outside all
  #   precincts or the precinct has no known voting place
  def self.closest(origin)
    sth = @db.execute(%q{SELECT * FROM travis_co_tx_us_voting_districts
      WHERE ST_Contains(the_geom, ST_Transform(ST_SetSRID(ST_Point(?, ?), 4326), 3081))},
      origin.lng, origin.lat)
    ds = sth.fetch_all
    sth.finish
    case ds.count
    when 0
      return nil
    when 1
      rec = ds.first
    else
      # BUG FIX: previously interpolated bare `lat`/`lng`, which are not
      # defined here and raised NameError instead of this diagnostic.
      raise "overlapping precincts at location lat=#{origin.lat}, lng=#{origin.lng}"
    end
    precinct = rec[:p_vtd].to_i
    rec = @voting_places[precinct]
    return nil unless rec
    new(rec[:location],
      :title => "Your voting place",
      :name => rec[:name],
      :address => rec[:street],
      :city => rec[:city],
      :state => rec[:state],
      :note => rec[:note],
      :origin => origin
    )
  end
end
end
end
end |
require "tmpdir"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/bundler_lockfile"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
include LanguagePack::BundlerLockfile
extend LanguagePack::BundlerLockfile
BUILDPACK_VERSION = "v61"
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.3.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
JVM_VERSION = "openjdk7-latest"
SQLITE_VERSION = "3071700"
SQLITE_PATH = "sqlite-autoconf-#{SQLITE_VERSION}"
MY_VENDOR_URL = "https://s3-eu-west-1.amazonaws.com/prod-audi-leasing-us"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
  # a Ruby app is identified solely by the presence of a Gemfile
  gemfile = "Gemfile"
  File.exist?(gemfile)
end
# build a fresh parser over the app's Gemfile.lock
# @return [Bundler::LockfileParser]
def self.lockfile_parser
  require "bundler"
  lockfile_contents = File.read("Gemfile.lock")
  Bundler::LockfileParser.new(lockfile_contents)
end
# look up the locked version of a gem from the Gemfile.lock
# @param [String] name name of the gem
# @return [Gem::Version, nil] the version, or nil when the gem is not bundled
def self.gem_version(name)
  version = nil
  bootstrap_bundler do |bundler_path|
    $: << "#{bundler_path}/gems/bundler-#{LanguagePack::Ruby::BUNDLER_VERSION}/lib"
    spec = lockfile_parser.specs.find { |s| s.name == name }
    version = spec.version if spec
  end
  version
end
# human-readable name of this language pack
# @return [String] always "Ruby"
def name
  "Ruby"
end
# addons provisioned by default: just the dev database when pg is bundled
# @return [Array] addon identifiers
def default_addons
  add_dev_database_addon
end
# default config vars for the app environment
# @return [Hash] base vars, plus JVM settings when running on JRuby
def default_config_vars
  vars = {
    "LANG"     => "en_US.UTF-8",
    "PATH"     => default_path,
    "GEM_PATH" => slug_vendor_base,
  }
  return vars unless ruby_version_jruby?
  vars.merge({
    "JAVA_OPTS"         => default_java_opts,
    "JRUBY_OPTS"        => default_jruby_opts,
    "JAVA_TOOL_OPTIONS" => default_java_tool_options
  })
end
# default processes to run for a bare Ruby app
# @return [Hash] process name => command
def default_process_types
  {
    "console" => "bundle exec irb",
    "rake"    => "bundle exec rake",
  }
end
# main buildpack entry point: vendors ruby (and a JVM for JRuby), sets up
# the build environment, installs gems and binaries, and precompiles assets
def compile
  Dir.chdir(build_path)
  # a checked-in vendor/bundle would shadow the freshly built one
  remove_vendor_bundle
  install_ruby
  install_jvm
  setup_language_pack_environment
  setup_profiled
  # GIT_DIR is cleared for these steps because it confuses bundler/git
  allow_git do
    install_language_pack_gems
    build_bundler
    create_database_yml
    install_binaries
    run_assets_precompile_rake_task
  end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
  ["bin", "#{slug_vendor_base}/bin", "/usr/local/bin", "/usr/bin", "/bin"].join(":")
end
# the relative path to the bundler directory of gems
# @return [String] resulting path (memoized)
def slug_vendor_base
  if @slug_vendor_base
    @slug_vendor_base
  elsif @ruby_version == "ruby-1.8.7"
    # 1.8.7 uses a fixed gem directory layout
    @slug_vendor_base = "vendor/bundle/1.8"
  else
    # ask ruby itself for the engine/ABI specific gem path
    @slug_vendor_base = run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
  end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path, e.g. "vendor/ruby-1.9.3"
def slug_vendor_ruby
  "vendor/#{ruby_version}"
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# the absolute path of the build ruby to use during the buildpack
# @note placed under /tmp so the build-only ruby is not shipped in the slug
# @return [String] resulting path
def build_ruby_path
  "/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
  # memoized via a separate flag because the detected value may legitimately be nil
  return @ruby_version if @ruby_version_run
  @ruby_version_run = true
  bootstrap_bundler do |bundler_path|
    old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
    @ruby_version = run_stdout("env PATH=#{old_system_path}:#{bundler_path}/bin GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
  end
  if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
    # for backwards compatibility.
    # this will go away in the future
    @ruby_version = ENV['RUBY_VERSION']
    @ruby_version_env_var = true
  elsif @ruby_version == "No ruby version specified"
    @ruby_version = nil
  else
    # normalize bundler's output into a dash-separated version string
    @ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
    @ruby_version_env_var = false
  end
  @ruby_version
end
# determine if we're using rbx
# @return [MatchData, Boolean] truthy if we are and false if we aren't
def ruby_version_rbx?
  version = ruby_version
  version ? version.match(/rbx-/) : false
end
# determine if we're using jruby
# @return [MatchData, Boolean] truthy if we are and false if we aren't
def ruby_version_jruby?
  @ruby_version_jruby ||= begin
    version = ruby_version
    version ? version.match(/jruby-/) : false
  end
end
# default JAVA_OPTS
# @return [String] string of JAVA_OPTS
def default_java_opts
  %w[-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8].join(" ")
end
# default JRUBY_OPTS
# @return [String] string of JRUBY_OPTS
def default_jruby_opts
  "-Xcompile.invokedynamic=true"
end
# default JAVA_TOOL_OPTIONS
# @return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
  "-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
  @ruby_versions ||= Dir.mktmpdir("ruby_versions-") do |tmpdir|
    Dir.chdir(tmpdir) do
      run("curl -O #{VENDOR_URL}/ruby_versions.yml")
      YAML::load_file("ruby_versions.yml")
    end
  end
end
# sets up the environment variables for the build process
def setup_language_pack_environment
  setup_ruby_install_env
  vars = default_config_vars
  # only fill in vars the platform has not already set
  vars.each { |key, value| ENV[key] ||= value }
  ENV["GEM_HOME"] = slug_vendor_base
  ENV["PATH"] = "#{ruby_install_binstub_path}:#{vars["PATH"]}"
end
# sets up the profile.d script for this buildpack
def setup_profiled
  set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
  set_env_default  "LANG", "en_US.UTF-8"
  set_env_override "PATH", "$HOME/bin:$HOME/#{slug_vendor_base}/bin:$PATH"
  return unless ruby_version_jruby?
  set_env_default "JAVA_OPTS", default_java_opts
  set_env_default "JRUBY_OPTS", default_jruby_opts
  set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build_ruby?
  @build_ruby ||= begin
    prebuilt = %w{ruby-1.9.3 ruby-2.0.0}.include?(ruby_version)
    !ruby_version_rbx? && !ruby_version_jruby? && !prebuilt
  end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  # nothing to vendor when no ruby version could be detected
  return false unless ruby_version
  invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
  # rubies without a usable prebuilt binary get a separate "build" ruby in /tmp
  if build_ruby?
    FileUtils.mkdir_p(build_ruby_path)
    Dir.chdir(build_ruby_path) do
      ruby_vm = ruby_version_rbx? ? "rbx" : "ruby"
      run("curl #{VENDOR_URL}/#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz -s -o - | tar zxf -")
    end
    error invalid_ruby_version_message unless $?.success?
  end
  # vendor the runtime ruby into the slug
  FileUtils.mkdir_p(slug_vendor_ruby)
  Dir.chdir(slug_vendor_ruby) do
    run("curl #{VENDOR_URL}/#{ruby_version}.tgz -s -o - | tar zxf -")
  end
  error invalid_ruby_version_message unless $?.success?
  # expose the vendored ruby's executables through app-local bin/ symlinks
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir["#{slug_vendor_ruby}/bin/*"].each do |bin|
    run("ln -s ../#{bin} #{bin_dir}")
  end
  if !@ruby_version_env_var
    topic "Using Ruby version: #{ruby_version}"
  else
    # RUBY_VERSION env var selection is deprecated; warn loudly
    topic "Using RUBY_VERSION: #{ruby_version}"
    puts "WARNING: RUBY_VERSION support has been deprecated and will be removed entirely on August 1, 2012."
    puts "See https://devcenter.heroku.com/articles/ruby-versions#selecting_a_version_of_ruby for more information."
  end
  true
end
# vendors JVM into the slug for JRuby
def install_jvm
  return unless ruby_version_jruby?
  topic "Installing JVM: #{JVM_VERSION}"
  FileUtils.mkdir_p(slug_vendor_jvm)
  Dir.chdir(slug_vendor_jvm) do
    run("curl #{JVM_BASE_URL}/#{JVM_VERSION}.tar.gz -s -o - | tar xzf -")
  end
  # expose the JVM's executables through app-local bin/ symlinks
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
    run("ln -s ../#{bin} #{bin_dir}")
  end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
  @ruby_install_binstub_path ||=
    if build_ruby?
      File.join(build_ruby_path, "bin")
    elsif ruby_version
      File.join(slug_vendor_ruby, "bin")
    else
      ""
    end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
  ENV["PATH"] = [ruby_install_binstub_path, ENV["PATH"]].join(":")
  ENV['JAVA_OPTS'] = default_java_opts if ruby_version_jruby?
end
# list of default gems to vendor into the slug
# @return [Array] resulting list of gems (currently only bundler)
def gems
  [BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug
def install_language_pack_gems
  FileUtils.mkdir_p(slug_vendor_base)
  Dir.chdir(slug_vendor_base) do
    gems.each { |gem| run("curl #{VENDOR_URL}/#{gem}.tgz -s -o - | tar xzf -") }
    # unpacked executables must be world-executable
    Dir["bin/*"].each { |path| run("chmod 755 #{path}") }
  end
end
# default set of binaries to install
# @return [Array] resulting list (only node.js, and only when execjs is bundled)
def binaries
  add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
  binaries.each {|binary| install_binary(binary) }
  # everything dropped into bin/ must be executable
  Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir.chdir(bin_dir) do
    run("curl #{VENDOR_URL}/#{name}.tgz -s -o - | tar xzf -")
  end
end
# removes a binary from the slug
# @param [String] path relative path of the binary on the slug
def uninstall_binary(path)
  binary = File.join('bin', File.basename(path))
  FileUtils.rm binary, :force => true
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] dir tmpdir to store the libyaml files
def install_libyaml(dir)
  FileUtils.mkdir_p dir
  Dir.chdir(dir) do
    run("curl #{VENDOR_URL}/#{LIBYAML_PATH}.tgz -s -o - | tar xzf -")
  end
end
# install sqlite into the LP to be referenced for psych compilation
# @param [String] dir tmpdir to store the sqlite files
def install_sqlite(dir)
  FileUtils.mkdir_p dir
  Dir.chdir(dir) do
    run("curl #{MY_VENDOR_URL}/#{SQLITE_PATH}.tar.gz -s -o - | tar xzf - 2>&1")
  end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?
  if File.exist?("vendor/bundle")
    topic "WARNING: Removing `vendor/bundle`."
    puts "Checking in `vendor/bundle` is not supported. Please remove this directory"
    puts "and add it to your .gitignore. To vendor your gems with Bundler, use"
    puts "`bundle pack` instead."
    FileUtils.rm_rf("vendor/bundle")
  end
end
# runs bundler to install the dependencies
# builds libyaml and sqlite into a tmpdir first so native extensions
# (psych, sqlite3) can compile against them
def build_bundler
  log("bundle") do
    bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
    bundle_command = "bundle install --without #{bundle_without} --path vendor/bundle --binstubs vendor/bundle/bin"
    unless File.exist?("Gemfile.lock")
      error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
    end
    if has_windows_gemfile_lock?
      # a Windows-generated lockfile cannot be trusted for native gems:
      # drop it and let bundler fully re-resolve (no --deployment possible)
      topic "WARNING: Removing `Gemfile.lock` because it was generated on Windows."
      puts "Bundler will do a full resolve so native gems are handled properly."
      puts "This may result in unexpected gem versions being used in your app."
      log("bundle", "has_windows_gemfile_lock")
      File.unlink("Gemfile.lock")
    else
      # using --deployment is preferred if we can
      bundle_command += " --deployment"
      cache_load ".bundle"
    end
    version = run_stdout("bundle version").strip
    topic("Installing dependencies using #{version}")
    load_bundler_cache
    bundler_output = ""
    Dir.mktmpdir("yamltag-") do |tmpdir|
      libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
      sqlite_dir = "#{tmpdir}/#{SQLITE_PATH}"
      puts "Installing libyaml to #{libyaml_dir}"
      install_libyaml(libyaml_dir)
      puts "Installing SQLite to #{sqlite_dir}"
      install_sqlite(sqlite_dir)
      # need to setup compile environment for the psych gem
      yaml_include = File.expand_path("#{libyaml_dir}/include")
      yaml_lib = File.expand_path("#{libyaml_dir}/lib")
      sqlite_include = File.expand_path("#{sqlite_dir}/include")
      sqlite_lib = File.expand_path("#{sqlite_dir}/lib")
      pwd = run("pwd").chomp
      bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
      # we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
      # codon since it uses bundler.
      env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:#{sqlite_include}:$CPATH CPPATH=#{yaml_include}:#{sqlite_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:#{sqlite_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\""
      env_vars += " BUNDLER_LIB_PATH=#{bundler_path}" if ruby_version == "ruby-1.8.7"
      sqlite_command = "gem install sqlite3 -- --with-sqlite3-dir=#{sqlite_dir}/#{SQLITE_PATH}/"
      puts "Running: #{sqlite_command}"
      bundler_output << pipe(sqlite_command)
      puts "Running: #{bundle_command}"
      bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
    end
    if $?.success?
      log "bundle", :status => "success"
      puts "Cleaning up the bundler cache."
      pipe "bundle clean 2> /dev/null"
      cache_store ".bundle"
      cache_store "vendor/bundle"
      # Keep gem cache out of the slug
      FileUtils.rm_rf("#{slug_vendor_base}/cache")
      # symlink binstubs
      bin_dir = "bin"
      FileUtils.mkdir_p bin_dir
      Dir["#{slug_vendor_base}/bin/*"].each do |bin|
        run("ln -s ../#{bin} #{bin_dir}") unless File.exist?("#{bin_dir}/#{bin}")
      end
    else
      log "bundle", :status => "failure"
      error_message = "Failed to install gems via Bundler."
      # if bundler_output.match(/Installing sqlite3 \([\w.]+\) with native extensions\s+Gem::Installer::ExtensionBuildError: ERROR: Failed to build gem native extension./)
      # error_message += <<ERROR
      #
      #
      #Detected sqlite3 gem which is not supported on Heroku.
      #http://devcenter.heroku.com/articles/how-do-i-use-sqlite3-for-development
      #ERROR
      # end
      error error_message
    end
  end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
  syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
  # NOTE: local renamed from `ruby_version` to avoid shadowing the
  # #ruby_version method; this queries the ruby actually on PATH
  actual_ruby_version = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
  # rubies before 1.9.3 bundle syck, which needs the compatibility shim
  if Gem::Version.new(actual_ruby_version) < Gem::Version.new("1.9.3")
    "-r#{syck_hack_file}"
  else
    ""
  end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
  log("create_database_yml") do
    return unless File.directory?("config")
    topic("Writing config/database.yml to read from DATABASE_URL")
    File.open("config/database.yml", "w") do |file|
      # NOTE: the heredoc below is the literal ERB template written to the
      # app; it is evaluated by Rails at boot time, not by the buildpack.
      file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
      DATABASE_YML
    end
  end
end
# add bundler to the load path
# @note guarded by a flag, so the path is appended at most once
def add_bundler_to_load_path
  return if @bundler_loadpath
  bundler_lib = Dir["#{slug_vendor_base}/gems/bundler*/lib"].first
  $LOAD_PATH << File.expand_path(bundler_lib)
  @bundler_loadpath = true
end
# detects whether the Gemfile.lock contains the Windows platform
# @return [Gem::Platform, nil] truthy when the Gemfile.lock was created on Windows
def has_windows_gemfile_lock?
  windows_os = /mingw|mswin/
  lockfile_parser.platforms.detect do |platform|
    windows_os.match(platform.os) if platform.is_a?(Gem::Platform)
  end
end
# detects if a gem is in the bundle.
# @param [String] gem name of the gem in question
# @return [Boolean] true when the gem appears in the Gemfile.lock specs
def gem_is_bundled?(gem)
  # memoize the spec names so the lockfile is only walked once
  @bundler_gems ||= lockfile_parser.specs.map { |spec| spec.name }
  @bundler_gems.include?(gem)
end
# setup the lockfile parser
# @return [Bundler::LockfileParser] memoized parser over Gemfile.lock
def lockfile_parser
  add_bundler_to_load_path
  @lockfile_parser = LanguagePack::Ruby.lockfile_parser unless @lockfile_parser
  @lockfile_parser
end
# detects if a rake task is defined in the app
# @param [String] task the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
  dry_run = run("env PATH=$PATH bundle exec rake #{task} --dry-run")
  dry_run && $?.success?
end
# executes the block with the GIT_DIR environment variable removed since it
# can mess with the current working directory git thinks it's in
# @param [block] blk block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # restore GIT_DIR even when the block raises, so a failed build step
  # never leaks a mutated environment to the caller
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
  if gem_is_bundled?("pg")
    ['heroku-postgresql:dev']
  else
    []
  end
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
  if gem_is_bundled?('execjs')
    [NODE_JS_BINARY_PATH]
  else
    []
  end
end
# runs `rake assets:precompile` when the app defines it, reporting the elapsed time
def run_assets_precompile_rake_task
  return unless rake_task_defined?("assets:precompile")
  require 'benchmark'
  topic "Running: rake assets:precompile"
  elapsed = Benchmark.realtime do
    pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1")
  end
  puts "Asset precompilation completed (#{"%.2f" % elapsed}s)" if $?.success?
end
# cache path holding the installed bundle between builds
# @return [String]
def bundler_cache
  File.join("vendor", "bundle")
end
# loads the cached bundle, purging it when the cache is stale or known-broken
# (ruby/rubygems/bundler version changes, layouts from buggy buildpack versions),
# then records the current tool versions for the next build's staleness checks.
# NOTE: all File.exists? calls replaced with File.exist? — the former is
# deprecated and removed in Ruby 3.2.
def load_bundler_cache
  cache_load "vendor"
  full_ruby_version = run_stdout(%q(ruby -v)).chomp
  rubygems_version = run_stdout(%q(gem -v)).chomp
  heroku_metadata = "vendor/heroku"
  old_rubygems_version = nil
  ruby_version_cache = "#{heroku_metadata}/ruby_version"
  buildpack_version_cache = "#{heroku_metadata}/buildpack_version"
  bundler_version_cache = "#{heroku_metadata}/bundler_version"
  rubygems_version_cache = "#{heroku_metadata}/rubygems_version"
  old_rubygems_version = File.read(rubygems_version_cache).chomp if File.exist?(rubygems_version_cache)
  # fix bug from v37 deploy
  if File.exist?("vendor/ruby_version")
    puts "Broken cache detected. Purging build cache."
    cache_clear("vendor")
    FileUtils.rm_rf("vendor/ruby_version")
    purge_bundler_cache
  # fix bug introduced in v38
  elsif !File.exist?(buildpack_version_cache) && File.exist?(ruby_version_cache)
    puts "Broken cache detected. Purging build cache."
    purge_bundler_cache
  elsif cache_exists?(bundler_cache) && File.exist?(ruby_version_cache) && full_ruby_version != File.read(ruby_version_cache).chomp
    puts "Ruby version change detected. Clearing bundler cache."
    puts "Old: #{File.read(ruby_version_cache).chomp}"
    puts "New: #{full_ruby_version}"
    purge_bundler_cache
  end
  # fix git gemspec bug from Bundler 1.3.0+ upgrade
  if File.exist?(bundler_cache) && !File.exist?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
    puts "Old bundler cache detected. Clearing bundler cache."
    purge_bundler_cache
  end
  # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
  if (!File.exist?(rubygems_version_cache) ||
      (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
      File.exist?(ruby_version_cache) && File.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
    puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
    purge_bundler_cache
  end
  # record current versions so the next build can detect changes
  FileUtils.mkdir_p(heroku_metadata)
  File.open(ruby_version_cache, 'w') do |file|
    file.puts full_ruby_version
  end
  File.open(buildpack_version_cache, 'w') do |file|
    file.puts BUILDPACK_VERSION
  end
  File.open(bundler_version_cache, 'w') do |file|
    file.puts BUNDLER_VERSION
  end
  File.open(rubygems_version_cache, 'w') do |file|
    file.puts rubygems_version
  end
  cache_store heroku_metadata
end
# wipes the bundler cache both locally and in the build cache, then
# reinstalls the language pack gems that lived inside it
def purge_bundler_cache
  FileUtils.rm_rf(bundler_cache)
  cache_clear bundler_cache
  # need to reinstall language pack gems
  install_language_pack_gems
end
end
[TEST] check that sqlite3.h exists
require "tmpdir"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/bundler_lockfile"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
include LanguagePack::BundlerLockfile
extend LanguagePack::BundlerLockfile
BUILDPACK_VERSION = "v61"
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.3.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node-#{NODE_VERSION}"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
JVM_VERSION = "openjdk7-latest"
SQLITE_VERSION = "3071700"
SQLITE_PATH = "sqlite-autoconf-#{SQLITE_VERSION}"
MY_VENDOR_URL = "https://s3-eu-west-1.amazonaws.com/prod-audi-leasing-us"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
  # a Ruby app is identified solely by the presence of a Gemfile
  gemfile = "Gemfile"
  File.exist?(gemfile)
end
# build a fresh parser over the app's Gemfile.lock
# @return [Bundler::LockfileParser]
def self.lockfile_parser
  require "bundler"
  lockfile_contents = File.read("Gemfile.lock")
  Bundler::LockfileParser.new(lockfile_contents)
end
# look up the locked version of a gem from the Gemfile.lock
# @param [String] name name of the gem
# @return [Gem::Version, nil] the version, or nil when the gem is not bundled
def self.gem_version(name)
  version = nil
  bootstrap_bundler do |bundler_path|
    $: << "#{bundler_path}/gems/bundler-#{LanguagePack::Ruby::BUNDLER_VERSION}/lib"
    spec = lockfile_parser.specs.find { |s| s.name == name }
    version = spec.version if spec
  end
  version
end
# human-readable name of this language pack
# @return [String] always "Ruby"
def name
  "Ruby"
end
# addons provisioned by default: just the dev database when pg is bundled
# @return [Array] addon identifiers
def default_addons
  add_dev_database_addon
end
# default config vars for the app environment
# @return [Hash] base vars, plus JVM settings when running on JRuby
def default_config_vars
  vars = {
    "LANG"     => "en_US.UTF-8",
    "PATH"     => default_path,
    "GEM_PATH" => slug_vendor_base,
  }
  return vars unless ruby_version_jruby?
  vars.merge({
    "JAVA_OPTS"         => default_java_opts,
    "JRUBY_OPTS"        => default_jruby_opts,
    "JAVA_TOOL_OPTIONS" => default_java_tool_options
  })
end
# default processes to run for a bare Ruby app
# @return [Hash] process name => command
def default_process_types
  {
    "console" => "bundle exec irb",
    "rake"    => "bundle exec rake",
  }
end
# main buildpack entry point: vendors ruby (and a JVM for JRuby), sets up
# the build environment, installs gems and binaries, and precompiles assets
def compile
  Dir.chdir(build_path)
  # a checked-in vendor/bundle would shadow the freshly built one
  remove_vendor_bundle
  install_ruby
  install_jvm
  setup_language_pack_environment
  setup_profiled
  # GIT_DIR is cleared for these steps because it confuses bundler/git
  allow_git do
    install_language_pack_gems
    build_bundler
    create_database_yml
    install_binaries
    run_assets_precompile_rake_task
  end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
  ["bin", "#{slug_vendor_base}/bin", "/usr/local/bin", "/usr/bin", "/bin"].join(":")
end
# the relative path to the bundler directory of gems
# @return [String] resulting path (memoized)
def slug_vendor_base
  if @slug_vendor_base
    @slug_vendor_base
  elsif @ruby_version == "ruby-1.8.7"
    # 1.8.7 uses a fixed gem directory layout
    @slug_vendor_base = "vendor/bundle/1.8"
  else
    # ask ruby itself for the engine/ABI specific gem path
    @slug_vendor_base = run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
  end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path, e.g. "vendor/ruby-1.9.3"
def slug_vendor_ruby
  "vendor/#{ruby_version}"
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# the absolute path of the build ruby to use during the buildpack
# @note placed under /tmp so the build-only ruby is not shipped in the slug
# @return [String] resulting path
def build_ruby_path
  "/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
  # memoized via a separate flag because the detected value may legitimately be nil
  return @ruby_version if @ruby_version_run
  @ruby_version_run = true
  bootstrap_bundler do |bundler_path|
    old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
    @ruby_version = run_stdout("env PATH=#{old_system_path}:#{bundler_path}/bin GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
  end
  if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
    # for backwards compatibility.
    # this will go away in the future
    @ruby_version = ENV['RUBY_VERSION']
    @ruby_version_env_var = true
  elsif @ruby_version == "No ruby version specified"
    @ruby_version = nil
  else
    # normalize bundler's output into a dash-separated version string
    @ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
    @ruby_version_env_var = false
  end
  @ruby_version
end
# determine if we're using rbx
# @return [MatchData, Boolean] truthy if we are and false if we aren't
def ruby_version_rbx?
  version = ruby_version
  version ? version.match(/rbx-/) : false
end
# determine if we're using jruby
# @return [MatchData, Boolean] truthy if we are and false if we aren't
def ruby_version_jruby?
  @ruby_version_jruby ||= begin
    version = ruby_version
    version ? version.match(/jruby-/) : false
  end
end
# default JAVA_OPTS
# @return [String] string of JAVA_OPTS
def default_java_opts
  %w[-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8].join(" ")
end
# default JRUBY_OPTS
# @return [String] string of JRUBY_OPTS
def default_jruby_opts
  "-Xcompile.invokedynamic=true"
end
# default JAVA_TOOL_OPTIONS
# @return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
  "-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
  @ruby_versions ||= Dir.mktmpdir("ruby_versions-") do |tmpdir|
    Dir.chdir(tmpdir) do
      run("curl -O #{VENDOR_URL}/ruby_versions.yml")
      YAML::load_file("ruby_versions.yml")
    end
  end
end
# sets up the environment variables for the build process
def setup_language_pack_environment
  setup_ruby_install_env
  vars = default_config_vars
  # only fill in vars the platform has not already set
  vars.each { |key, value| ENV[key] ||= value }
  ENV["GEM_HOME"] = slug_vendor_base
  ENV["PATH"] = "#{ruby_install_binstub_path}:#{vars["PATH"]}"
end
# sets up the profile.d script for this buildpack
def setup_profiled
  set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
  set_env_default  "LANG", "en_US.UTF-8"
  set_env_override "PATH", "$HOME/bin:$HOME/#{slug_vendor_base}/bin:$PATH"
  return unless ruby_version_jruby?
  set_env_default "JAVA_OPTS", default_java_opts
  set_env_default "JRUBY_OPTS", default_jruby_opts
  set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build_ruby?
  @build_ruby ||= begin
    prebuilt = %w{ruby-1.9.3 ruby-2.0.0}.include?(ruby_version)
    !ruby_version_rbx? && !ruby_version_jruby? && !prebuilt
  end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  # nothing to vendor when no ruby version could be detected
  return false unless ruby_version
  invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
  # rubies without a usable prebuilt binary get a separate "build" ruby in /tmp
  if build_ruby?
    FileUtils.mkdir_p(build_ruby_path)
    Dir.chdir(build_ruby_path) do
      ruby_vm = ruby_version_rbx? ? "rbx" : "ruby"
      run("curl #{VENDOR_URL}/#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz -s -o - | tar zxf -")
    end
    error invalid_ruby_version_message unless $?.success?
  end
  # vendor the runtime ruby into the slug
  FileUtils.mkdir_p(slug_vendor_ruby)
  Dir.chdir(slug_vendor_ruby) do
    run("curl #{VENDOR_URL}/#{ruby_version}.tgz -s -o - | tar zxf -")
  end
  error invalid_ruby_version_message unless $?.success?
  # expose the vendored ruby's executables through app-local bin/ symlinks
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir["#{slug_vendor_ruby}/bin/*"].each do |bin|
    run("ln -s ../#{bin} #{bin_dir}")
  end
  if !@ruby_version_env_var
    topic "Using Ruby version: #{ruby_version}"
  else
    # RUBY_VERSION env var selection is deprecated; warn loudly
    topic "Using RUBY_VERSION: #{ruby_version}"
    puts "WARNING: RUBY_VERSION support has been deprecated and will be removed entirely on August 1, 2012."
    puts "See https://devcenter.heroku.com/articles/ruby-versions#selecting_a_version_of_ruby for more information."
  end
  true
end
# vendors JVM into the slug for JRuby
def install_jvm
if ruby_version_jruby?
topic "Installing JVM: #{JVM_VERSION}"
FileUtils.mkdir_p(slug_vendor_jvm)
Dir.chdir(slug_vendor_jvm) do
run("curl #{JVM_BASE_URL}/#{JVM_VERSION}.tar.gz -s -o - | tar xzf -")
end
# symlink the JVM binaries into bin/ so they end up on the slug PATH
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if build_ruby?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
# prepend the vendored ruby's binstubs so they shadow the system ruby
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version_jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
# list of default gems to vendor into the slug
# @return [Array] resulting list of gems
def gems
[BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug
def install_language_pack_gems
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
gems.each do |gem|
run("curl #{VENDOR_URL}/#{gem}.tgz -s -o - | tar xzf -")
end
# make sure the unpacked executables are actually executable
Dir["bin/*"].each { |path| run("chmod 755 #{path}") }
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary
end
# vendors binaries into the slug
def install_binaries
binaries.each { |binary| install_binary(binary) }
Dir["bin/*"].each { |path| run("chmod +x #{path}") }
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
run("curl #{VENDOR_URL}/#{name}.tgz -s -o - | tar xzf -")
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  # force: true makes this a no-op when the file is already gone
  FileUtils.rm(target, :force => true)
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
run("curl #{VENDOR_URL}/#{LIBYAML_PATH}.tgz -s -o - | tar xzf -")
end
end
# install sqlite into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the sqlite files
def install_sqlite(dir)
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
# the command is echoed before execution for build-log debugging
puts "curl #{MY_VENDOR_URL}/#{SQLITE_PATH}.tar.gz -s -o - | tar xzf - 2>&1"
run("curl #{MY_VENDOR_URL}/#{SQLITE_PATH}.tar.gz -s -o - | tar xzf - 2>&1")
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2)
  if File.exist?("vendor/bundle")
    topic "WARNING: Removing `vendor/bundle`."
    puts "Checking in `vendor/bundle` is not supported. Please remove this directory"
    puts "and add it to your .gitignore. To vendor your gems with Bundler, use"
    puts "`bundle pack` instead."
    FileUtils.rm_rf("vendor/bundle")
  end
end
# runs bundler to install the dependencies
# also vendors libyaml/sqlite headers so psych and sqlite3 native
# extensions can compile, then caches the resulting bundle
def build_bundler
log("bundle") do
bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
bundle_command = "bundle install --without #{bundle_without} --path vendor/bundle --binstubs vendor/bundle/bin"
unless File.exist?("Gemfile.lock")
error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
end
if has_windows_gemfile_lock?
topic "WARNING: Removing `Gemfile.lock` because it was generated on Windows."
puts "Bundler will do a full resolve so native gems are handled properly."
puts "This may result in unexpected gem versions being used in your app."
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache_load ".bundle"
end
version = run_stdout("bundle version").strip
topic("Installing dependencies using #{version}")
load_bundler_cache
bundler_output = ""
Dir.mktmpdir("yamltag-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
sqlite_dir = "#{tmpdir}/#{SQLITE_PATH}"
puts "Installing libyaml to #{libyaml_dir}"
install_libyaml(libyaml_dir)
puts "Installing SQLite to #{sqlite_dir}"
install_sqlite(sqlite_dir)
# debug output verifying the sqlite tarball unpacked where expected
if File.exist? "#{sqlite_dir}/#{SQLITE_PATH}/"
puts "folder exists"
else
puts "folder missing"
end
if File.exist? "#{sqlite_dir}/#{SQLITE_PATH}/sqlite3.h"
puts "sqlite3.h exists"
else
puts "sqlite3.h missing"
end
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include")
yaml_lib = File.expand_path("#{libyaml_dir}/lib")
sqlite_include = File.expand_path("#{sqlite_dir}/include")
sqlite_lib = File.expand_path("#{sqlite_dir}/lib")
pwd = run("pwd").chomp
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:#{sqlite_include}:$CPATH CPPATH=#{yaml_include}:#{sqlite_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:#{sqlite_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\""
env_vars += " BUNDLER_LIB_PATH=#{bundler_path}" if ruby_version == "ruby-1.8.7"
# preinstall the sqlite3 gem against the vendored sqlite headers
sqlite_command = "gem install sqlite3 -- --with-sqlite3-dir=#{sqlite_dir}/#{SQLITE_PATH}/"
puts "Running: #{sqlite_command}"
bundler_output << pipe(sqlite_command)
puts "Running: #{bundle_command}"
bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
end
# $? reflects the last pipe() above, i.e. the bundle install itself
if $?.success?
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
pipe "bundle clean 2> /dev/null"
cache_store ".bundle"
cache_store "vendor/bundle"
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
# symlink binstubs
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_base}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}") unless File.exist?("#{bin_dir}/#{bin}")
end
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
# if bundler_output.match(/Installing sqlite3 \([\w.]+\) with native extensions\s+Gem::Installer::ExtensionBuildError: ERROR: Failed to build gem native extension./)
# error_message += <<ERROR
#
#
#Detected sqlite3 gem which is not supported on Heroku.
#http://devcenter.heroku.com/articles/how-do-i-use-sqlite3-for-development
#ERROR
# end
error error_message
end
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
# ask the app's own ruby (first on PATH) for its version, not the buildpack's
ruby_version = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(ruby_version) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
log("create_database_yml") do
return unless File.directory?("config")
topic("Writing config/database.yml to read from DATABASE_URL")
# the heredoc below is the literal ERB template written to the app;
# \#{...} escapes keep those interpolations for ERB-time, not build-time
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
# add bundler to the load path
# @note it sets a flag, so the path can only be loaded once
def add_bundler_to_load_path
return if @bundler_loadpath
$: << File.expand_path(Dir["#{slug_vendor_base}/gems/bundler*/lib"].first)
@bundler_loadpath = true
end
# detects whether the Gemfile.lock contains the Windows platform
# @return [Boolean] true if the Gemfile.lock was created on Windows
def has_windows_gemfile_lock?
lockfile_parser.platforms.detect do |platform|
/mingw|mswin/.match(platform.os) if platform.is_a?(Gem::Platform)
end
end
# detects if a gem is in the bundle.
# @param [String] gem name of the gem in question
# @return [Boolean] true if the gem appears in the Gemfile.lock specs
def gem_is_bundled?(gem)
# memoize the flat list of spec names from the lockfile
@bundler_gems ||= lockfile_parser.specs.map(&:name)
@bundler_gems.include?(gem)
end
# setup the lockfile parser
# @return [Bundler::LockfileParser] a Bundler::LockfileParser
def lockfile_parser
add_bundler_to_load_path
@lockfile_parser ||= LanguagePack::Ruby.lockfile_parser
end
# detects if a rake task is defined in the app
# @param [String] task the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
# --dry-run parses the Rakefile without running anything; $? then tells
# us whether rake recognized the task
run("env PATH=$PATH bundle exec rake #{task} --dry-run") && $?.success?
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# @param [block] blk block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # restore even when the block raises, so a failed build step can't leak
  # a missing GIT_DIR into later phases (assigning nil removes the var)
  ENV["GIT_DIR"] = git_dir
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
gem_is_bundled?("pg") ? ['heroku-postgresql:dev'] : []
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
gem_is_bundled?('execjs') ? [NODE_JS_BINARY_PATH] : []
end
# precompiles assets when the app defines the rake task, timing the run
def run_assets_precompile_rake_task
if rake_task_defined?("assets:precompile")
require 'benchmark'
topic "Running: rake assets:precompile"
time = Benchmark.realtime { pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1") }
# only report success; failures are surfaced by the piped output itself
if $?.success?
puts "Asset precompilation completed (#{"%.2f" % time}s)"
end
end
end
# relative location of the bundler gem cache inside the slug
# @return [String] path used by cache_store/cache_load and purge_bundler_cache
def bundler_cache
  File.join("vendor", "bundle")
end
# restores the build cache and invalidates it when the environment changed
# (ruby version change, broken caches from old buildpack releases, bundler
# or rubygems upgrades), then records current versions for the next build
# NOTE(review): File.exists? is deprecated (removed in Ruby 3.2); left as-is
def load_bundler_cache
cache_load "vendor"
full_ruby_version = run_stdout(%q(ruby -v)).chomp
rubygems_version = run_stdout(%q(gem -v)).chomp
heroku_metadata = "vendor/heroku"
old_rubygems_version = nil
ruby_version_cache = "#{heroku_metadata}/ruby_version"
buildpack_version_cache = "#{heroku_metadata}/buildpack_version"
bundler_version_cache = "#{heroku_metadata}/bundler_version"
rubygems_version_cache = "#{heroku_metadata}/rubygems_version"
old_rubygems_version = File.read(rubygems_version_cache).chomp if File.exists?(rubygems_version_cache)
# fix bug from v37 deploy
if File.exists?("vendor/ruby_version")
puts "Broken cache detected. Purging build cache."
cache_clear("vendor")
FileUtils.rm_rf("vendor/ruby_version")
purge_bundler_cache
# fix bug introduced in v38
elsif !File.exists?(buildpack_version_cache) && File.exists?(ruby_version_cache)
puts "Broken cache detected. Purging build cache."
purge_bundler_cache
elsif cache_exists?(bundler_cache) && File.exists?(ruby_version_cache) && full_ruby_version != File.read(ruby_version_cache).chomp
puts "Ruby version change detected. Clearing bundler cache."
puts "Old: #{File.read(ruby_version_cache).chomp}"
puts "New: #{full_ruby_version}"
purge_bundler_cache
end
# fix git gemspec bug from Bundler 1.3.0+ upgrade
if File.exists?(bundler_cache) && !File.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
puts "Old bundler cache detected. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
if (!File.exists?(rubygems_version_cache) ||
(old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
File.exists?(ruby_version_cache) && File.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
purge_bundler_cache
end
# persist the versions used by this build so the checks above can run next time
FileUtils.mkdir_p(heroku_metadata)
File.open(ruby_version_cache, 'w') do |file|
file.puts full_ruby_version
end
File.open(buildpack_version_cache, 'w') do |file|
file.puts BUILDPACK_VERSION
end
File.open(bundler_version_cache, 'w') do |file|
file.puts BUNDLER_VERSION
end
File.open(rubygems_version_cache, 'w') do |file|
file.puts rubygems_version
end
cache_store heroku_metadata
end
# wipes the bundler cache both locally and in the build cache, then
# reinstalls the language pack gems that lived under it
def purge_bundler_cache
FileUtils.rm_rf(bundler_cache)
cache_clear bundler_cache
# need to reinstall language pack gems
install_language_pack_gems
end
end
|
# -*- encoding: utf-8 -*-
require 'rubygems'
require 'fluent/plugin/out_elasticsearch'
require 'aws-sdk'
require 'faraday_middleware/aws_signers_v4'
module Fluent
# fluentd output plugin that signs Elasticsearch requests with AWS SigV4
# so fluentd can talk to Amazon Elasticsearch Service domains
class AwsElasticsearchServiceOutput < ElasticsearchOutput
Plugin.register_output('aws-elasticsearch-service', self)
# each <endpoint> section describes one AWS ES domain
config_section :endpoint do
config_param :region, :string
config_param :url, :string
config_param :access_key_id, :string, :default => ""
config_param :secret_access_key, :string, :default => ""
end
#
# @override
#
# builds the hosts list from the <endpoint> sections and attaches the AWS
# signing info consumed by the Faraday monkey patch below
def get_connection_options
raise "`endpoint` require." if @endpoint.empty?
hosts =
begin
@endpoint.map do |ep|
uri = URI(ep[:url])
# copy only the non-empty URI pieces into the host hash
host = %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
hash
end
host[:aws_elasticsearch_service] = {
:credentials => credentials(ep[:access_key_id], ep[:secret_access_key]),
:region => ep[:region]
}
host
end
end
{
hosts: hosts
}
end
private
#
# get AWS Credentials
#
# falls back to instance-profile / shared credentials when no keys are
# configured; the resolved value is memoized per (access_key, secret_key)
# NOTE(review): memoizing resolved credentials means temporary
# instance-profile credentials are never refreshed once they expire —
# a later revision of this plugin removes exactly this memoization
def credentials(access_key, secret_key)
@credentials ||= {}
@credentials[access_key] ||= {}
@credentials[access_key][secret_key] ||=
begin
credentials = nil
if access_key.empty? or secret_key.empty?
credentials = Aws::InstanceProfileCredentials.new.credentials
credentials ||= Aws::SharedCredentials.new.credentials
end
credentials ||= Aws::Credentials.new access_key, secret_key
credentials
end
end
end
#
# monkey patch
#
class ElasticsearchOutput
module Elasticsearch
module Client
include ::Elasticsearch::Client
extend self
end
module Transport
module Transport
module HTTP
class Faraday < ::Elasticsearch::Transport::Transport::HTTP::Faraday
alias :__build_connections_origin_from_aws_elasticsearch_service_output :__build_connections
# Builds and returns a collection of connections.
#
# @return [Connections::Collection]
# @override
#
def __build_connections
::Elasticsearch::Transport::Transport::Connections::Collection.new(
:connections => hosts.map { |host|
host[:protocol] = host[:scheme] || DEFAULT_PROTOCOL
host[:port] ||= DEFAULT_PORT
url = __full_url(host)
::Elasticsearch::Transport::Transport::Connections::Connection.new(
:host => host,
:connection => ::Faraday::Connection.new(
url,
(options[:transport_options] || {}),
&__aws_elasticsearch_service_setting(host, &@block)
),
:options => host[:connection_options]
)
},
:selector_class => options[:selector_class],
:selector => options[:selector]
)
end
def __aws_elasticsearch_service_setting(host, &block)
lambda do |faraday|
if host[:aws_elasticsearch_service]
faraday.request :aws_signers_v4,
credentials: host[:aws_elasticsearch_service][:credentials],
service_name: 'es',
region: host[:aws_elasticsearch_service][:region]
end
block.call faraday
end
end
end
end
end
end
end
end
end
Stop memoizing "credentials" so temporary AWS credentials can be resolved fresh on each request.
# -*- encoding: utf-8 -*-
require 'rubygems'
require 'fluent/plugin/out_elasticsearch'
require 'aws-sdk'
require 'faraday_middleware/aws_signers_v4'
module Fluent
# fluentd output plugin that signs Elasticsearch requests with AWS SigV4
# so fluentd can talk to Amazon Elasticsearch Service domains
class AwsElasticsearchServiceOutput < ElasticsearchOutput
Plugin.register_output('aws-elasticsearch-service', self)
# each <endpoint> section describes one AWS ES domain
config_section :endpoint do
config_param :region, :string
config_param :url, :string
config_param :access_key_id, :string, :default => ""
config_param :secret_access_key, :string, :default => ""
end
#
# @override
#
# builds the hosts list from the <endpoint> sections and attaches the AWS
# signing info consumed by the Faraday monkey patch below
def get_connection_options
raise "`endpoint` require." if @endpoint.empty?
hosts =
begin
@endpoint.map do |ep|
uri = URI(ep[:url])
# copy only the non-empty URI pieces into the host hash
host = %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
hash
end
host[:aws_elasticsearch_service] = {
:credentials => credentials(ep[:access_key_id], ep[:secret_access_key]),
:region => ep[:region]
}
host
end
end
{
hosts: hosts
}
end
private
#
# get AWS Credentials
#
# returns a lambda (resolved via .call in the monkey patch below) instead
# of a memoized value, so temporary instance-profile credentials are
# re-resolved and can refresh after expiry
def credentials(access_key, secret_key)
lambda do
credentials = nil
if access_key.empty? or secret_key.empty?
credentials = Aws::InstanceProfileCredentials.new.credentials
credentials ||= Aws::SharedCredentials.new.credentials
end
credentials ||= Aws::Credentials.new access_key, secret_key
credentials
end
end
end
#
# monkey patch
#
class ElasticsearchOutput
module Elasticsearch
module Client
include ::Elasticsearch::Client
extend self
end
module Transport
module Transport
module HTTP
class Faraday < ::Elasticsearch::Transport::Transport::HTTP::Faraday
alias :__build_connections_origin_from_aws_elasticsearch_service_output :__build_connections
# Builds and returns a collection of connections.
#
# @return [Connections::Collection]
# @override
#
def __build_connections
::Elasticsearch::Transport::Transport::Connections::Collection.new(
:connections => hosts.map { |host|
host[:protocol] = host[:scheme] || DEFAULT_PROTOCOL
host[:port] ||= DEFAULT_PORT
url = __full_url(host)
::Elasticsearch::Transport::Transport::Connections::Connection.new(
:host => host,
:connection => ::Faraday::Connection.new(
url,
(options[:transport_options] || {}),
&__aws_elasticsearch_service_setting(host, &@block)
),
:options => host[:connection_options]
)
},
:selector_class => options[:selector_class],
:selector => options[:selector]
)
end
def __aws_elasticsearch_service_setting(host, &block)
lambda do |faraday|
if host[:aws_elasticsearch_service]
faraday.request :aws_signers_v4,
credentials: host[:aws_elasticsearch_service][:credentials].call,
service_name: 'es',
region: host[:aws_elasticsearch_service][:region]
end
block.call faraday
end
end
end
end
end
end
end
end
end
|
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/helpers/nodebin"
require "language_pack/helpers/node_installer"
require "language_pack/helpers/yarn_installer"
require "language_pack/helpers/jvm_installer"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
# libyaml vendored so psych's native extension can compile
LIBYAML_VERSION = "0.1.7"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
# bundler version installed into every slug
BUNDLER_VERSION = "1.15.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
# Rubinius binaries are fetched from here
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
# path where a node buildpack would have installed node
NODE_BP_PATH = "vendor/node/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
instrument "ruby.use" do
File.exist?("Gemfile")
end
end
# shared BundlerWrapper, installed on first access
# NOTE: @@bundler is a class variable, shared across the class hierarchy
def self.bundler
@@bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
# instance-side convenience accessor for the shared bundler wrapper
def bundler
self.class.bundler
end
# @param [String] build_path root of the app being compiled
# @param [String, nil] cache_path directory used for build caching
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
@fetchers[:mri] = LanguagePack::Fetcher.new(VENDOR_URL, @stack)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL, @stack)
@node_installer = LanguagePack::Helpers::NodeInstaller.new
@yarn_installer = LanguagePack::Helpers::YarnInstaller.new
@jvm_installer = LanguagePack::Helpers::JvmInstaller.new(slug_vendor_jvm, @stack)
end
# human readable language pack name
def name
"Ruby"
end
# addons enabled by default for new apps
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
# config vars every Ruby app gets; JRuby apps also get JVM defaults
def default_config_vars
instrument "ruby.default_config_vars" do
vars = {
"LANG" => env("LANG") || "en_US.UTF-8"
}
ruby_version.jruby? ? vars.merge({
"JAVA_OPTS" => default_java_opts,
"JRUBY_OPTS" => default_jruby_opts
}) : vars
end
end
# process types registered when the app's Procfile doesn't define them
def default_process_types
instrument "ruby.default_process_types" do
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
end
# emits advisory warnings; currently a no-op (asset_sync check kept for reference)
def best_practice_warnings
## TODO No asset sync on Scalingo?
# if bundler.has_gem?("asset_sync")
# warn(<<-WARNING)
# You are using the `asset_sync` gem.
# This is not recommended.
# See https://devcenter.heroku.com/articles/please-do-not-use-asset-sync for more information.
# WARNING
# end
end
# main compile entry point: installs ruby/JVM, sets up build/runtime
# environments, then runs bundler and asset precompilation inside a
# GIT_DIR-free context
def compile
instrument 'ruby.compile' do
# check for new app at the beginning of the compile
new_app?
Dir.chdir(build_path)
remove_vendor_bundle
warn_bundler_upgrade
install_ruby
install_jvm
setup_language_pack_environment
setup_export
setup_profiled
allow_git do
install_bundler_in_app
build_bundler("development:test")
post_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
best_practice_warnings
super
end
end
private
# warns when the buildpack's bundler differs from the one recorded in the
# build metadata from the last successful deploy
def warn_bundler_upgrade
old_bundler_version = @metadata.read("bundler_version").chomp if @metadata.exists?("bundler_version")
if old_bundler_version && old_bundler_version != BUNDLER_VERSION
puts(<<-WARNING)
Your app was upgraded to bundler #{ BUNDLER_VERSION }.
Previously you had a successful deploy with bundler #{ old_bundler_version }.
If you see problems related to the bundler version please refer to:
http://doc.scalingo.com/languages/ruby/bundle-configuration
WARNING
end
end
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
# need to remove bin/ folder since it links
# to the wrong --prefix ruby binstubs
# breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
safe_binstubs = binstubs_relative_paths - ["bin"]
paths = [
ENV["PATH"],
"bin",
system_paths,
]
paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
# safe_binstubs is an Array; Array#join below flattens it into the string
paths.unshift(safe_binstubs)
paths.join(":")
end
# relative paths that contain binstubs, in precedence order
# @return [Array<String>]
def binstubs_relative_paths
[
"bin",
bundler_binstubs_path,
"#{slug_vendor_base}/bin"
]
end
# OS-provided directories appended to the end of the PATH
# @return [String] colon-separated system bin directories
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# the relative path to the bundler directory of gems, resolved by asking
# the app's own ruby so the engine/ABI directory matches the runtime
# @return [String] resulting path (e.g. "vendor/bundle/ruby/2.4.0")
def self.slug_vendor_base
  command = %q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")
  slug_vendor_base = run_no_pipe(command, user_env: true).chomp
  # bug fix: the message previously interpolated @slug_vendor_base, an
  # instance variable that is always nil inside this class method; report
  # the value we actually computed instead
  error "Problem detecting bundler vendor directory: #{slug_vendor_base}" unless $?.success?
  return slug_vendor_base
end
# the relative path to the bundler directory of gems
# @note memoized; delegates to the class method of the same name
# @return [String] resulting path
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
@slug_vendor_base ||= self.class.slug_vendor_base
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version.version_without_patchlevel}"
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
"vendor/jvm"
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version.version_without_patchlevel}"
end
# fetch the ruby version from bundler
# @return [LanguagePack::RubyVersion] memoized wrapper around the version
#   declared in the Gemfile (the code always builds a RubyVersion object)
def ruby_version
instrument 'ruby.ruby_version' do
return @ruby_version if @ruby_version
# absence of cached metadata marks a first-time build
new_app = !File.exist?("vendor/heroku")
last_version_file = "buildpack_ruby_version"
last_version = nil
last_version = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)
@ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
is_new: new_app,
last_version: last_version)
return @ruby_version
end
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
  %w[-Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8].join(" ")
end
# shell snippet that derives JVM_MAX_HEAP (in MB) from the dyno's process
# limit (`ulimit -u`), which distinguishes dyno sizes
# @return [String] bash case statement for profile.d / export scripts
def set_jvm_max_heap
<<-EOF
case $(ulimit -u) in
256) # 1X Dyno
JVM_MAX_HEAP=384
;;
512) # 2X Dyno
JVM_MAX_HEAP=768
;;
16384) # IX Dyno
JVM_MAX_HEAP=2048
;;
32768) # PX Dyno
JVM_MAX_HEAP=5120
;;
esac
EOF
end
# shell snippet that sets JAVA_MEM from JVM_MAX_HEAP unless the user has
# already pinned -Xmx through JAVA_OPTS
# @return [String] bash snippet
def set_java_mem
<<-EOF
if ! [[ "${JAVA_OPTS}" == *-Xmx* ]]; then
export JAVA_MEM=${JAVA_MEM:--Xmx${JVM_MAX_HEAP:-384}m}
fi
EOF
end
# shell snippet choosing WEB_CONCURRENCY / RAM-limit defaults per dyno
# size; only installed when SENSIBLE_DEFAULTS is set (see setup_profiled)
# @return [String] bash case statement
def set_default_web_concurrency
<<-EOF
case $(ulimit -u) in
256)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-512}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-2}
;;
512)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-1024}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-4}
;;
16384)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-2560}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-8}
;;
32768)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-6144}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-16}
;;
*)
;;
esac
EOF
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
  "-Xcompile.invokedynamic=%s" % "false"
end

# default Java Xmx
# return [String] string of Java Xmx
def default_java_mem
  "-Xmx${JVM_MAX_HEAP:-%d}m" % 384
end
# sets up the environment variables for the build process
def setup_language_pack_environment
instrument 'ruby.setup_language_pack_environment' do
if ruby_version.jruby?
ENV["PATH"] += ":bin"
# evaluate the heap-sizing shell snippet to compute the default -Xmx
ENV["JAVA_MEM"] = run(<<-SHELL).chomp
#{set_jvm_max_heap}
echo #{default_java_mem}
SHELL
ENV["JRUBY_OPTS"] = env('JRUBY_BUILD_OPTS') || env('JRUBY_OPTS')
ENV["JAVA_HOME"] = @jvm_installer.java_home
end
setup_ruby_install_env
# node/yarn preinstalled by an earlier buildpack win over vendored copies
ENV["PATH"] += ":#{node_preinstall_bin_path}" if node_js_installed?
ENV["PATH"] += ":#{yarn_preinstall_bin_path}" if !yarn_not_preinstalled?
# TODO when buildpack-env-args rolls out, we can get rid of
# ||= and the manual setting below
default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_PATH"] = slug_vendor_base
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = default_path
end
end
# Sets up the environment variables for subsequent processes run by
# muiltibuildpack. We can't use profile.d because $HOME isn't set up
def setup_export
instrument 'ruby.setup_export' do
paths = ENV["PATH"].split(":")
set_export_override "GEM_PATH", "#{build_path}/#{slug_vendor_base}:$GEM_PATH"
set_export_default "LANG", "en_US.UTF-8"
# absolutize relative PATH entries against the build directory
set_export_override "PATH", paths.map { |path| /^\/.*/ !~ path ? "#{build_path}/#{path}" : path }.join(":")
if ruby_version.jruby?
add_to_export set_jvm_max_heap
add_to_export set_java_mem
set_export_default "JAVA_OPTS", default_java_opts
set_export_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# sets up the profile.d script for this buildpack
def setup_profiled
instrument 'setup_profiled' do
profiled_path = [binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":")]
profiled_path << "vendor/#{@yarn_installer.binary_path}" if has_yarn_binary?
profiled_path << "$PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_override "PATH", profiled_path.join(":")
# opt-in tuned WEB_CONCURRENCY defaults
add_to_profiled set_default_web_concurrency if env("SENSIBLE_DEFAULTS")
if ruby_version.jruby?
add_to_profiled set_jvm_max_heap
add_to_profiled set_java_mem
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
instrument 'ruby.install_ruby' do
return false unless ruby_version
installer = LanguagePack::Installers::RubyInstaller.installer(ruby_version).new(@stack)
# some rubies need a separate "build" ruby unpacked for compile time
if ruby_version.build?
installer.fetch_unpack(ruby_version, build_ruby_path, true)
end
installer.install(ruby_version, slug_vendor_ruby)
# record the version so the next build can detect upgrades
@metadata.write("buildpack_ruby_version", ruby_version.version_for_download)
topic "Using Ruby version: #{ruby_version.version_for_download}"
if !ruby_version.set
warn(<<-WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See http://doc.scalingo.com/languages/ruby for more information.
WARNING
end
end
true
rescue LanguagePack::Fetcher::FetchError => error
message = <<ERROR
An error occurred while installing #{ruby_version.version_for_download}
Heroku recommends you use the latest supported Ruby version listed here:
http://doc.scalingo.com/languages/ruby#list-of-the-compatible-runtimes
For more information on syntax for declaring a Ruby version see:
http://doc.scalingo.com/languages/ruby#declaring-version
ERROR
if ruby_version.jruby?
message << "Note: Only JRuby 1.7.13 and newer are supported on Cedar-14"
end
message << "\nDebug Information"
message << error.message
error message
end
# whether this is the app's first build (no cached vendor/heroku yet)
# NOTE(review): `||=` re-evaluates whenever the memoized value is false,
# so later calls re-check the filesystem for existing apps
def new_app?
@new_app ||= !File.exist?("vendor/heroku")
end
# vendors JVM into the slug for JRuby
# @param [Boolean] forced install the JVM even for non-JRuby apps
def install_jvm(forced = false)
instrument 'ruby.install_jvm' do
if ruby_version.jruby? || forced
@jvm_installer.install(ruby_version.engine_version, forced)
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if ruby_version.build?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
# prepend the vendored ruby's binstubs so they shadow the system ruby
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version.jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# installs vendored gems into the slug
def install_bundler_in_app
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
# copy the pre-installed bundler wrapper's files into the gem dir
`cp -R #{bundler.bundler_path}/. .`
end
# write bundler shim, so we can control the version bundler used
# Ruby 2.6.0 started vendoring bundler
write_bundler_shim("vendor/bundle/bin") if ruby_version.vendored_bundler?
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
add_node_js_binary + add_yarn_binary
end
# vendors binaries into the slug
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
# ensure everything dropped into bin/ is executable
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
topic "Installing #{name}"
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
# node and yarn have dedicated installers; anything else is a plain tarball
if name.match(/^node\-/)
@node_installer.install
# need to set PATH here b/c `node-gyp` can change the CWD, but still depends on executing node.
# the current PATH is relative, but it needs to be absolute for this.
# doing this here also prevents it from being exported during runtime
node_bin_path = File.absolute_path(".")
# this needs to be set after so other binaries in bin/ don't take precedence"
ENV["PATH"] = "#{ENV["PATH"]}:#{node_bin_path}"
elsif name.match(/^yarn\-/)
# yarn lives under vendor/, not bin/, and is prepended to PATH
FileUtils.mkdir_p("../vendor")
Dir.chdir("../vendor") do |vendor_dir|
@yarn_installer.install
yarn_path = File.absolute_path("#{vendor_dir}/#{@yarn_installer.binary_path}")
ENV["PATH"] = "#{yarn_path}:#{ENV["PATH"]}"
end
else
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  # :force => true silently ignores a missing file
  FileUtils.rm(target, :force => true)
end
# Whether the default bundler cache may be used.
# NOTE: deliberately short-circuits to false on this platform; the line
# below the return is intentionally unreachable, kept to document the
# original heroku condition.
def load_default_cache?
return false # CloudFoundry cannot use the precompiled heroku gems.
new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
# NOTE: disabled via the `if false` guard (see load_default_cache?); the
# body is kept for reference only.
def load_default_cache
instrument "ruby.load_default_cache" do
if false # load_default_cache?
puts "New app detected loading default bundler cache"
patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
cache_name = "#{LanguagePack::RubyVersion::DEFAULT_VERSION}-p#{patchlevel}-default-cache"
@fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
end
end
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do
# stack-specific prebuilt tarball
@fetchers[:buildpack].fetch_untar("#{@stack}/#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # Fix: File.exists? is deprecated; use File.exist?
  if File.exist?("vendor/bundle")
    warn(<<-WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Relative directory where bundler writes the app's binstubs.
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# Location of the vendored bundler gem inside the slug (memoized).
def bundler_path
  @bundler_path ||= File.join(slug_vendor_base, "gems", BUNDLER_GEM_PATH)
end
# Writes an executable `bundle` shim into `path` so the buildpack controls
# which bundler version gets activated (BUNDLER_VERSION is interpolated at
# write time; the `_x.y.z_` first-arg override mirrors rubygems' behavior).
# @param [String] path directory to create the shim in
def write_bundler_shim(path)
FileUtils.mkdir_p(path)
shim_path = "#{path}/bundle"
File.open(shim_path, "w") do |file|
file.print <<-BUNDLE
#!/usr/bin/env ruby
require 'rubygems'
version = "#{BUNDLER_VERSION}"
if ARGV.first
str = ARGV.first
str = str.dup.force_encoding("BINARY") if str.respond_to? :force_encoding
if str =~ /\A_(.*)_\z/ and Gem::Version.correct?($1) then
version = $1
ARGV.shift
end
end
if Gem.respond_to?(:activate_bin_path)
load Gem.activate_bin_path('bundler', 'bundle', version)
else
gem "bundler", version
load Gem.bin_path("bundler", "bundle", version)
end
BUNDLE
end
FileUtils.chmod(0755, shim_path)
end
# runs bundler to install the dependencies
# Orchestrates `bundle install`: builds the command line (adding
# --deployment unless a Windows-generated Gemfile.lock was removed), loads
# the cache, runs the install against a vendored libyaml, then stores the
# cache on success or raises a descriptive error on failure.
def build_bundler(default_bundle_without)
instrument 'ruby.build_bundler' do
log("bundle") do
bundle_without = env("BUNDLE_WITHOUT") || default_bundle_without
bundle_bin = "bundle"
bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
bundle_command << " -j4"
if File.exist?("#{Dir.pwd}/.bundle/config")
warn(<<-WARNING, inline: true)
You have the `.bundle/config` file checked into your repository
It contains local state like the location of the installed bundle
as well as configured git local gems, and other settings that should
not be shared between multiple checkouts of a single repo. Please
remove the `.bundle/` folder from your repo and add it to your `.gitignore` file.
http://doc.scalingo.com/languages/ruby/bundle-configuration
WARNING
end
if bundler.windows_gemfile_lock?
warn(<<-WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
http://doc.scalingo.com/languages/ruby/bundle-gemfile-on-windows
WARNING
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
end
topic("Installing dependencies using bundler #{bundler.version}")
load_bundler_cache
bundler_output = ""
bundle_time = nil
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
yaml_lib = File.expand_path("#{libyaml_dir}/lib").shellescape
pwd = Dir.pwd
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = {
"BUNDLE_GEMFILE" => "#{pwd}/#{ENV['BUNDLE_GEMFILE']}",
"BUNDLE_CONFIG" => "#{pwd}/.bundle/config",
"CPATH" => noshellescape("#{yaml_include}:$CPATH"),
"CPPATH" => noshellescape("#{yaml_include}:$CPPATH"),
"LIBRARY_PATH" => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
"RUBYOPT" => syck_hack,
"NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true",
"BUNDLE_DISABLE_VERSION_CHECK" => "true"
}
env_vars["JAVA_HOME"] = noshellescape("#{pwd}/$JAVA_HOME") if ruby_version.jruby?
env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
puts "Running: #{bundle_command}"
instrument "ruby.bundle_install" do
bundle_time = Benchmark.realtime do
bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
end
end
end
# $? reflects the exit status of the `pipe` call above
if $?.success?
puts "Bundle completed (#{"%.2f" % bundle_time}s)"
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
instrument "ruby.bundle_clean" do
# Only show bundle clean output when not using default cache
if load_default_cache?
run("#{bundle_bin} clean > /dev/null", user_env: true)
else
pipe("#{bundle_bin} clean", out: "2> /dev/null", user_env: true)
end
end
@bundler_cache.store
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
Dir["#{slug_vendor_base}/bundler/gems/*/.git"].each{|d| FileUtils.rm_rf d}
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
puts "Bundler Output: #{bundler_output}"
# append targeted hints for the two most common failure modes
if bundler_output.match(/An error occurred while installing sqlite3/)
error_message += <<-ERROR
Detected sqlite3 gem which is not supported on Scalingo.
http://doc.scalingo.com/databases/sqlite3
ERROR
end
if bundler_output.match(/but your Gemfile specified/)
error_message += <<-ERROR
Detected a mismatch between your Ruby version installed and
Ruby version specified in Gemfile or Gemfile.lock:
http://doc.scalingo.com/languages/ruby#gemfile-specified-x
ERROR
end
error error_message
end
end
end
end
# Post-install cleanup: strips .git directories out of installed gems and
# lets the bundler wrapper clean up after itself.
def post_bundler
instrument "ruby.post_bundler" do
Dir[File.join(slug_vendor_base, "**", ".git")].each do |dir|
FileUtils.rm_rf(dir)
end
bundler.clean
end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
instrument "ruby.syck_hack" do
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
# ask the target ruby on PATH, not the ruby running the buildpack
rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
# Skipped when there is no config/ directory, or when activerecord >= 4.1
# is present (Rails 4.1+ reads DATABASE_URL natively).
def create_database_yml
instrument 'ruby.create_database_yml' do
return false unless File.directory?("config")
return false if bundler.has_gem?('activerecord') && bundler.gem_version('activerecord') >= Gem::Version.new('4.1.0.beta1')
log("create_database_yml") do
topic("Writing config/database.yml to read from DATABASE_URL")
# the heredoc is the literal ERB template; \#{...} escapes keep those
# interpolations for ERB evaluation at boot, not at write time
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "mysql2" if adapter == "mysql" and Module::const_defined?("Mysql2")
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
end
# Memoized rake task inspector for the app's Rakefile.
# NOTE(review): `Gem::Version.new('3.x')` — the "x" segment makes this a
# prerelease version sorting below "3.0", so raise_on_fail is effectively
# "railties newer than any 3.x prerelease"; confirm this is intended.
def rake
@rake ||= begin
rake_gem_available = bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
raise_on_fail = bundler.gem_version('railties') && bundler.gem_version('railties') > Gem::Version.new('3.x')
topic "Detecting rake tasks"
rake = LanguagePack::Helpers::RakeRunner.new(rake_gem_available)
rake.load_rake_tasks!({ env: rake_env }, raise_on_fail)
rake
end
end
# Environment for rake invocations: DATABASE_URL (when known) overlaid
# with the user-supplied config vars.
def rake_env
  base = {}
  base["DATABASE_URL"] = database_url if database_url
  base.merge(user_env_hash)
end
# DATABASE_URL from the (possibly user-provided) environment, or nil.
# Fix: look the variable up once instead of calling env() twice.
def database_url
  url = env("DATABASE_URL")
  url if url
end
# executes the block with GIT_DIR environment variable removed since it can mess with the current working directory git thinks it's in
# @param [block] block to be executed in the GIT_DIR free context
#
# Fix: restore GIT_DIR in an ensure clause so it comes back even when the
# block raises (the original leaked the deletion on errors). Assigning
# nil back to ENV removes the key again, so the unset case round-trips.
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  begin
    blk.call
  ensure
    ENV["GIT_DIR"] = git_dir
  end
end
# decides if we need to enable the dev database addon
# @return [Array] the database addon if the pg gem is detected or an empty Array if it isn't.
def add_dev_database_addon
  uses_postgres = pg_adapters.any? { |adapter| bundler.has_gem?(adapter) }
  uses_postgres ? ['heroku-postgresql'] : []
end
# Gem names whose presence indicates the app talks to PostgreSQL.
def pg_adapters
  %w[
    pg
    activerecord-jdbcpostgresql-adapter
    jdbc-postgres
    jdbc-postgresql
    jruby-pg
    rjack-jdbc-postgres
    tgbyte-activerecord-jdbcpostgresql-adapter
  ]
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
  needs_node = bundler.has_gem?('execjs') || bundler.has_gem?('webpacker')
  return [] unless needs_node && node_not_preinstalled?
  [@node_installer.binary_path]
end
# Yarn package name to vendor when webpacker is present and no yarn is
# preinstalled; empty array otherwise.
def add_yarn_binary
  return [] unless bundler.has_gem?('webpacker') && yarn_not_preinstalled?
  [@yarn_installer.name]
end
# Whether this build will vendor its own yarn binary.
def has_yarn_binary?
  !add_yarn_binary.empty?
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# @return String if it's detected and false if it isn't
# Memoized with defined? so the negative (false) result is cached too.
# NOTE(review): `run("which node")` output likely carries a trailing
# newline — confirm callers tolerate that.
def node_preinstall_bin_path
return @node_preinstall_bin_path if defined?(@node_preinstall_bin_path)
legacy_path = "#{Dir.pwd}/#{NODE_BP_PATH}"
path = run("which node")
if path && $?.success?
@node_preinstall_bin_path = path
elsif run("#{legacy_path}/node -v") && $?.success?
@node_preinstall_bin_path = legacy_path
else
@node_preinstall_bin_path = false
end
end
# node_preinstall_bin_path doubles as a predicate (truthy path or false)
alias :node_js_installed? :node_preinstall_bin_path
def node_not_preinstalled?
!node_js_installed?
end
# Path of a preinstalled yarn (e.g. from a nodejs buildpack) found via
# `which yarn`, or false when none is on PATH. Memoized, including false.
def yarn_preinstall_bin_path
return @yarn_preinstall_bin_path if defined?(@yarn_preinstall_bin_path)
path = run("which yarn")
if path && $?.success?
@yarn_preinstall_bin_path = path
else
@yarn_preinstall_bin_path = false
end
end
# True when no preinstalled yarn was found on PATH.
def yarn_not_preinstalled?
  yarn_preinstall_bin_path ? false : true
end
# Runs `rake assets:precompile` when the app defines it; reports timing on
# success and delegates failure reporting to precompile_fail.
def run_assets_precompile_rake_task
instrument 'ruby.run_assets_precompile_rake_task' do
precompile = rake.task("assets:precompile")
# apps without the task are fine — nothing to do
return true unless precompile.is_defined?
topic "Precompiling assets"
precompile.invoke(env: rake_env)
if precompile.success?
puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
else
precompile_fail(precompile.output)
end
end
end
# Logs and raises a build error for a failed asset precompile, appending a
# hint when the output looks like a missing-database failure.
def precompile_fail(output)
  log "assets_precompile", :status => "failure"
  lines = ["Precompiling assets failed.\n"]
  if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
    lines << "Attempted to access a nonexistent database:\n"
    lines << "Refer a database in your environment and redeploy\n"
  end
  error lines.join
end
# Root of the cached bundle directory.
def bundler_cache
  File.join("vendor", "bundle")
end
# Loads (or purges) the cached bundle: handles stack changes, ruby and
# rubygems version changes, plus a series of historical cache-corruption
# fixes, then persists fresh metadata for the next build's comparisons.
def load_bundler_cache
instrument "ruby.load_bundler_cache" do
cache.load "vendor"
full_ruby_version = run_stdout(%q(ruby -v)).chomp
rubygems_version = run_stdout(%q(gem -v)).chomp
heroku_metadata = "vendor/heroku"
old_rubygems_version = nil
ruby_version_cache = "ruby_version"
buildpack_version_cache = "buildpack_version"
cf_buildpack_version_cache = "cf_buildpack_version"
bundler_version_cache = "bundler_version"
rubygems_version_cache = "rubygems_version"
stack_cache = "stack"
# NOTE(review): reads the *ruby_version* key into old_rubygems_version;
# this mirrors upstream behavior but looks suspicious — confirm intent
# before changing.
old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
old_stack = @metadata.read(stack_cache).chomp if @metadata.exists?(stack_cache)
old_stack ||= DEFAULT_LEGACY_STACK
stack_change = old_stack != @stack
convert_stack = @bundler_cache.old?
@bundler_cache.convert_stack(stack_change) if convert_stack
if !new_app? && stack_change
puts "Purging Cache. Changing stack from #{old_stack} to #{@stack}"
purge_bundler_cache(old_stack)
elsif !new_app? && !convert_stack
@bundler_cache.load
end
# fix bug from v37 deploy
if File.exists?("vendor/ruby_version")
puts "Broken cache detected. Purging build cache."
cache.clear("vendor")
FileUtils.rm_rf("vendor/ruby_version")
purge_bundler_cache
# fix bug introduced in v38
elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
puts "Broken cache detected. Purging build cache."
purge_bundler_cache
elsif (@bundler_cache.exists? || @bundler_cache.old?) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
puts "Ruby version change detected. Clearing bundler cache."
puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
puts "New: #{full_ruby_version}"
purge_bundler_cache
end
# fix git gemspec bug from Bundler 1.3.0+ upgrade
if File.exists?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
puts "Old bundler cache detected. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
if (!@metadata.exists?(rubygems_version_cache) ||
(old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
purge_bundler_cache
end
# fix for https://github.com/sparklemotion/nokogiri/issues/923
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
puts "Fixing nokogiri install. Clearing bundler cache."
puts "See https://github.com/sparklemotion/nokogiri/issues/923."
purge_bundler_cache
end
# recompile nokogiri to use new libyaml
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
purge_bundler_cache
end
# recompile gems for libyaml 0.1.7 update
if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 147 &&
(@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.match(/ruby 2\.1\.(9|10)/) ||
bundler.has_gem?("psych")
)
puts "Need to recompile gems for CVE-2014-2014-9130. Clearing bundler cache."
puts "See https://devcenter.heroku.com/changelog-items/1016."
purge_bundler_cache
end
# persist metadata for the next build's comparisons
FileUtils.mkdir_p(heroku_metadata)
@metadata.write(ruby_version_cache, full_ruby_version, false)
@metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
@metadata.write(cf_buildpack_version_cache, CF_BUILDPACK_VERSION, false)
@metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
@metadata.write(rubygems_version_cache, rubygems_version, false)
@metadata.write(stack_cache, @stack, false)
@metadata.save
end
end
# Drops the bundler cache (optionally scoped to an old stack) and then
# reinstalls the language pack's bundler, which lived inside that cache.
def purge_bundler_cache(stack = nil)
instrument "ruby.purge_bundler_cache" do
@bundler_cache.clear(stack)
# need to reinstall language pack gems
install_bundler_in_app
end
end
end
# Change order of path to have local nodejs first
require "tmpdir"
require "digest/md5"
require "benchmark"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/ruby_version"
require "language_pack/helpers/nodebin"
require "language_pack/helpers/node_installer"
require "language_pack/helpers/yarn_installer"
require "language_pack/helpers/jvm_installer"
require "language_pack/version"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
NAME = "ruby"
# libyaml is vendored so psych can compile against it
LIBYAML_VERSION = "0.1.7"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
# the buildpack pins its own bundler version
BUNDLER_VERSION = "1.15.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
# Rubinius binaries location
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
# where a multibuildpack nodejs install puts its binary
NODE_BP_PATH = "vendor/node/bin"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
# Presence of a Gemfile is the sole detection criterion.
def self.use?
instrument "ruby.use" do
File.exist?("Gemfile")
end
end
# Shared BundlerWrapper, installed on first access.
# NOTE(review): @@bundler is a class variable shared across the whole
# inheritance tree — presumably intentional so subclasses reuse one
# wrapper; confirm before converting to a class instance variable.
def self.bundler
@@bundler ||= LanguagePack::Helpers::BundlerWrapper.new.install
end
# Instance-level convenience accessor for the class-wide bundler wrapper.
def bundler
self.class.bundler
end
# @param [String] build_path location of the app source being compiled
# @param [String, nil] cache_path location of the build cache
# Registers the MRI/Rubinius fetchers and the node/yarn/JVM installers.
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
@fetchers[:mri] = LanguagePack::Fetcher.new(VENDOR_URL, @stack)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL, @stack)
@node_installer = LanguagePack::Helpers::NodeInstaller.new
@yarn_installer = LanguagePack::Helpers::YarnInstaller.new
@jvm_installer = LanguagePack::Helpers::JvmInstaller.new(slug_vendor_jvm, @stack)
end
# Human-readable language pack name.
def name
"Ruby"
end
# Addons to provision by default: a dev database when a pg adapter gem is
# detected (see add_dev_database_addon).
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
# Config vars every Ruby app gets; JRuby apps additionally receive the
# default JVM options.
def default_config_vars
  instrument "ruby.default_config_vars" do
    vars = { "LANG" => env("LANG") || "en_US.UTF-8" }
    if ruby_version.jruby?
      vars = vars.merge(
        "JAVA_OPTS" => default_java_opts,
        "JRUBY_OPTS" => default_jruby_opts
      )
    end
    vars
  end
end
# Fallback process types used when the app does not declare its own.
def default_process_types
instrument "ruby.default_process_types" do
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
end
# Emits best-practice warnings about the app; every check is currently
# disabled (commented out), so this is a no-op.
def best_practice_warnings
## TODO No asset sync on Scalingo?
# if bundler.has_gem?("asset_sync")
# warn(<<-WARNING)
# You are using the `asset_sync` gem.
# This is not recommended.
# See https://devcenter.heroku.com/articles/please-do-not-use-asset-sync for more information.
# WARNING
# end
end
# Entry point: runs the full build pipeline for a Ruby app in order —
# cleanup, ruby/JVM install, environment setup, then (with GIT_DIR
# removed) bundler install, database.yml, binaries, and asset precompile.
def compile
instrument 'ruby.compile' do
# check for new app at the beginning of the compile
new_app?
Dir.chdir(build_path)
remove_vendor_bundle
warn_bundler_upgrade
install_ruby
install_jvm
setup_language_pack_environment
setup_export
setup_profiled
allow_git do
install_bundler_in_app
build_bundler("development:test")
post_bundler
create_database_yml
install_binaries
run_assets_precompile_rake_task
end
best_practice_warnings
super
end
end
private
# Warns when the pinned bundler version changed since the last deploy
# (reads the bundler_version key stored in the build metadata).
def warn_bundler_upgrade
old_bundler_version = @metadata.read("bundler_version").chomp if @metadata.exists?("bundler_version")
if old_bundler_version && old_bundler_version != BUNDLER_VERSION
puts(<<-WARNING)
Your app was upgraded to bundler #{ BUNDLER_VERSION }.
Previously you had a successful deploy with bundler #{ old_bundler_version }.
If you see problems related to the bundler version please refer to:
http://doc.scalingo.com/languages/ruby/bundle-configuration
WARNING
end
end
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
# need to remove bin/ folder since it links
# to the wrong --prefix ruby binstubs
# breaking require. This only applies to Ruby 1.9.2 and 1.8.7.
safe_binstubs = binstubs_relative_paths - ["bin"]
paths = [
"bin",
ENV["PATH"],
system_paths,
]
# JVM bin goes ahead of the base paths for JRuby; binstubs go first of all
paths.unshift("#{slug_vendor_jvm}/bin") if ruby_version.jruby?
paths.unshift(safe_binstubs)
paths.join(":")
end
# Relative binstub directories, listed in PATH priority order.
def binstubs_relative_paths
  ["bin", bundler_binstubs_path, "#{slug_vendor_base}/bin"]
end
# Standard system PATH entries appended after the app's own paths.
def system_paths
  %w[/usr/local/bin /usr/bin /bin].join(":")
end
# Computes vendor/bundle/<engine>/<abi version> by asking the target ruby.
# Fix: the failure message interpolated @slug_vendor_base — an unset class
# instance variable that is always nil here — instead of the local that
# holds the command output; interpolate the local so the error is useful.
def self.slug_vendor_base
  command = %q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")
  slug_vendor_base = run_no_pipe(command, user_env: true).chomp
  error "Problem detecting bundler vendor directory: #{slug_vendor_base}" unless $?.success?
  slug_vendor_base
end
# the relative path to the bundler directory of gems
# @return [String] resulting path (memoized; delegates to the class method)
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
@slug_vendor_base ||= self.class.slug_vendor_base
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
  File.join("vendor", ruby_version.version_without_patchlevel)
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
  File.join("vendor", "jvm")
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
  File.join("/tmp", ruby_version.version_without_patchlevel)
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
# Memoized; also feeds the previous deploy's version from metadata into
# RubyVersion so existing apps keep stable defaults.
def ruby_version
instrument 'ruby.ruby_version' do
return @ruby_version if @ruby_version
new_app = !File.exist?("vendor/heroku")
last_version_file = "buildpack_ruby_version"
last_version = nil
last_version = @metadata.read(last_version_file).chomp if @metadata.exists?(last_version_file)
@ruby_version = LanguagePack::RubyVersion.new(bundler.ruby_version,
is_new: new_app,
last_version: last_version)
return @ruby_version
end
end
# default JAVA_OPTS
# return [String] string of JAVA_OPTS
def default_java_opts
"-Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# Shell snippet sizing JVM_MAX_HEAP from `ulimit -u` (used here as a
# container-size heuristic). Returned as text; evaluated by the shell.
def set_jvm_max_heap
<<-EOF
case $(ulimit -u) in
256) # 1X Dyno
JVM_MAX_HEAP=384
;;
512) # 2X Dyno
JVM_MAX_HEAP=768
;;
16384) # IX Dyno
JVM_MAX_HEAP=2048
;;
32768) # PX Dyno
JVM_MAX_HEAP=5120
;;
esac
EOF
end
# Shell snippet defaulting JAVA_MEM from JVM_MAX_HEAP unless the user
# already supplied -Xmx via JAVA_OPTS.
def set_java_mem
<<-EOF
if ! [[ "${JAVA_OPTS}" == *-Xmx* ]]; then
export JAVA_MEM=${JAVA_MEM:--Xmx${JVM_MAX_HEAP:-384}m}
fi
EOF
end
# Shell snippet defaulting WEB_CONCURRENCY / HEROKU_RAM_LIMIT_MB per
# container size; only installed when SENSIBLE_DEFAULTS is set.
def set_default_web_concurrency
<<-EOF
case $(ulimit -u) in
256)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-512}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-2}
;;
512)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-1024}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-4}
;;
16384)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-2560}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-8}
;;
32768)
export HEROKU_RAM_LIMIT_MB=${HEROKU_RAM_LIMIT_MB:-6144}
export WEB_CONCURRENCY=${WEB_CONCURRENCY:-16}
;;
*)
;;
esac
EOF
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
"-Xcompile.invokedynamic=false"
end
# default Java Xmx
# return [String] string of Java Xmx
def default_java_mem
"-Xmx${JVM_MAX_HEAP:-384}m"
end
# sets up the environment variables for the build process
def setup_language_pack_environment
instrument 'ruby.setup_language_pack_environment' do
if ruby_version.jruby?
ENV["PATH"] += ":bin"
# evaluate the heap-sizing shell snippet to pick a default -Xmx
ENV["JAVA_MEM"] = run(<<-SHELL).chomp
#{set_jvm_max_heap}
echo #{default_java_mem}
SHELL
ENV["JRUBY_OPTS"] = env('JRUBY_BUILD_OPTS') || env('JRUBY_OPTS')
ENV["JAVA_HOME"] = @jvm_installer.java_home
end
setup_ruby_install_env
# append any node/yarn already provided by another buildpack
ENV["PATH"] += ":#{node_preinstall_bin_path}" if node_js_installed?
ENV["PATH"] += ":#{yarn_preinstall_bin_path}" if !yarn_not_preinstalled?
# TODO when buildpack-env-args rolls out, we can get rid of
# ||= and the manual setting below
default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_PATH"] = slug_vendor_base
ENV["GEM_HOME"] = slug_vendor_base
ENV["PATH"] = default_path
end
end
# Sets up the environment variables for subsequent processes run by
# multibuildpack. We can't use profile.d because $HOME isn't set up
def setup_export
instrument 'ruby.setup_export' do
paths = ENV["PATH"].split(":")
set_export_override "GEM_PATH", "#{build_path}/#{slug_vendor_base}:$GEM_PATH"
set_export_default "LANG", "en_US.UTF-8"
# absolutize relative PATH entries against the build directory
set_export_override "PATH", paths.map { |path| /^\/.*/ !~ path ? "#{build_path}/#{path}" : path }.join(":")
if ruby_version.jruby?
add_to_export set_jvm_max_heap
add_to_export set_java_mem
set_export_default "JAVA_OPTS", default_java_opts
set_export_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# sets up the profile.d script for this buildpack
def setup_profiled
instrument 'setup_profiled' do
profiled_path = [binstubs_relative_paths.map {|path| "$HOME/#{path}" }.join(":")]
profiled_path << "vendor/#{@yarn_installer.binary_path}" if has_yarn_binary?
profiled_path << "$PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_override "PATH", profiled_path.join(":")
# opt-in runtime defaults (WEB_CONCURRENCY etc.)
add_to_profiled set_default_web_concurrency if env("SENSIBLE_DEFAULTS")
if ruby_version.jruby?
add_to_profiled set_jvm_max_heap
add_to_profiled set_java_mem
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
end
end
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
instrument 'ruby.install_ruby' do
return false unless ruby_version
installer = LanguagePack::Installers::RubyInstaller.installer(ruby_version).new(@stack)
# some rubies need a separate "build" ruby unpacked under /tmp
if ruby_version.build?
installer.fetch_unpack(ruby_version, build_ruby_path, true)
end
installer.install(ruby_version, slug_vendor_ruby)
@metadata.write("buildpack_ruby_version", ruby_version.version_for_download)
topic "Using Ruby version: #{ruby_version.version_for_download}"
if !ruby_version.set
warn(<<-WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version.to_gemfile}
# See http://doc.scalingo.com/languages/ruby for more information.
WARNING
end
end
true
# download failures are turned into a friendly, actionable build error
rescue LanguagePack::Fetcher::FetchError => error
message = <<ERROR
An error occurred while installing #{ruby_version.version_for_download}
Heroku recommends you use the latest supported Ruby version listed here:
http://doc.scalingo.com/languages/ruby#list-of-the-compatible-runtimes
For more information on syntax for declaring a Ruby version see:
http://doc.scalingo.com/languages/ruby#declaring-version
ERROR
if ruby_version.jruby?
message << "Note: Only JRuby 1.7.13 and newer are supported on Cedar-14"
end
message << "\nDebug Information"
message << error.message
error message
end
# True when this is the app's first deploy (no "vendor/heroku" metadata
# directory from a previous build).
#
# Fix: memoize with defined? so a false result is cached too — the
# original `@new_app ||= ...` re-ran File.exist? on every call for
# existing apps because false is not memoizable with `||=`.
def new_app?
  return @new_app if defined?(@new_app)
  @new_app = !File.exist?("vendor/heroku")
end
# vendors JVM into the slug for JRuby
# Installs a JVM via @jvm_installer when the target ruby is JRuby, or
# whenever `forced` is true (lets callers force a JVM for non-JRuby apps).
def install_jvm(forced = false)
instrument 'ruby.install_jvm' do
if ruby_version.jruby? || forced
@jvm_installer.install(ruby_version.engine_version, forced)
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
#
# Fix: the original called ruby_version.build? before checking for a
# missing ruby_version, so the documented "" fallback was unreachable
# (a nil receiver would raise NoMethodError first). Check nil up front.
def ruby_install_binstub_path
  @ruby_install_binstub_path ||=
    if ruby_version.nil?
      ""
    elsif ruby_version.build?
      "#{build_ruby_path}/bin"
    else
      "#{slug_vendor_ruby}/bin"
    end
end
# setup the environment so we can use the vendored ruby
# Prepends the vendored ruby's bin dir to PATH; for JRuby also seeds
# JAVA_OPTS with the default options.
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version.jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# installs vendored gems into the slug
# Copies the buildpack's own bundler installation into slug_vendor_base so
# `bundle` is usable before any app gems are installed.
def install_bundler_in_app
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
# shell `cp -R` for speed; exit status is not checked here
`cp -R #{bundler.bundler_path}/. .`
end
# write bundler shim, so we can control the version bundler used
# Ruby 2.6.0 started vendoring bundler
write_bundler_shim("vendor/bundle/bin") if ruby_version.vendored_bundler?
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
  # splat both contributions into one fresh array
  [*add_node_js_binary, *add_yarn_binary]
end
# vendors binaries into the slug
# Installs each entry from #binaries, then marks everything in bin/ executable.
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
# node and yarn get dedicated installers; any other name is fetched from
# the buildpack bucket and untarred straight into bin/.
def install_binary(name)
topic "Installing #{name}"
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
if name.match(/^node\-/)
@node_installer.install
# need to set PATH here b/c `node-gyp` can change the CWD, but still depends on executing node.
# the current PATH is relative, but it needs to be absolute for this.
# doing this here also prevents it from being exported during runtime
node_bin_path = File.absolute_path(".")
# this needs to be set after so other binaries in bin/ don't take precedence"
ENV["PATH"] = "#{ENV["PATH"]}:#{node_bin_path}"
elsif name.match(/^yarn\-/)
# yarn is unpacked under vendor/ (not bin/) and prepended to PATH
FileUtils.mkdir_p("../vendor")
Dir.chdir("../vendor") do |vendor_dir|
@yarn_installer.install
yarn_path = File.absolute_path("#{vendor_dir}/#{@yarn_installer.binary_path}")
ENV["PATH"] = "#{yarn_path}:#{ENV["PATH"]}"
end
else
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  # :force => true silently ignores a missing file
  FileUtils.rm(target, :force => true)
end
# Whether the default bundler cache may be used.
# NOTE: deliberately short-circuits to false on this platform; the line
# below the return is intentionally unreachable, kept to document the
# original heroku condition.
def load_default_cache?
return false # CloudFoundry cannot use the precompiled heroku gems.
new_app? && ruby_version.default?
end
# loads a default bundler cache for new apps to speed up initial bundle installs
# NOTE: disabled via the `if false` guard (see load_default_cache?); the
# body is kept for reference only.
def load_default_cache
instrument "ruby.load_default_cache" do
if false # load_default_cache?
puts "New app detected loading default bundler cache"
patchlevel = run("ruby -e 'puts RUBY_PATCHLEVEL'").chomp
cache_name = "#{LanguagePack::RubyVersion::DEFAULT_VERSION}-p#{patchlevel}-default-cache"
@fetchers[:buildpack].fetch_untar("#{cache_name}.tgz")
end
end
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do
# stack-specific prebuilt tarball
@fetchers[:buildpack].fetch_untar("#{@stack}/#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # Fix: File.exists? is deprecated; use File.exist?
  if File.exist?("vendor/bundle")
    warn(<<-WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Relative directory where bundler writes the app's binstubs.
def bundler_binstubs_path
  File.join("vendor", "bundle", "bin")
end
# Location of the vendored bundler gem inside the slug (memoized).
def bundler_path
  @bundler_path ||= File.join(slug_vendor_base, "gems", BUNDLER_GEM_PATH)
end
# Writes an executable `bundle` shim into `path` so the buildpack controls
# which bundler version gets activated (BUNDLER_VERSION is interpolated at
# write time; the `_x.y.z_` first-arg override mirrors rubygems' behavior).
# @param [String] path directory to create the shim in
def write_bundler_shim(path)
FileUtils.mkdir_p(path)
shim_path = "#{path}/bundle"
File.open(shim_path, "w") do |file|
file.print <<-BUNDLE
#!/usr/bin/env ruby
require 'rubygems'
version = "#{BUNDLER_VERSION}"
if ARGV.first
str = ARGV.first
str = str.dup.force_encoding("BINARY") if str.respond_to? :force_encoding
if str =~ /\A_(.*)_\z/ and Gem::Version.correct?($1) then
version = $1
ARGV.shift
end
end
if Gem.respond_to?(:activate_bin_path)
load Gem.activate_bin_path('bundler', 'bundle', version)
else
gem "bundler", version
load Gem.bin_path("bundler", "bundle", version)
end
BUNDLE
end
FileUtils.chmod(0755, shim_path)
end
# runs bundler to install the dependencies
def build_bundler(default_bundle_without)
instrument 'ruby.build_bundler' do
log("bundle") do
bundle_without = env("BUNDLE_WITHOUT") || default_bundle_without
bundle_bin = "bundle"
bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
bundle_command << " -j4"
if File.exist?("#{Dir.pwd}/.bundle/config")
warn(<<-WARNING, inline: true)
You have the `.bundle/config` file checked into your repository
It contains local state like the location of the installed bundle
as well as configured git local gems, and other settings that should
not be shared between multiple checkouts of a single repo. Please
remove the `.bundle/` folder from your repo and add it to your `.gitignore` file.
http://doc.scalingo.com/languages/ruby/bundle-configuration
WARNING
end
if bundler.windows_gemfile_lock?
warn(<<-WARNING, inline: true)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
In rare occasions Bundler may not be able to resolve your dependencies at all.
http://doc.scalingo.com/languages/ruby/bundle-gemfile-on-windows
WARNING
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
end
topic("Installing dependencies using bundler #{bundler.version}")
load_bundler_cache
bundler_output = ""
bundle_time = nil
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include").shellescape
yaml_lib = File.expand_path("#{libyaml_dir}/lib").shellescape
pwd = Dir.pwd
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = {
"BUNDLE_GEMFILE" => "#{pwd}/#{ENV['BUNDLE_GEMFILE']}",
"BUNDLE_CONFIG" => "#{pwd}/.bundle/config",
"CPATH" => noshellescape("#{yaml_include}:$CPATH"),
"CPPATH" => noshellescape("#{yaml_include}:$CPPATH"),
"LIBRARY_PATH" => noshellescape("#{yaml_lib}:$LIBRARY_PATH"),
"RUBYOPT" => syck_hack,
"NOKOGIRI_USE_SYSTEM_LIBRARIES" => "true",
"BUNDLE_DISABLE_VERSION_CHECK" => "true"
}
env_vars["JAVA_HOME"] = noshellescape("#{pwd}/$JAVA_HOME") if ruby_version.jruby?
env_vars["BUNDLER_LIB_PATH"] = "#{bundler_path}" if ruby_version.ruby_version == "1.8.7"
puts "Running: #{bundle_command}"
instrument "ruby.bundle_install" do
bundle_time = Benchmark.realtime do
bundler_output << pipe("#{bundle_command} --no-clean", out: "2>&1", env: env_vars, user_env: true)
end
end
end
if $?.success?
puts "Bundle completed (#{"%.2f" % bundle_time}s)"
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
instrument "ruby.bundle_clean" do
# Only show bundle clean output when not using default cache
if load_default_cache?
run("#{bundle_bin} clean > /dev/null", user_env: true)
else
pipe("#{bundle_bin} clean", out: "2> /dev/null", user_env: true)
end
end
@bundler_cache.store
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
Dir["#{slug_vendor_base}/bundler/gems/*/.git"].each{|d| FileUtils.rm_rf d}
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
puts "Bundler Output: #{bundler_output}"
if bundler_output.match(/An error occurred while installing sqlite3/)
error_message += <<-ERROR
Detected sqlite3 gem which is not supported on Scalingo.
http://doc.scalingo.com/databases/sqlite3
ERROR
end
if bundler_output.match(/but your Gemfile specified/)
error_message += <<-ERROR
Detected a mismatch between your Ruby version installed and
Ruby version specified in Gemfile or Gemfile.lock:
http://doc.scalingo.com/languages/ruby#gemfile-specified-x
ERROR
end
error error_message
end
end
end
end
# Post-install cleanup: strips `.git` directories out of the vendored gems
# (they bloat the slug) and asks bundler to clean unused gems.
def post_bundler
  instrument "ruby.post_bundler" do
    git_dirs = Dir[File.join(slug_vendor_base, "**", ".git")]
    git_dirs.each { |git_dir| FileUtils.rm_rf(git_dir) }
    bundler.clean
  end
end
# RUBYOPT line that requires syck_hack file
# @return [String] require string if needed or else an empty string
def syck_hack
instrument "ruby.syck_hack" do
syck_hack_file = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
rv = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
# < 1.9.3 includes syck, so we need to use the syck hack
if Gem::Version.new(rv) < Gem::Version.new("1.9.3")
"-r#{syck_hack_file}"
else
""
end
end
end
# writes ERB based database.yml for Rails. The database.yml uses the DATABASE_URL from the environment during runtime.
def create_database_yml
instrument 'ruby.create_database_yml' do
return false unless File.directory?("config")
return false if bundler.has_gem?('activerecord') && bundler.gem_version('activerecord') >= Gem::Version.new('4.1.0.beta1')
log("create_database_yml") do
topic("Writing config/database.yml to read from DATABASE_URL")
File.open("config/database.yml", "w") do |file|
file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "mysql2" if adapter == "mysql" and Module::const_defined?("Mysql2")
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
DATABASE_YML
end
end
end
end
def rake
@rake ||= begin
rake_gem_available = bundler.has_gem?("rake") || ruby_version.rake_is_vendored?
raise_on_fail = bundler.gem_version('railties') && bundler.gem_version('railties') > Gem::Version.new('3.x')
topic "Detecting rake tasks"
rake = LanguagePack::Helpers::RakeRunner.new(rake_gem_available)
rake.load_rake_tasks!({ env: rake_env }, raise_on_fail)
rake
end
end
def rake_env
if database_url
{ "DATABASE_URL" => database_url }
else
{}
end.merge(user_env_hash)
end
# DATABASE_URL from the (possibly user-provided) environment.
# @return [String, nil] the value when present, nil otherwise
def database_url
  # look the variable up once instead of evaluating env() twice
  url = env("DATABASE_URL")
  url if url
end
# executes the block with GIT_DIR environment variable removed since it can
# mess with the current working directory git thinks it's in
# The previous value (if any) is restored even when the block raises.
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # restore on all exit paths; assigning nil deletes the variable again
  ENV["GIT_DIR"] = git_dir
end
# Decides whether the dev database addon should be enabled.
# @return [Array] the database addon when a postgres adapter gem is present
#   in the Gemfile, otherwise an empty Array
def add_dev_database_addon
  uses_postgres = pg_adapters.any? { |adapter| bundler.has_gem?(adapter) }
  uses_postgres ? ['heroku-postgresql'] : []
end
# Gem names that indicate the app talks to PostgreSQL (MRI and JRuby variants).
# @return [Array<String>] known postgres adapter gem names
def pg_adapters
  %w[
    pg
    activerecord-jdbcpostgresql-adapter
    jdbc-postgres
    jdbc-postgresql
    jruby-pg
    rjack-jdbc-postgres
    tgbyte-activerecord-jdbcpostgresql-adapter
  ]
end
# decides if we need to install the node.js binary
# @note execjs will blow up if no JS RUNTIME is detected and is loaded.
# @return [Array] the node.js binary path if we need it or an empty Array
def add_node_js_binary
if (bundler.has_gem?('execjs') || bundler.has_gem?('webpacker')) && node_not_preinstalled?
[@node_installer.binary_path]
else
[]
end
end
def add_yarn_binary
bundler.has_gem?('webpacker') && yarn_not_preinstalled? ? [@yarn_installer.name] : []
end
def has_yarn_binary?
add_yarn_binary.any?
end
# checks if node.js is installed via the official heroku-buildpack-nodejs using multibuildpack
# @return String if it's detected and false if it isn't
def node_preinstall_bin_path
return @node_preinstall_bin_path if defined?(@node_preinstall_bin_path)
legacy_path = "#{Dir.pwd}/#{NODE_BP_PATH}"
path = run("which node")
if path && $?.success?
@node_preinstall_bin_path = path
elsif run("#{legacy_path}/node -v") && $?.success?
@node_preinstall_bin_path = legacy_path
else
@node_preinstall_bin_path = false
end
end
alias :node_js_installed? :node_preinstall_bin_path
# @return [Boolean] true when node has not been provided by another buildpack
def node_not_preinstalled?
  node_js_installed? ? false : true
end
def yarn_preinstall_bin_path
return @yarn_preinstall_bin_path if defined?(@yarn_preinstall_bin_path)
path = run("which yarn")
if path && $?.success?
@yarn_preinstall_bin_path = path
else
@yarn_preinstall_bin_path = false
end
end
# @return [Boolean] true when yarn has not been provided by another buildpack
def yarn_not_preinstalled?
  yarn_preinstall_bin_path ? false : true
end
def run_assets_precompile_rake_task
instrument 'ruby.run_assets_precompile_rake_task' do
precompile = rake.task("assets:precompile")
return true unless precompile.is_defined?
topic "Precompiling assets"
precompile.invoke(env: rake_env)
if precompile.success?
puts "Asset precompilation completed (#{"%.2f" % precompile.time}s)"
else
precompile_fail(precompile.output)
end
end
end
def precompile_fail(output)
log "assets_precompile", :status => "failure"
msg = "Precompiling assets failed.\n"
if output.match(/(127\.0\.0\.1)|(org\.postgresql\.util)/)
msg << "Attempted to access a nonexistent database:\n"
msg << "Refer a database in your environment and redeploy\n"
end
error msg
end
# Location of the bundler cache inside the build directory.
# @return [String] relative path to the cached bundle
def bundler_cache
  File.join("vendor", "bundle")
end
# Loads the cached `vendor/bundle` from the previous build and decides
# whether that cache is still usable. The cache is purged whenever the
# stack, ruby version, rubygems version, or buildpack version changed in a
# way known to leave stale or broken native extensions behind. Finishes by
# persisting the current versions into the build metadata so the next
# deploy can run the same comparisons.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"

    full_ruby_version = run_stdout(%q(ruby -v)).chomp
    rubygems_version = run_stdout(%q(gem -v)).chomp
    heroku_metadata = "vendor/heroku"
    old_rubygems_version = nil
    # metadata keys for the persisted version markers
    ruby_version_cache = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    cf_buildpack_version_cache = "cf_buildpack_version"
    bundler_version_cache = "bundler_version"
    rubygems_version_cache = "rubygems_version"
    stack_cache = "stack"

    # BUGFIX: this previously read the *ruby* version marker into
    # old_rubygems_version, so the rubygems "2.0.0" purge check below could
    # never compare the right values. Read the rubygems marker instead.
    old_rubygems_version = @metadata.read(rubygems_version_cache).chomp if @metadata.exists?(rubygems_version_cache)
    old_stack = @metadata.read(stack_cache).chomp if @metadata.exists?(stack_cache)
    old_stack ||= DEFAULT_LEGACY_STACK

    stack_change = old_stack != @stack
    convert_stack = @bundler_cache.old?
    @bundler_cache.convert_stack(stack_change) if convert_stack
    if !new_app? && stack_change
      puts "Purging Cache. Changing stack from #{old_stack} to #{@stack}"
      purge_bundler_cache(old_stack)
    elsif !new_app? && !convert_stack
      @bundler_cache.load
    end

    # fix bug from v37 deploy
    # (File.exists? was removed in Ruby 3.2; use File.exist?)
    if File.exist?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
      # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    elsif (@bundler_cache.exists? || @bundler_cache.old?) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end

    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exist?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
        (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
        @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end

    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end

    # recompile psych to use new libyaml
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 99 && bundler.has_gem?("psych")
      puts "Need to recompile psych for CVE-2013-6393. Clearing bundler cache."
      puts "See http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=737076."
      purge_bundler_cache
    end

    # recompile gems for libyaml 0.1.7 update
    if @metadata.exists?(buildpack_version_cache) && (bv = @metadata.read(buildpack_version_cache).sub('v', '').to_i) && bv != 0 && bv <= 147 &&
        (@metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.match(/ruby 2\.1\.(9|10)/) ||
          bundler.has_gem?("psych")
        )
      # typo fix: the advisory is CVE-2014-9130
      puts "Need to recompile gems for CVE-2014-9130. Clearing bundler cache."
      puts "See https://devcenter.heroku.com/changelog-items/1016."
      purge_bundler_cache
    end

    # persist the current versions for the next build's comparisons
    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(cf_buildpack_version_cache, CF_BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.write(stack_cache, @stack, false)
    @metadata.save
  end
end
# Clears the bundler cache and reinstalls the gems this buildpack itself
# needs (they live inside the cache that was just wiped).
# @param [String, nil] stack stack name forwarded to the cache clear;
#   nil clears the cache for the current stack
def purge_bundler_cache(stack = nil)
  instrument "ruby.purge_bundler_cache" do
    @bundler_cache.clear(stack)
    # need to reinstall language pack gems
    install_bundler_in_app
  end
end
end
|
module Fog
module AWS
class RDS
class Real
require 'fog/aws/parsers/rds/create_db_subnet_group'
# Creates a db subnet group
# http://docs.amazonwebservices.com/AmazonRDS/2012-01-15/APIReference/API_CreateDBSubnetGroup.html
# ==== Parameters
# * DBSubnetGroupName <~String> - The name for the DB Subnet Group. This value is stored as a lowercase string. Must contain no more than 255 alphanumeric characters or hyphens. Must not be "Default".
# * SubnetIds <~Array> - The EC2 Subnet IDs for the DB Subnet Group.
# * DBSubnetGroupDescription <~String> - The description for the DB Subnet Group
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
def create_db_subnet_group(name, subnet_ids, description = name)
params = { 'Action' => 'CreateDBSubnetGroup',
'DBSubnetGroupName' => name,
'DBSubnetGroupDescription' => description,
:parser => Fog::Parsers::AWS::RDS::CreateDBSubnetGroup.new }
params.merge!(Fog::AWS.indexed_param("SubnetIds.member", Array(subnet_ids)))
request(params)
end
end
class Mock
  # Mock implementation of CreateDBSubnetGroup.
  # Validates the subnet ids against the mocked EC2 subnet data, records the
  # group in self.data[:subnet_groups], and returns an Excon::Response shaped
  # like the real API result.
  # @raise [Fog::AWS::RDS::IdentifierTaken] when the group name already exists
  # @raise [Fog::AWS::RDS::NotFound] when any subnet id is unknown
  def create_db_subnet_group(name, subnet_ids, description = name)
    response = Excon::Response.new
    if self.data[:subnet_groups] && self.data[:subnet_groups][name]
      raise Fog::AWS::RDS::IdentifierTaken.new("DBSubnetGroupAlreadyExists => The subnet group '#{name}' already exists")
    end
    # collection = Fog::Compute::AWS.new(:aws_access_key_id => 'mock key', :aws_secret_access_key => 'mock secret')
    # NOTE(review): reaches into the global Fog::Compute[:aws] mock and
    # mutates its region — this couples RDS mocks to shared compute-mock
    # state and may leak the region change between tests; verify.
    collection = Fog::Compute[:aws]
    collection.region = @region
    subnets = subnet_ids.map do |snid|
      subnet = collection.subnets.get(snid)
      raise Fog::AWS::RDS::NotFound.new("InvalidSubnet => The subnet '#{snid}' was not found") if subnet.nil?
      subnet
    end
    # all subnets in a DB subnet group belong to one VPC; take it from the first
    vpc_id = subnets.first.vpc_id
    data = {
      'DBSubnetGroupName' => name,
      'DBSubnetGroupDescription' => description,
      'SubnetGroupStatus' => 'Complete',
      'Subnets' => subnet_ids,
      'VpcId' => vpc_id
    }
    self.data[:subnet_groups][name] = data
    response.body = {
      "ResponseMetadata"=>{ "RequestId"=> Fog::AWS::Mock.request_id },
      'CreateDBSubnetGroupResult' => { 'DBSubnetGroup' => data }
    }
    response
  end
end
end
end
end
fix create_db_subnet_group mock
module Fog
module AWS
class RDS
class Real
require 'fog/aws/parsers/rds/create_db_subnet_group'
# Creates a db subnet group
# http://docs.amazonwebservices.com/AmazonRDS/2012-01-15/APIReference/API_CreateDBSubnetGroup.html
# ==== Parameters
# * DBSubnetGroupName <~String> - The name for the DB Subnet Group. This value is stored as a lowercase string. Must contain no more than 255 alphanumeric characters or hyphens. Must not be "Default".
# * SubnetIds <~Array> - The EC2 Subnet IDs for the DB Subnet Group.
# * DBSubnetGroupDescription <~String> - The description for the DB Subnet Group
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
def create_db_subnet_group(name, subnet_ids, description = name)
params = { 'Action' => 'CreateDBSubnetGroup',
'DBSubnetGroupName' => name,
'DBSubnetGroupDescription' => description,
:parser => Fog::Parsers::AWS::RDS::CreateDBSubnetGroup.new }
params.merge!(Fog::AWS.indexed_param("SubnetIds.member", Array(subnet_ids)))
request(params)
end
end
class Mock
def create_db_subnet_group(name, subnet_ids, description = name)
response = Excon::Response.new
if self.data[:subnet_groups] && self.data[:subnet_groups][name]
raise Fog::AWS::RDS::IdentifierTaken.new("DBSubnetGroupAlreadyExists => The subnet group '#{name}' already exists")
end
# collection = Fog::Compute::AWS.new(:aws_access_key_id => 'mock key', :aws_secret_access_key => 'mock secret')
compute_data = Fog::Compute::AWS::Mock.data[self.region][self.aws_access_key_id]
subnets = subnet_ids.map do |snid|
subnet = compute_data[:subnets].detect { |s| s['subnetId'] == snid }
raise Fog::AWS::RDS::NotFound.new("InvalidSubnet => The subnet '#{snid}' was not found") if subnet.nil?
subnet
end
vpc_id = subnets.first['vpcId']
data = {
'DBSubnetGroupName' => name,
'DBSubnetGroupDescription' => description,
'SubnetGroupStatus' => 'Complete',
'Subnets' => subnet_ids,
'VpcId' => vpc_id
}
self.data[:subnet_groups][name] = data
response.body = {
"ResponseMetadata"=>{ "RequestId"=> Fog::AWS::Mock.request_id },
'CreateDBSubnetGroupResult' => { 'DBSubnetGroup' => data }
}
response
end
end
end
end
end
|
require "tmpdir"
require "digest/md5"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/bundler_lockfile"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
include LanguagePack::BundlerLockfile
extend LanguagePack::BundlerLockfile::ClassMethods
NAME = "ruby"
BUILDPACK_VERSION = "v79"
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.3.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
#NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
JVM_VERSION = "openjdk7-latest"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
BOWER_VERSION = "1.2.6"
BOWER_BASE_URL = "http://heroku-buildpack-ruby-bower.s3.amazonaws.com"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app
def self.use?
instrument "ruby.use" do
File.exist?("Gemfile")
end
end
def self.gem_version(name)
instrument "ruby.gem_version" do
if gem = bundle.specs.detect {|g| g.name == name }
gem.version
end
end
end
def initialize(build_path, cache_path=nil)
super(build_path, cache_path)
@fetchers[:jvm] = LanguagePack::Fetcher.new(JVM_BASE_URL)
@fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL)
end
def name
"Ruby"
end
def default_addons
instrument "ruby.default_addons" do
add_dev_database_addon
end
end
def default_config_vars
instrument "ruby.default_config_vars" do
vars = {
"LANG" => "en_US.UTF-8",
"PATH" => default_path,
"GEM_PATH" => slug_vendor_base,
}
ruby_version_jruby? ? vars.merge({
"JAVA_OPTS" => default_java_opts,
"JRUBY_OPTS" => default_jruby_opts,
"JAVA_TOOL_OPTIONS" => default_java_tool_options
}) : vars
end
end
def default_process_types
instrument "ruby.default_process_types" do
{
"rake" => "bundle exec rake",
"console" => "bundle exec irb"
}
end
end
def compile
instrument 'ruby.compile' do
Dir.chdir(build_path)
remove_vendor_bundle
install_ruby
install_jvm
setup_language_pack_environment
setup_profiled
allow_git do
install_language_pack_gems
build_bundler
create_database_yml
install_binaries
install_node
install_bower
build_bower
run_assets_precompile_rake_task
end
super
end
end
private
# the base PATH environment variable to be used
# @return [String] the resulting PATH
def default_path
"bin:#{bundler_binstubs_path}:/usr/local/bin:/usr/bin:/bin"
end
# the relative path to the bundler directory of gems
# @return [String] resulting path
def slug_vendor_base
instrument 'ruby.slug_vendor_base' do
if @slug_vendor_base
@slug_vendor_base
elsif @ruby_version == "ruby-1.8.7"
@slug_vendor_base = "vendor/bundle/1.8"
else
@slug_vendor_base = run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
end
end
end
# the relative path to the vendored ruby directory
# @return [String] resulting path
def slug_vendor_ruby
"vendor/#{ruby_version}"
end
# the relative path to the vendored jvm
# @return [String] resulting path
def slug_vendor_jvm
"vendor/jvm"
end
# the absolute path of the build ruby to use during the buildpack
# @return [String] resulting path
def build_ruby_path
"/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
instrument 'ruby.ruby_version' do
return @ruby_version if @ruby_version_run
@ruby_version_run = true
@ruby_version_env_var = false
@ruby_version_set = false
old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
@ruby_version = run_stdout("env PATH=#{bundler_path}/bin:#{old_system_path} GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
# for backwards compatibility.
# this will go away in the future
@ruby_version = ENV['RUBY_VERSION']
@ruby_version_env_var = true
elsif @ruby_version == "No ruby version specified"
if new_app?
@ruby_version = DEFAULT_RUBY_VERSION
elsif !@metadata.exists?("buildpack_ruby_version")
@ruby_version = "ruby-1.9.2"
else
@ruby_version = @metadata.read("buildpack_ruby_version").chomp
end
else
@ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
@ruby_version_set = true
end
end
@ruby_version
end
# determine if we're using rbx
# @return [Boolean] true if we are and false if we aren't
def ruby_version_rbx?
ruby_version ? ruby_version.match(/rbx-/) : false
end
# determine if we're using jruby
# @return [Boolean] true if we are and false if we aren't
def ruby_version_jruby?
@ruby_version_jruby ||= ruby_version ? ruby_version.match(/jruby-/) : false
end
# Default JVM options handed to JRuby apps.
# @return [String] the JAVA_OPTS value
def default_java_opts
  opts = ["-Xmx384m", "-Xss512k", "-XX:+UseCompressedOops", "-Dfile.encoding=UTF-8"]
  opts.join(" ")
end
# default JRUBY_OPTS
# return [String] string of JRUBY_OPTS
def default_jruby_opts
"-Xcompile.invokedynamic=true"
end
# default JAVA_TOOL_OPTIONS
# return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
"-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
return @ruby_versions if @ruby_versions
Dir.mktmpdir("ruby_versions-") do |tmpdir|
Dir.chdir(tmpdir) do
@fetchers[:buildpack].fetch("ruby_versions.yml")
@ruby_versions = YAML::load_file("ruby_versions.yml")
end
end
@ruby_versions
end
# sets up the environment variables for the build process
def setup_language_pack_environment
instrument 'ruby.setup_language_pack_environment' do
setup_ruby_install_env
config_vars = default_config_vars.each do |key, value|
ENV[key] ||= value
end
ENV["GEM_HOME"] = slug_vendor_base
ENV["GEM_PATH"] = slug_vendor_base
ENV["PATH"] = "#{ruby_install_binstub_path}:#{slug_vendor_base}/bin:#{config_vars["PATH"]}"
end
end
# sets up the profile.d script for this buildpack
def setup_profiled
instrument 'setup_profiled' do
set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
set_env_default "LANG", "en_US.UTF-8"
set_env_override "PATH", "$HOME/bin:$HOME/#{slug_vendor_base}/bin:$PATH"
if ruby_version_jruby?
set_env_default "JAVA_OPTS", default_java_opts
set_env_default "JRUBY_OPTS", default_jruby_opts
set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
end
end
end
# determines if a build ruby is required
# @return [Boolean] true if a build ruby is required
def build_ruby?
@build_ruby ||= !ruby_version_rbx? && !ruby_version_jruby? && !%w{ruby-1.9.3 ruby-2.0.0}.include?(ruby_version)
end
# install the vendored ruby
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
instrument 'ruby.install_ruby' do
return false unless ruby_version
invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
if build_ruby?
FileUtils.mkdir_p(build_ruby_path)
Dir.chdir(build_ruby_path) do
ruby_vm = "ruby"
instrument "ruby.fetch_build_ruby" do
@fetchers[:buildpack].fetch_untar("#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
end
end
error invalid_ruby_version_message unless $?.success?
end
FileUtils.mkdir_p(slug_vendor_ruby)
Dir.chdir(slug_vendor_ruby) do
instrument "ruby.fetch_ruby" do
if ruby_version_rbx?
file = "#{ruby_version}.tar.bz2"
sha_file = "#{file}.sha1"
@fetchers[:rbx].fetch(file)
@fetchers[:rbx].fetch(sha_file)
expected_checksum = File.read(sha_file).chomp
actual_checksum = Digest::SHA1.file(file).hexdigest
error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
ERROR_MSG
run("tar jxf #{file}")
FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
FileUtils.rm_rf("app")
FileUtils.rm(file)
FileUtils.rm(sha_file)
else
@fetchers[:buildpack].fetch_untar("#{ruby_version}.tgz")
end
end
end
error invalid_ruby_version_message unless $?.success?
app_bin_dir = "bin"
FileUtils.mkdir_p app_bin_dir
run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
run("ln -s ../#{vendor_bin} #{app_bin_dir}")
end
@metadata.write("buildpack_ruby_version", ruby_version)
if !@ruby_version_env_var
topic "Using Ruby version: #{ruby_version}"
if !@ruby_version_set
warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version_to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information."
WARNING
end
else
warn(<<WARNING)
Using RUBY_VERSION: #{ruby_version}
RUBY_VERSION support has been deprecated and will be removed entirely on August 1, 2012.
See https://devcenter.heroku.com/articles/ruby-versions#selecting_a_version_of_ruby for more information.
WARNING
end
end
true
end
def ruby_version_to_gemfile
parts = ruby_version.split('-')
if parts.size > 2
# not mri
"ruby '#{parts[1]}', :engine => '#{parts[2]}', :engine_version => '#{parts.last}'"
else
"ruby '#{parts.last}'"
end
end
# An app is "new" when no heroku metadata from a previous build exists
# in the build directory.
# @return [Boolean] true when this looks like a first deploy
def new_app?
  File.exist?("vendor/heroku") ? false : true
end
# vendors JVM into the slug for JRuby
def install_jvm
instrument 'ruby.install_jvm' do
if ruby_version_jruby?
topic "Installing JVM: #{JVM_VERSION}"
FileUtils.mkdir_p(slug_vendor_jvm)
Dir.chdir(slug_vendor_jvm) do
@fetchers[:jvm].fetch_untar("#{JVM_VERSION}.tar.gz")
end
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
run("ln -s ../#{bin} #{bin_dir}")
end
end
end
end
# find the ruby install path for its binstubs during build
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
@ruby_install_binstub_path ||=
if build_ruby?
"#{build_ruby_path}/bin"
elsif ruby_version
"#{slug_vendor_ruby}/bin"
else
""
end
end
# setup the environment so we can use the vendored ruby
def setup_ruby_install_env
instrument 'ruby.setup_ruby_install_env' do
ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
if ruby_version_jruby?
ENV['JAVA_OPTS'] = default_java_opts
end
end
end
# list of default gems to vendor into the slug
# @return [Array] resulting list of gems
def gems
[BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug
def install_language_pack_gems
instrument 'ruby.install_language_pack_gems' do
FileUtils.mkdir_p(slug_vendor_base)
Dir.chdir(slug_vendor_base) do |dir|
gems.each do |gem|
@fetchers[:buildpack].fetch_untar("#{gem}.tgz")
end
Dir["bin/*"].each {|path| run("chmod 755 #{path}") }
end
end
end
# default set of binaries to install
# @return [Array] resulting list
def binaries
[]
end
# vendors binaries into the slug
def install_binaries
instrument 'ruby.install_binaries' do
binaries.each {|binary| install_binary(binary) }
Dir["bin/*"].each {|path| run("chmod +x #{path}") }
end
end
# vendors individual binary into the slug
# @param [String] name of the binary package from S3.
# Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
bin_dir = "bin"
FileUtils.mkdir_p bin_dir
Dir.chdir(bin_dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{name}.tgz")
end
end
# removes a binary from the slug
# @param [String] relative path of the binary on the slug
def uninstall_binary(path)
FileUtils.rm File.join('bin', File.basename(path)), :force => true
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] tmpdir to store the libyaml files
def install_libyaml(dir)
instrument 'ruby.install_libyaml' do
FileUtils.mkdir_p dir
Dir.chdir(dir) do |dir|
@fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
end
end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  # File.exists? was deprecated and removed in Ruby 3.2; File.exist? is the
  # supported spelling.
  if File.exist?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
def bundler_binstubs_path
"vendor/bundle/bin"
end
# runs bundler to install the dependencies
def build_bundler
instrument 'ruby.build_bundler' do
log("bundle") do
bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
bundle_bin = "bundle"
bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
unless File.exist?("Gemfile.lock")
error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
end
if has_windows_gemfile_lock?
warn(<<WARNING)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
WARNING
log("bundle", "has_windows_gemfile_lock")
File.unlink("Gemfile.lock")
else
# using --deployment is preferred if we can
bundle_command += " --deployment"
cache.load ".bundle"
end
version = run_stdout("#{bundle_bin} version").strip
topic("Installing dependencies using #{version}")
load_bundler_cache
bundler_output = ""
Dir.mktmpdir("libyaml-") do |tmpdir|
libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
install_libyaml(libyaml_dir)
# need to setup compile environment for the psych gem
yaml_include = File.expand_path("#{libyaml_dir}/include")
yaml_lib = File.expand_path("#{libyaml_dir}/lib")
pwd = run("pwd").chomp
bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
# we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
# codon since it uses bundler.
env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:$CPATH CPPATH=#{yaml_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\" NOKOGIRI_USE_SYSTEM_LIBRARIES=true"
env_vars += " BUNDLER_LIB_PATH=#{bundler_path}" if ruby_version && ruby_version.match(/^ruby-1\.8\.7/)
puts "Running: #{bundle_command}"
instrument "ruby.bundle_install" do
bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
end
end
if $?.success?
log "bundle", :status => "success"
puts "Cleaning up the bundler cache."
instrument "ruby.bundle_clean" do
pipe "#{bundle_bin} clean 2> /dev/null"
end
cache.store ".bundle"
cache.store "vendor/bundle"
# Keep gem cache out of the slug
FileUtils.rm_rf("#{slug_vendor_base}/cache")
else
log "bundle", :status => "failure"
error_message = "Failed to install gems via Bundler."
puts "Bundler Output: #{bundler_output}"
if bundler_output.match(/Installing sqlite3 \([\w.]+\)( with native extensions)?\s+Gem::Installer::ExtensionBuildError: ERROR: Failed to build gem native extension./)
error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
end
error error_message
end
end
end
end
# Downloads and unpacks the vendored node.js 0.10.3 tarball into the app
# root, then marks everything under bin/ executable. Aborts the build when
# the download/extract pipeline exits non-zero.
def install_node
  log("node") do
    FileUtils.mkdir_p("bin")
    run("curl http://heroku-buildpack-nodejs.s3.amazonaws.com/nodejs-0.10.3.tgz -s -o - | tar xzf -")
    error "Can't install node-0.10.3" unless $?.success?
    Dir["bin/*"].each do |bin_file|
      run("chmod +x #{bin_file}")
    end
  end
end
# Installs bower as an npm module: fetches the pre-packaged node_modules
# tarball for BOWER_VERSION from S3 and unpacks it into the app root.
# Aborts the build when the download/extract fails.
def install_bower
  log("bower") do
    topic "Using bower version: #{BOWER_VERSION}"
    run("curl #{BOWER_BASE_URL}/bower-#{BOWER_VERSION}/node_modules.tar.gz -s -o - | tar xzf -")
    error "Can't install bower" unless $?.success?
  end
end
# Runs `bower install` via the vendored node_modules to fetch the app's
# JavaScript dependencies. On a non-zero exit the build fails with a
# troubleshooting message (the common cause is the bower 1.x rename of
# component.json to bower.json).
def build_bower
  # User-facing error text; keep wording exactly as-is.
  error_message = <<ERROR
Can't install JavaScript dependencies
Bower 1.0.0 released at 2013-07-23
https://github.com/bower/bower/blob/master/CHANGELOG.md
Check these points:
* Change from component.json to bower.json
* bower.json requires 'name' option
ERROR
  log("bower") do
    topic("Installing JavaScript dependencies using bower #{BOWER_VERSION}")
    # pipe streams bower's output into the build log; $? carries its exit status
    pipe("./node_modules/bower/bin/bower install 2>&1")
    unless $?.success?
      error error_message
    end
  end
end
# RUBYOPT fragment that force-requires the bundled syck_hack shim on rubies
# older than 1.9.3 (which still ship the syck YAML engine).
# @return [String] "-r<path>" when the shim is needed, otherwise ""
def syck_hack
  instrument "ruby.syck_hack" do
    shim_path = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
    detected_version = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
    needs_shim = Gem::Version.new(detected_version) < Gem::Version.new("1.9.3")
    needs_shim ? "-r#{shim_path}" : ""
  end
end
# Writes an ERB-based config/database.yml that derives every connection
# setting from DATABASE_URL when the app boots, so the slug works against
# whatever database is attached at runtime. No-op when the app has no
# config/ directory.
def create_database_yml
  instrument 'ruby.create_database_yml' do
    log("create_database_yml") do
      # Rails-style apps only; apps without config/ are skipped entirely.
      return unless File.directory?("config")
      topic("Writing config/database.yml to read from DATABASE_URL")
      File.open("config/database.yml", "w") do |file|
        # The heredoc below is the literal ERB template evaluated by the
        # *app* at boot, not by the buildpack; the \#{} escapes keep those
        # interpolations inside the generated file. Do not reformat.
        file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
        DATABASE_YML
      end
    end
  end
end
# Detects whether the checked-in Gemfile.lock lists a Windows (mingw/mswin)
# platform, i.e. was generated on Windows and must be regenerated.
# @return [Gem::Platform, nil] the matching platform (truthy) or nil
def has_windows_gemfile_lock?
  bundle.platforms.detect do |platform|
    platform.is_a?(Gem::Platform) && platform.os =~ /mingw|mswin/
  end
end
# Detects whether the named gem appears in the app's resolved bundle
# (i.e. its Gemfile.lock specs).
# @param [String] gem name of the gem in question
# @return [Boolean] true when the gem is part of the bundle
def gem_is_bundled?(gem)
  bundle.specs.any? { |spec| spec.name == gem }
end
# detects if a rake task is defined in the app by asking rake for a dry
# run; a non-zero exit (missing task, broken Rakefile) yields false
# @param [String] the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
  instrument "ruby.rake_task_defined" do
    # run returns command output (truthy string); the real signal is $?.success?
    run("env PATH=$PATH bundle exec rake #{task} --dry-run") && $?.success?
  end
end
# Executes the block with GIT_DIR removed from the environment, since an
# inherited GIT_DIR can confuse git/bundler about the working directory.
# Fix: the original never restored GIT_DIR when the block raised; the
# ensure guarantees restoration on every exit path.
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # Restore the caller's value; assigning nil simply removes the key again.
  ENV["GIT_DIR"] = git_dir
end
# Decides if the dev database addon should be provisioned.
# @return [Array] ['heroku-postgresql:dev'] when the pg gem is bundled,
#   otherwise an empty Array.
def add_dev_database_addon
  if gem_is_bundled?("pg")
    ['heroku-postgresql:dev']
  else
    []
  end
end
# Decides if the node.js binary must be vendored.
# @note execjs blows up at load time when no JS runtime is detected.
# @return [Array] [NODE_JS_BINARY_PATH] when execjs is bundled, else []
def add_node_js_binary
  if gem_is_bundled?('execjs')
    [NODE_JS_BINARY_PATH]
  else
    []
  end
end
# Runs `rake assets:precompile` when the app defines that task, timing the
# run and reporting the duration on success. A failing precompile is not
# fatal here; $? is simply left unsuccessful.
def run_assets_precompile_rake_task
  instrument 'ruby.run_assets_precompile_rake_task' do
    if rake_task_defined?("assets:precompile")
      require 'benchmark'
      topic "Running: rake assets:precompile"
      time = Benchmark.realtime { pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1") }
      if $?.success?
        puts "Asset precompilation completed (#{"%.2f" % time}s)"
      end
    end
  end
end
# Cache-relative directory where bundler installs the app's gems; shared
# by the cache load/purge logic.
def bundler_cache
  'vendor/bundle'
end
# Restores "vendor" from the build cache, then purges the bundler cache for
# every known-bad state: the v37/v38 buildpack bugs, a ruby version change,
# stale Bundler <1.3 git gemspecs, and old nokogiri installs. Finishes by
# recording the current versions into vendor/heroku metadata for the next
# build's comparisons.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"
    full_ruby_version = run_stdout(%q(ruby -v)).chomp
    rubygems_version = run_stdout(%q(gem -v)).chomp
    heroku_metadata = "vendor/heroku"
    old_rubygems_version = nil
    # names of the metadata entries kept under vendor/heroku
    ruby_version_cache = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    bundler_version_cache = "bundler_version"
    rubygems_version_cache = "rubygems_version"
    # NOTE(review): reads the *ruby* version entry into old_rubygems_version
    # — looks suspicious; confirm against upstream whether this was meant
    # to be rubygems_version_cache.
    old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
    # fix bug from v37 deploy
    if File.exists?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
    # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    elsif cache.exists?(bundler_cache) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end
    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exists?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
      (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
      @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && @metadata.read(buildpack_version_cache).sub('v', '').to_i <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end
    # record current state for the next build to diff against
    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.save
  end
end
# Deletes the bundler gem directory both on disk and in the build cache,
# then reinstalls the buildpack's own vendored gems (bundler) that lived
# under that path.
def purge_bundler_cache
  instrument "ruby.purge_bundler_cache" do
    FileUtils.rm_rf(bundler_cache)
    cache.clear bundler_cache
    # need to reinstall language pack gems
    install_language_pack_gems
  end
end
end
Caching bower packages and registry
require "tmpdir"
require "digest/md5"
require "rubygems"
require "language_pack"
require "language_pack/base"
require "language_pack/bundler_lockfile"
# base Ruby Language Pack. This is for any base ruby app.
class LanguagePack::Ruby < LanguagePack::Base
include LanguagePack::BundlerLockfile
extend LanguagePack::BundlerLockfile::ClassMethods
NAME = "ruby"
BUILDPACK_VERSION = "v79"
LIBYAML_VERSION = "0.1.4"
LIBYAML_PATH = "libyaml-#{LIBYAML_VERSION}"
BUNDLER_VERSION = "1.3.2"
BUNDLER_GEM_PATH = "bundler-#{BUNDLER_VERSION}"
#NODE_VERSION = "0.4.7"
NODE_JS_BINARY_PATH = "node"
JVM_BASE_URL = "http://heroku-jdk.s3.amazonaws.com"
JVM_VERSION = "openjdk7-latest"
DEFAULT_RUBY_VERSION = "ruby-2.0.0"
RBX_BASE_URL = "http://binaries.rubini.us/heroku"
BOWER_VERSION = "1.2.6"
BOWER_BASE_URL = "http://heroku-buildpack-ruby-bower.s3.amazonaws.com"
# detects if this is a valid Ruby app
# @return [Boolean] true if it's a Ruby app (a Gemfile is present in the
#   current working directory)
def self.use?
  instrument "ruby.use" do
    File.exist?("Gemfile")
  end
end
# Version of the named gem as resolved in the app's bundle.
# @param [String] name gem name to look up
# @return [Gem::Version, nil] the resolved version, or nil when the gem is
#   not part of the bundle
def self.gem_version(name)
  instrument "ruby.gem_version" do
    spec = bundle.specs.find { |candidate| candidate.name == name }
    spec && spec.version
  end
end
# @param build_path the root of the app being compiled (compile chdirs here)
# @param cache_path optional build cache location, forwarded to the base class
def initialize(build_path, cache_path=nil)
  super(build_path, cache_path)
  # extra fetchers for the JVM (JRuby) and Rubinius binary buckets
  @fetchers[:jvm] = LanguagePack::Fetcher.new(JVM_BASE_URL)
  @fetchers[:rbx] = LanguagePack::Fetcher.new(RBX_BASE_URL)
end
# Human-readable language pack name reported during the build.
def name
  "Ruby"
end
# Addons provisioned by default: currently only the dev database when the
# pg gem is bundled (see add_dev_database_addon).
def default_addons
  instrument "ruby.default_addons" do
    add_dev_database_addon
  end
end
# Default config vars baked into the app: locale, PATH and GEM_PATH, plus
# the JVM option trio when the app runs on JRuby.
def default_config_vars
  instrument "ruby.default_config_vars" do
    vars = {
      "LANG" => "en_US.UTF-8",
      "PATH" => default_path,
      "GEM_PATH" => slug_vendor_base,
    }
    ruby_version_jruby? ? vars.merge({
      "JAVA_OPTS" => default_java_opts,
      "JRUBY_OPTS" => default_jruby_opts,
      "JAVA_TOOL_OPTIONS" => default_java_tool_options
    }) : vars
  end
end
# Fallback process types every Ruby app gets (rake runner and irb console).
def default_process_types
  instrument "ruby.default_process_types" do
    {
      "rake" => "bundle exec rake",
      "console" => "bundle exec irb"
    }
  end
end
# Main buildpack entry point. Orders the whole Ruby build: vendored ruby
# (and JVM for JRuby) install, environment and profile.d setup, then — in a
# GIT_DIR-free environment — gem install, database.yml generation, binary
# vendoring, node/bower install and asset precompilation.
def compile
  instrument 'ruby.compile' do
    # every later step assumes CWD is the app being built
    Dir.chdir(build_path)
    remove_vendor_bundle
    install_ruby
    install_jvm
    setup_language_pack_environment
    setup_profiled
    allow_git do
      install_language_pack_gems
      build_bundler
      create_database_yml
      install_binaries
      install_node
      install_bower
      build_bower
      run_assets_precompile_rake_task
    end
    super
  end
end
private
# the base PATH environment variable to be used: app bin/ first, then the
# bundler binstubs, then the system paths
# @return [String] the resulting PATH
def default_path
  "bin:#{bundler_binstubs_path}:/usr/local/bin:/usr/bin:/bin"
end
# the relative path to the bundler directory of gems (memoized in
# @slug_vendor_base), e.g. vendor/bundle/ruby/1.9.1
# @return [String] resulting path
def slug_vendor_base
  instrument 'ruby.slug_vendor_base' do
    if @slug_vendor_base
      @slug_vendor_base
    elsif @ruby_version == "ruby-1.8.7"
      # hard-coded for 1.8.7 — presumably because RUBY_ENGINE is not
      # available there; confirm before changing
      @slug_vendor_base = "vendor/bundle/1.8"
    else
      # ask the (vendored) ruby itself for its engine/ABI directory
      @slug_vendor_base = run(%q(ruby -e "require 'rbconfig';puts \"vendor/bundle/#{RUBY_ENGINE}/#{RbConfig::CONFIG['ruby_version']}\"")).chomp
    end
  end
end
# the relative path to the vendored ruby directory, keyed by version
# @return [String] resulting path
def slug_vendor_ruby
  "vendor/#{ruby_version}"
end
# the relative path to the vendored jvm (JRuby builds only)
# @return [String] resulting path
def slug_vendor_jvm
  "vendor/jvm"
end
# the absolute path of the build ruby to use during the buildpack
# (a temp location outside the slug; see build_ruby?)
# @return [String] resulting path
def build_ruby_path
  "/tmp/#{ruby_version}"
end
# fetch the ruby version from bundler (`bundle platform --ruby`), with
# fallbacks: the legacy RUBY_VERSION env var, the default for brand-new
# apps, or the version recorded in metadata by the previous build.
# Memoized via @ruby_version_run so the shell-out happens only once; also
# sets @ruby_version_env_var / @ruby_version_set flags read by install_ruby.
# @return [String, nil] returns the ruby version if detected or nil if none is detected
def ruby_version
  instrument 'ruby.ruby_version' do
    return @ruby_version if @ruby_version_run
    @ruby_version_run = true
    @ruby_version_env_var = false
    @ruby_version_set = false
    old_system_path = "/usr/local/bin:/usr/local/sbin:/usr/bin:/bin:/usr/sbin:/sbin"
    @ruby_version = run_stdout("env PATH=#{bundler_path}/bin:#{old_system_path} GEM_PATH=#{bundler_path} bundle platform --ruby").chomp
    if @ruby_version == "No ruby version specified" && ENV['RUBY_VERSION']
      # for backwards compatibility.
      # this will go away in the future
      @ruby_version = ENV['RUBY_VERSION']
      @ruby_version_env_var = true
    elsif @ruby_version == "No ruby version specified"
      if new_app?
        @ruby_version = DEFAULT_RUBY_VERSION
      elsif !@metadata.exists?("buildpack_ruby_version")
        # apps deployed before versions were recorded stay pinned to 1.9.2
        @ruby_version = "ruby-1.9.2"
      else
        @ruby_version = @metadata.read("buildpack_ruby_version").chomp
      end
    else
      # normalize bundler's `ruby 1.9.3 (jruby 1.7.3)` style output into
      # a dash-joined token like ruby-1.9.3-jruby-1.7.3
      @ruby_version = @ruby_version.sub('(', '').sub(')', '').split.join('-')
      @ruby_version_set = true
    end
  end
  @ruby_version
end
# Determine if we're targeting Rubinius. Preserves the original contract:
# false when no ruby version was detected, otherwise the (truthy) match
# result for an "rbx-" version string or nil.
def ruby_version_rbx?
  return false unless ruby_version
  ruby_version.match(/rbx-/)
end
# determine if we're using jruby
# @return [Boolean] true if we are and false if we aren't
# NOTE(review): memoized with ||=, so a false result is recomputed on every
# call; harmless here because ruby_version itself is cached.
def ruby_version_jruby?
  @ruby_version_jruby ||= ruby_version ? ruby_version.match(/jruby-/) : false
end
# default JAVA_OPTS applied to JRuby apps (heap/stack sizing, compressed
# oops, UTF-8 file encoding)
# @return [String] string of JAVA_OPTS
def default_java_opts
  "-Xmx384m -Xss512k -XX:+UseCompressedOops -Dfile.encoding=UTF-8"
end
# default JRUBY_OPTS applied to JRuby apps
# @return [String] string of JRUBY_OPTS
def default_jruby_opts
  "-Xcompile.invokedynamic=true"
end
# default JAVA_TOOL_OPTIONS applied to JRuby apps
# @return [String] string of JAVA_TOOL_OPTIONS
def default_java_tool_options
  "-Djava.rmi.server.useCodebaseOnly=true"
end
# list the available valid ruby versions, fetched as ruby_versions.yml from
# the buildpack bucket into a throwaway temp dir
# @note the value is memoized
# @return [Array] list of Strings of the ruby versions available
def ruby_versions
  return @ruby_versions if @ruby_versions
  Dir.mktmpdir("ruby_versions-") do |tmpdir|
    Dir.chdir(tmpdir) do
      @fetchers[:buildpack].fetch("ruby_versions.yml")
      @ruby_versions = YAML::load_file("ruby_versions.yml")
    end
  end
  @ruby_versions
end
# sets up the environment variables for the build process: applies the
# default config vars (without clobbering pre-set ENV values) and points
# GEM_HOME/GEM_PATH/PATH at the vendored ruby and gem locations
def setup_language_pack_environment
  instrument 'ruby.setup_language_pack_environment' do
    setup_ruby_install_env
    # Hash#each returns the receiver, so config_vars is the defaults hash;
    # the ||= means already-exported ENV values win over defaults
    config_vars = default_config_vars.each do |key, value|
      ENV[key] ||= value
    end
    ENV["GEM_HOME"] = slug_vendor_base
    ENV["GEM_PATH"] = slug_vendor_base
    ENV["PATH"] = "#{ruby_install_binstub_path}:#{slug_vendor_base}/bin:#{config_vars["PATH"]}"
  end
end
# sets up the profile.d script for this buildpack: runtime GEM_PATH/PATH
# overrides, a LANG default, and JVM defaults when running JRuby
def setup_profiled
  instrument 'setup_profiled' do
    set_env_override "GEM_PATH", "$HOME/#{slug_vendor_base}:$GEM_PATH"
    set_env_default "LANG", "en_US.UTF-8"
    set_env_override "PATH", "$HOME/bin:$HOME/#{slug_vendor_base}/bin:$PATH"
    if ruby_version_jruby?
      set_env_default "JAVA_OPTS", default_java_opts
      set_env_default "JRUBY_OPTS", default_jruby_opts
      set_env_default "JAVA_TOOL_OPTIONS", default_java_tool_options
    end
  end
end
# determines if a build ruby is required: every MRI except 1.9.3/2.0.0
# (and neither rbx nor jruby) uses a separate "-build" tarball
# @return [Boolean] true if a build ruby is required
# NOTE(review): ||= re-evaluates whenever the answer is false; harmless but
# not a true memoization.
def build_ruby?
  @build_ruby ||= !ruby_version_rbx? && !ruby_version_jruby? && !%w{ruby-1.9.3 ruby-2.0.0}.include?(ruby_version)
end
# install the vendored ruby: fetches a build ruby (when required) into
# /tmp, fetches the runtime ruby (with SHA1 verification for Rubinius)
# into the slug, symlinks its executables into bin/, records the chosen
# version in metadata and prints version/deprecation messaging.
# @return [Boolean] true if it installs the vendored ruby and false otherwise
def install_ruby
  instrument 'ruby.install_ruby' do
    return false unless ruby_version
    invalid_ruby_version_message = <<ERROR
Invalid RUBY_VERSION specified: #{ruby_version}
Valid versions: #{ruby_versions.join(", ")}
ERROR
    if build_ruby?
      FileUtils.mkdir_p(build_ruby_path)
      Dir.chdir(build_ruby_path) do
        ruby_vm = "ruby"
        instrument "ruby.fetch_build_ruby" do
          # e.g. ruby-1.9.2 -> ruby-build-1.9.2.tgz
          @fetchers[:buildpack].fetch_untar("#{ruby_version.sub(ruby_vm, "#{ruby_vm}-build")}.tgz")
        end
      end
      error invalid_ruby_version_message unless $?.success?
    end
    FileUtils.mkdir_p(slug_vendor_ruby)
    Dir.chdir(slug_vendor_ruby) do
      instrument "ruby.fetch_ruby" do
        if ruby_version_rbx?
          # Rubinius tarballs ship with a SHA1 we verify before unpacking
          file = "#{ruby_version}.tar.bz2"
          sha_file = "#{file}.sha1"
          @fetchers[:rbx].fetch(file)
          @fetchers[:rbx].fetch(sha_file)
          expected_checksum = File.read(sha_file).chomp
          actual_checksum = Digest::SHA1.file(file).hexdigest
          error <<-ERROR_MSG unless expected_checksum == actual_checksum
RBX Checksum for #{file} does not match.
Expected #{expected_checksum} but got #{actual_checksum}.
Please try pushing again in a few minutes.
          ERROR_MSG
          run("tar jxf #{file}")
          FileUtils.mv(Dir.glob("app/#{slug_vendor_ruby}/*"), ".")
          FileUtils.rm_rf("app")
          FileUtils.rm(file)
          FileUtils.rm(sha_file)
        else
          @fetchers[:buildpack].fetch_untar("#{ruby_version}.tgz")
        end
      end
    end
    error invalid_ruby_version_message unless $?.success?
    app_bin_dir = "bin"
    FileUtils.mkdir_p app_bin_dir
    # NOTE(review): ruby.exe symlink presumably supports Windows-generated
    # binstubs that shell out to ruby.exe — confirm before removing
    run("ln -s ruby #{slug_vendor_ruby}/bin/ruby.exe")
    Dir["#{slug_vendor_ruby}/bin/*"].each do |vendor_bin|
      run("ln -s ../#{vendor_bin} #{app_bin_dir}")
    end
    @metadata.write("buildpack_ruby_version", ruby_version)
    if !@ruby_version_env_var
      topic "Using Ruby version: #{ruby_version}"
      if !@ruby_version_set
        warn(<<WARNING)
You have not declared a Ruby version in your Gemfile.
To set your Ruby version add this line to your Gemfile:
#{ruby_version_to_gemfile}
# See https://devcenter.heroku.com/articles/ruby-versions for more information."
WARNING
      end
    else
      warn(<<WARNING)
Using RUBY_VERSION: #{ruby_version}
RUBY_VERSION support has been deprecated and will be removed entirely on August 1, 2012.
See https://devcenter.heroku.com/articles/ruby-versions#selecting_a_version_of_ruby for more information.
WARNING
    end
  end
  true
end
# Gemfile `ruby` directive matching the detected version string.
# "ruby-1.9.3" => ruby '1.9.3'; engine builds such as
# "ruby-1.9.3-jruby-1.7.3" also pin :engine and :engine_version.
def ruby_version_to_gemfile
  parts = ruby_version.split('-')
  return "ruby '#{parts.last}'" if parts.size <= 2
  # not mri
  "ruby '#{parts[1]}', :engine => '#{parts[2]}', :engine_version => '#{parts.last}'"
end
# A freshly-created app has no vendor/heroku metadata directory yet
# (load_bundler_cache creates it on the first successful build).
def new_app?
  metadata_dir = "vendor/heroku"
  !File.exist?(metadata_dir)
end
# vendors the JVM into the slug for JRuby apps and symlinks its
# executables into the app's bin/; no-op on non-JRuby builds
def install_jvm
  instrument 'ruby.install_jvm' do
    if ruby_version_jruby?
      topic "Installing JVM: #{JVM_VERSION}"
      FileUtils.mkdir_p(slug_vendor_jvm)
      Dir.chdir(slug_vendor_jvm) do
        @fetchers[:jvm].fetch_untar("#{JVM_VERSION}.tar.gz")
      end
      bin_dir = "bin"
      FileUtils.mkdir_p bin_dir
      Dir["#{slug_vendor_jvm}/bin/*"].each do |bin|
        run("ln -s ../#{bin} #{bin_dir}")
      end
    end
  end
end
# find the ruby install path for its binstubs during build: the /tmp
# build ruby when one is required, otherwise the vendored slug ruby
# @return [String] resulting path or empty string if ruby is not vendored
def ruby_install_binstub_path
  @ruby_install_binstub_path ||=
    if build_ruby?
      "#{build_ruby_path}/bin"
    elsif ruby_version
      "#{slug_vendor_ruby}/bin"
    else
      ""
    end
end
# setup the environment so we can use the vendored ruby during the build:
# prepends its binstub dir to PATH and sets JAVA_OPTS for JRuby
def setup_ruby_install_env
  instrument 'ruby.setup_ruby_install_env' do
    ENV["PATH"] = "#{ruby_install_binstub_path}:#{ENV["PATH"]}"
    if ruby_version_jruby?
      ENV['JAVA_OPTS'] = default_java_opts
    end
  end
end
# list of default gems to vendor into the slug (currently just bundler)
# @return [Array] resulting list of gems
def gems
  [BUNDLER_GEM_PATH]
end
# installs vendored gems into the slug: untars each gem package into
# slug_vendor_base and makes the shipped executables runnable
def install_language_pack_gems
  instrument 'ruby.install_language_pack_gems' do
    FileUtils.mkdir_p(slug_vendor_base)
    Dir.chdir(slug_vendor_base) do |dir|
      gems.each do |gem|
        @fetchers[:buildpack].fetch_untar("#{gem}.tgz")
      end
      Dir["bin/*"].each {|path| run("chmod 755 #{path}") }
    end
  end
end
# default set of binaries to install — empty here; presumably overridden
# by subclasses (e.g. to add node) — confirm against the other packs
# @return [Array] resulting list
def binaries
  []
end
# vendors each declared binary into the slug, then marks everything in
# bin/ executable
def install_binaries
  instrument 'ruby.install_binaries' do
    binaries.each {|binary| install_binary(binary) }
    Dir["bin/*"].each {|path| run("chmod +x #{path}") }
  end
end
# vendors an individual binary into the slug by untarring its package
# from the buildpack bucket into bin/
# @param [String] name of the binary package from S3.
#   Example: https://s3.amazonaws.com/language-pack-ruby/node-0.4.7.tgz, where name is "node-0.4.7"
def install_binary(name)
  bin_dir = "bin"
  FileUtils.mkdir_p bin_dir
  Dir.chdir(bin_dir) do |dir|
    @fetchers[:buildpack].fetch_untar("#{name}.tgz")
  end
end
# Removes a previously vendored binary from the slug's bin/ directory.
# Missing files are ignored (:force), so this is safe to call repeatedly.
# @param [String] path relative path of the binary on the slug
def uninstall_binary(path)
  target = File.join('bin', File.basename(path))
  FileUtils.rm(target, :force => true)
end
# install libyaml into the LP to be referenced for psych compilation
# @param [String] dir tmpdir to store the libyaml files
def install_libyaml(dir)
  instrument 'ruby.install_libyaml' do
    FileUtils.mkdir_p dir
    Dir.chdir(dir) do |dir|
      @fetchers[:buildpack].fetch_untar("#{LIBYAML_PATH}.tgz")
    end
  end
end
# remove `vendor/bundle` that comes from the git repo
# in case there are native ext.
# users should be using `bundle pack` instead.
# https://github.com/heroku/heroku-buildpack-ruby/issues/21
def remove_vendor_bundle
  if File.exists?("vendor/bundle")
    warn(<<WARNING)
Removing `vendor/bundle`.
Checking in `vendor/bundle` is not supported. Please remove this directory
and add it to your .gitignore. To vendor your gems with Bundler, use
`bundle pack` instead.
WARNING
    FileUtils.rm_rf("vendor/bundle")
  end
end
# Directory where bundler drops generated binstubs inside the slug.
def bundler_binstubs_path
  'vendor/bundle/bin'
end
# runs bundler to install the dependencies. Requires a Gemfile.lock
# (regenerating it — and skipping --deployment — when the lock was made on
# Windows), builds libyaml into a temp dir so psych can compile, runs
# `bundle install` with a carefully constructed environment, then either
# cleans/stores the bundler cache (success) or fails the build with
# bundler's output (failure).
def build_bundler
  instrument 'ruby.build_bundler' do
    log("bundle") do
      bundle_without = ENV["BUNDLE_WITHOUT"] || "development:test"
      bundle_bin = "bundle"
      bundle_command = "#{bundle_bin} install --without #{bundle_without} --path vendor/bundle --binstubs #{bundler_binstubs_path}"
      unless File.exist?("Gemfile.lock")
        error "Gemfile.lock is required. Please run \"bundle install\" locally\nand commit your Gemfile.lock."
      end
      if has_windows_gemfile_lock?
        # Windows locks omit native-gem platforms; drop the lock and resolve fresh
        warn(<<WARNING)
Removing `Gemfile.lock` because it was generated on Windows.
Bundler will do a full resolve so native gems are handled properly.
This may result in unexpected gem versions being used in your app.
WARNING
        log("bundle", "has_windows_gemfile_lock")
        File.unlink("Gemfile.lock")
      else
        # using --deployment is preferred if we can
        bundle_command += " --deployment"
        cache.load ".bundle"
      end
      version = run_stdout("#{bundle_bin} version").strip
      topic("Installing dependencies using #{version}")
      load_bundler_cache
      bundler_output = ""
      Dir.mktmpdir("libyaml-") do |tmpdir|
        libyaml_dir = "#{tmpdir}/#{LIBYAML_PATH}"
        install_libyaml(libyaml_dir)
        # need to setup compile environment for the psych gem
        yaml_include = File.expand_path("#{libyaml_dir}/include")
        yaml_lib = File.expand_path("#{libyaml_dir}/lib")
        pwd = run("pwd").chomp
        bundler_path = "#{pwd}/#{slug_vendor_base}/gems/#{BUNDLER_GEM_PATH}/lib"
        # we need to set BUNDLE_CONFIG and BUNDLE_GEMFILE for
        # codon since it uses bundler.
        env_vars = "env BUNDLE_GEMFILE=#{pwd}/Gemfile BUNDLE_CONFIG=#{pwd}/.bundle/config CPATH=#{yaml_include}:$CPATH CPPATH=#{yaml_include}:$CPPATH LIBRARY_PATH=#{yaml_lib}:$LIBRARY_PATH RUBYOPT=\"#{syck_hack}\" NOKOGIRI_USE_SYSTEM_LIBRARIES=true"
        env_vars += " BUNDLER_LIB_PATH=#{bundler_path}" if ruby_version && ruby_version.match(/^ruby-1\.8\.7/)
        puts "Running: #{bundle_command}"
        instrument "ruby.bundle_install" do
          bundler_output << pipe("#{env_vars} #{bundle_command} --no-clean 2>&1")
        end
      end
      if $?.success?
        log "bundle", :status => "success"
        puts "Cleaning up the bundler cache."
        instrument "ruby.bundle_clean" do
          pipe "#{bundle_bin} clean 2> /dev/null"
        end
        cache.store ".bundle"
        cache.store "vendor/bundle"
        # Keep gem cache out of the slug
        FileUtils.rm_rf("#{slug_vendor_base}/cache")
      else
        log "bundle", :status => "failure"
        error_message = "Failed to install gems via Bundler."
        puts "Bundler Output: #{bundler_output}"
        # special-case the most common native-extension failure with a hint
        if bundler_output.match(/Installing sqlite3 \([\w.]+\)( with native extensions)?\s+Gem::Installer::ExtensionBuildError: ERROR: Failed to build gem native extension./)
          error_message += <<ERROR
Detected sqlite3 gem which is not supported on Heroku.
https://devcenter.heroku.com/articles/sqlite3
ERROR
        end
        error error_message
      end
    end
  end
end
# Downloads and unpacks the vendored node.js 0.10.3 tarball into the app
# root, then marks everything under bin/ executable. Aborts the build when
# the download/extract pipeline exits non-zero.
def install_node
  log("node") do
    FileUtils.mkdir_p("bin")
    run("curl http://heroku-buildpack-nodejs.s3.amazonaws.com/nodejs-0.10.3.tgz -s -o - | tar xzf -")
    error "Can't install node-0.10.3" unless $?.success?
    Dir["bin/*"].each do |bin_file|
      run("chmod +x #{bin_file}")
    end
  end
end
# Installs bower as an npm module: fetches the pre-packaged node_modules
# tarball for BOWER_VERSION from S3 and unpacks it into the app root.
# Aborts the build when the download/extract fails.
def install_bower
  log("bower") do
    topic "Using bower version: #{BOWER_VERSION}"
    run("curl #{BOWER_BASE_URL}/bower-#{BOWER_VERSION}/node_modules.tar.gz -s -o - | tar xzf -")
    error "Can't install bower" unless $?.success?
  end
end
# runs bower to install the JavaScript dependencies, keeping the package
# and registry stores under vendor/bower so repeat builds avoid
# re-downloading; the tmp dir is dropped before the cache is stored.
def build_bower
  # User-facing error text; keep wording exactly as-is.
  error_message = <<ERROR
Can't install JavaScript dependencies
Bower 1.0.0 released at 2013-07-23
https://github.com/bower/bower/blob/master/CHANGELOG.md
Check these points:
* Change from component.json to bower.json
* bower.json requires 'name' option
ERROR
  log("bower") do
    topic("Installing JavaScript dependencies using bower #{BOWER_VERSION}")
    # restore vendor/bower (packages + registry) from the build cache first
    load_bower_cache
    pipe("./node_modules/bower/bin/bower install --config.storage.packages=vendor/bower/packages --config.storage.registry=vendor/bower/registry --config.tmp=vendor/bower/tmp 2>&1")
    if $?.success?
      log "bower", :status => "success"
      puts "Cleaning up the bower tmp."
      # drop the scratch dir so only packages/registry get cached
      FileUtils.rm_rf("vendor/bower/tmp")
      cache.store "vendor/bower"
    else
      error error_message
    end
  end
end
# RUBYOPT fragment that force-requires the bundled syck_hack shim on rubies
# older than 1.9.3 (which still ship the syck YAML engine).
# @return [String] "-r<path>" when the shim is needed, otherwise ""
def syck_hack
  instrument "ruby.syck_hack" do
    shim_path = File.expand_path(File.join(File.dirname(__FILE__), "../../vendor/syck_hack"))
    detected_version = run_stdout('ruby -e "puts RUBY_VERSION"').chomp
    needs_shim = Gem::Version.new(detected_version) < Gem::Version.new("1.9.3")
    needs_shim ? "-r#{shim_path}" : ""
  end
end
# Writes an ERB-based config/database.yml that derives every connection
# setting from DATABASE_URL when the app boots, so the slug works against
# whatever database is attached at runtime. No-op when the app has no
# config/ directory.
def create_database_yml
  instrument 'ruby.create_database_yml' do
    log("create_database_yml") do
      # Rails-style apps only; apps without config/ are skipped entirely.
      return unless File.directory?("config")
      topic("Writing config/database.yml to read from DATABASE_URL")
      File.open("config/database.yml", "w") do |file|
        # The heredoc below is the literal ERB template evaluated by the
        # *app* at boot, not by the buildpack; the \#{} escapes keep those
        # interpolations inside the generated file. Do not reformat.
        file.puts <<-DATABASE_YML
<%
require 'cgi'
require 'uri'
begin
uri = URI.parse(ENV["DATABASE_URL"])
rescue URI::InvalidURIError
raise "Invalid DATABASE_URL"
end
raise "No RACK_ENV or RAILS_ENV found" unless ENV["RAILS_ENV"] || ENV["RACK_ENV"]
def attribute(name, value, force_string = false)
if value
value_string =
if force_string
'"' + value + '"'
else
value
end
"\#{name}: \#{value_string}"
else
""
end
end
adapter = uri.scheme
adapter = "postgresql" if adapter == "postgres"
database = (uri.path || "").split("/")[1]
username = uri.user
password = uri.password
host = uri.host
port = uri.port
params = CGI.parse(uri.query || "")
%>
<%= ENV["RAILS_ENV"] || ENV["RACK_ENV"] %>:
<%= attribute "adapter", adapter %>
<%= attribute "database", database %>
<%= attribute "username", username %>
<%= attribute "password", password, true %>
<%= attribute "host", host %>
<%= attribute "port", port %>
<% params.each do |key, value| %>
<%= key %>: <%= value.first %>
<% end %>
        DATABASE_YML
      end
    end
  end
end
# Detects whether the checked-in Gemfile.lock lists a Windows (mingw/mswin)
# platform, i.e. was generated on Windows and must be regenerated.
# @return [Gem::Platform, nil] the matching platform (truthy) or nil
def has_windows_gemfile_lock?
  bundle.platforms.detect do |platform|
    platform.is_a?(Gem::Platform) && platform.os =~ /mingw|mswin/
  end
end
# Detects whether the named gem appears in the app's resolved bundle
# (i.e. its Gemfile.lock specs).
# @param [String] gem name of the gem in question
# @return [Boolean] true when the gem is part of the bundle
def gem_is_bundled?(gem)
  bundle.specs.any? { |spec| spec.name == gem }
end
# detects if a rake task is defined in the app by asking rake for a dry
# run; a non-zero exit (missing task, broken Rakefile) yields false
# @param [String] the task in question
# @return [Boolean] true if the rake task is defined in the app
def rake_task_defined?(task)
  instrument "ruby.rake_task_defined" do
    # run returns command output (truthy string); the real signal is $?.success?
    run("env PATH=$PATH bundle exec rake #{task} --dry-run") && $?.success?
  end
end
# Executes the block with GIT_DIR removed from the environment, since an
# inherited GIT_DIR can confuse git/bundler about the working directory.
# Fix: the original never restored GIT_DIR when the block raised; the
# ensure guarantees restoration on every exit path.
# @param [block] block to be executed in the GIT_DIR free context
def allow_git(&blk)
  git_dir = ENV.delete("GIT_DIR") # can mess with bundler
  blk.call
ensure
  # Restore the caller's value; assigning nil simply removes the key again.
  ENV["GIT_DIR"] = git_dir
end
# Decides if the dev database addon should be provisioned.
# @return [Array] ['heroku-postgresql:dev'] when the pg gem is bundled,
#   otherwise an empty Array.
def add_dev_database_addon
  if gem_is_bundled?("pg")
    ['heroku-postgresql:dev']
  else
    []
  end
end
# Decides if the node.js binary must be vendored.
# @note execjs blows up at load time when no JS runtime is detected.
# @return [Array] [NODE_JS_BINARY_PATH] when execjs is bundled, else []
def add_node_js_binary
  if gem_is_bundled?('execjs')
    [NODE_JS_BINARY_PATH]
  else
    []
  end
end
# Runs `rake assets:precompile` when the app defines that task, timing the
# run and reporting the duration on success. A failing precompile is not
# fatal here; $? is simply left unsuccessful.
def run_assets_precompile_rake_task
  instrument 'ruby.run_assets_precompile_rake_task' do
    if rake_task_defined?("assets:precompile")
      require 'benchmark'
      topic "Running: rake assets:precompile"
      time = Benchmark.realtime { pipe("env PATH=$PATH:bin bundle exec rake assets:precompile 2>&1") }
      if $?.success?
        puts "Asset precompilation completed (#{"%.2f" % time}s)"
      end
    end
  end
end
# Cache-relative directory where bundler installs the app's gems; shared
# by the cache load/purge logic.
def bundler_cache
  'vendor/bundle'
end
# Restores the cached bower package/registry stores (vendor/bower) from
# the build cache; counterpart to the cache.store call in build_bower.
def load_bower_cache
  instrument "ruby.load_bower_cache" do
    cache.load "vendor/bower"
  end
end
# Restores "vendor" from the build cache, then purges the bundler cache for
# every known-bad state: the v37/v38 buildpack bugs, a ruby version change,
# stale Bundler <1.3 git gemspecs, and old nokogiri installs. Finishes by
# recording the current versions into vendor/heroku metadata for the next
# build's comparisons.
def load_bundler_cache
  instrument "ruby.load_bundler_cache" do
    cache.load "vendor"
    full_ruby_version = run_stdout(%q(ruby -v)).chomp
    rubygems_version = run_stdout(%q(gem -v)).chomp
    heroku_metadata = "vendor/heroku"
    old_rubygems_version = nil
    # names of the metadata entries kept under vendor/heroku
    ruby_version_cache = "ruby_version"
    buildpack_version_cache = "buildpack_version"
    bundler_version_cache = "bundler_version"
    rubygems_version_cache = "rubygems_version"
    # NOTE(review): reads the *ruby* version entry into old_rubygems_version
    # — looks suspicious; confirm against upstream whether this was meant
    # to be rubygems_version_cache.
    old_rubygems_version = @metadata.read(ruby_version_cache).chomp if @metadata.exists?(ruby_version_cache)
    # fix bug from v37 deploy
    if File.exists?("vendor/ruby_version")
      puts "Broken cache detected. Purging build cache."
      cache.clear("vendor")
      FileUtils.rm_rf("vendor/ruby_version")
      purge_bundler_cache
    # fix bug introduced in v38
    elsif !@metadata.exists?(buildpack_version_cache) && @metadata.exists?(ruby_version_cache)
      puts "Broken cache detected. Purging build cache."
      purge_bundler_cache
    elsif cache.exists?(bundler_cache) && @metadata.exists?(ruby_version_cache) && full_ruby_version != @metadata.read(ruby_version_cache).chomp
      puts "Ruby version change detected. Clearing bundler cache."
      puts "Old: #{@metadata.read(ruby_version_cache).chomp}"
      puts "New: #{full_ruby_version}"
      purge_bundler_cache
    end
    # fix git gemspec bug from Bundler 1.3.0+ upgrade
    if File.exists?(bundler_cache) && !@metadata.exists?(bundler_version_cache) && !run("find vendor/bundle/*/*/bundler/gems/*/ -name *.gemspec").include?("No such file or directory")
      puts "Old bundler cache detected. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/heroku/heroku-buildpack-ruby/issues/86
    if (!@metadata.exists?(rubygems_version_cache) ||
      (old_rubygems_version == "2.0.0" && old_rubygems_version != rubygems_version)) &&
      @metadata.exists?(ruby_version_cache) && @metadata.read(ruby_version_cache).chomp.include?("ruby 2.0.0p0")
      puts "Updating to rubygems #{rubygems_version}. Clearing bundler cache."
      purge_bundler_cache
    end
    # fix for https://github.com/sparklemotion/nokogiri/issues/923
    if @metadata.exists?(buildpack_version_cache) && @metadata.read(buildpack_version_cache).sub('v', '').to_i <= 76
      puts "Fixing nokogiri install. Clearing bundler cache."
      puts "See https://github.com/sparklemotion/nokogiri/issues/923."
      purge_bundler_cache
    end
    # record current state for the next build to diff against
    FileUtils.mkdir_p(heroku_metadata)
    @metadata.write(ruby_version_cache, full_ruby_version, false)
    @metadata.write(buildpack_version_cache, BUILDPACK_VERSION, false)
    @metadata.write(bundler_version_cache, BUNDLER_VERSION, false)
    @metadata.write(rubygems_version_cache, rubygems_version, false)
    @metadata.save
  end
end
# Deletes the bundler gem directory both on disk and in the build cache,
# then reinstalls the buildpack's own vendored gems (bundler) that lived
# under that path.
def purge_bundler_cache
  instrument "ruby.purge_bundler_cache" do
    FileUtils.rm_rf(bundler_cache)
    cache.clear bundler_cache
    # need to reinstall language pack gems
    install_language_pack_gems
  end
end
end
|
module Fog
  module Storage
    class AWS
      class Real
        # Change website configuration for an S3 bucket
        #
        # ==== Parameters
        # * bucket_name<~String> - name of bucket to modify
        # * suffix<~String> - suffix to append to requests for the bucket
        # * options<~Hash>
        #   * key<~String> - key to use for 4XX class errors
        #
        # ==== See Also
        # http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUTwebsite.html
        def put_bucket_website(bucket_name, suffix, options = {})
          # Build the WebsiteConfiguration payload; the ErrorDocument
          # element is only emitted when :key was supplied.
          data =
<<-DATA
<WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
<IndexDocument>
<Suffix>#{suffix}</Suffix>
</IndexDocument>
DATA
          if options[:key]
            data <<
<<-DATA
<ErrorDocument>
<Key>#{options[:key]}</Key>
</ErrorDocument>
DATA
          end
          data << '</WebsiteConfiguration>'
          # PUT ?website against the bucket's virtual-hosted endpoint
          request({
            :body => data,
            :expects => 200,
            :headers => {},
            :host => "#{bucket_name}.#{@host}",
            :method => 'PUT',
            :query => {'website' => nil}
          })
        end
      end

      class Mock # :nodoc:all
        def put_bucket_website(bucket_name, suffix, options = {})
          response = Excon::Response.new
          if self.data[:buckets][bucket_name]
            response.status = 200
          else
            # Fix: real S3 answers 404 (NoSuchBucket) for a missing bucket,
            # not 403 — keep the mock in line with the live service so
            # error-handling code behaves the same under mocking.
            response.status = 404
            raise(Excon::Errors.status_error({:expects => 200}, response))
          end
          response
        end
      end
    end
  end
end
[AWS|Storage] fix for put_bucket_website mock
module Fog
  module Storage
    class AWS
      class Real
        # Change website configuration for an S3 bucket
        #
        # ==== Parameters
        # * bucket_name<~String> - name of bucket to modify
        # * suffix<~String> - suffix to append to requests for the bucket
        # * options<~Hash>
        #   * key<~String> - key to use for 4XX class errors
        #
        # ==== See Also
        # http://docs.amazonwebservices.com/AmazonS3/latest/API/RESTBucketPUTwebsite.html
        def put_bucket_website(bucket_name, suffix, options = {})
          # Build the WebsiteConfiguration document; the ErrorDocument
          # element is only emitted when a 4XX error key was supplied.
          data =
          <<-DATA
  <WebsiteConfiguration xmlns="http://s3.amazonaws.com/doc/2006-03-01/">
    <IndexDocument>
      <Suffix>#{suffix}</Suffix>
    </IndexDocument>
          DATA
          if options[:key]
            data <<
            <<-DATA
    <ErrorDocument>
      <Key>#{options[:key]}</Key>
    </ErrorDocument>
            DATA
          end
          data << '</WebsiteConfiguration>'
          request({
            :body     => data,
            :expects  => 200,
            :headers  => {},
            :host     => "#{bucket_name}.#{@host}",
            :method   => 'PUT',
            :query    => {'website' => nil}
          })
        end
      end
      class Mock # :nodoc:all
        # Mock counterpart of Real#put_bucket_website. A missing bucket
        # yields 404, matching S3's NoSuchBucket error response.
        def put_bucket_website(bucket_name, suffix, options = {})
          response = Excon::Response.new
          if self.data[:buckets][bucket_name]
            response.status = 200
          else
            response.status = 404
            raise(Excon::Errors.status_error({:expects => 200}, response))
          end
          response
        end
      end
    end
  end
end
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

# Gem specification for database_fork.
# FIX: dropped `require 'database_fork/version'` — the version is
# hard-coded below, so the require was unused and made `gem build` fail
# when the version file is absent.
Gem::Specification.new do |spec|
  spec.name          = "database_fork"
  spec.version       = '0.0.1'
  spec.authors       = ["the-architect"]
  spec.email         = ["marcel.scherf@epicteams.com"]
  spec.summary       = %q{Fork your database}
  spec.description   = %q{Fork your database}
  spec.homepage      = "http://github.com/"
  spec.license       = "MIT"

  # Package everything tracked by git; expose bin/ scripts as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 1.6"
  spec.add_development_dependency "rake"
end
fix gemspec
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

# Gem specification for the database_fork gem: package metadata, the
# files shipped in the gem, and development-time dependencies.
Gem::Specification.new do |gem|
  gem.name        = "database_fork"
  gem.version     = '0.0.1'
  gem.authors     = ["the-architect"]
  gem.email       = ["marcel.scherf@epicteams.com"]
  gem.summary     = "Fork your database"
  gem.description = "Fork your database"
  gem.homepage    = "http://github.com/"
  gem.license     = "MIT"

  # Ship everything tracked by git; anything under bin/ becomes an
  # executable, and test/spec/feature files are marked as test files.
  tracked_files     = `git ls-files -z`.split("\x0")
  gem.files         = tracked_files
  gem.executables   = tracked_files.grep(%r{^bin/}).map { |path| File.basename(path) }
  gem.test_files    = tracked_files.grep(%r{^(test|spec|features)/})
  gem.require_paths = ["lib"]

  gem.add_development_dependency "bundler", "~> 1.6"
  gem.add_development_dependency "rake"
end
|
require 'rugged'

# Sinatra extension registering CRUD routes for repository files.
module Lanyon::Route::Files
  # Called by Sinatra when the extension is registered on +app+.
  #
  # FIX: the POST/PUT/DELETE routes now end in '/?' so a trailing slash
  # matches too — the GET pattern already tolerated one, so requests like
  # `POST /files/` previously 404'd while `GET /files/` worked.
  def self.registered(app)
    repo = Rugged::Repository.new(app.repo_dir)
    fc = Lanyon::FileCollection.new(repo.index)
    # CREATE (handler not yet implemented)
    app.post '/files/?' do
    end
    # READ: with :id render the editor for that file; otherwise list the
    # directory named by the splat (repository root when it is empty).
    app.get '/files/*/?:id?', provides: [:html, :json] do
      respond_with :editor, file: fc.get(params[:id]) unless params[:id].nil?
      path = params[:splat].first
      path = path.empty? ? '.' : File.join('.', path)
      respond_with :files, files: fc.ls(path)
    end
    # UPDATE (handler not yet implemented)
    app.put '/files/?' do
    end
    # DELETE (handler not yet implemented)
    app.delete '/files/?' do
    end
  end
end
Making / optional in files route
require 'rugged'

# Sinatra extension registering CRUD routes for repository files.
# Route patterns end in '/?' so a trailing slash matches the same handler.
module Lanyon::Route::Files
  # Called by Sinatra when the extension is registered on +app+.
  def self.registered(app)
    repo = Rugged::Repository.new(app.repo_dir)
    fc = Lanyon::FileCollection.new(repo.index)
    # CREATE (handler not yet implemented)
    app.post '/files/?' do
    end
    # READ: with :id render the editor for that file; otherwise list the
    # directory named by the splat (repository root when it is empty).
    # NOTE(review): assumes respond_with halts the request here — in plain
    # Sinatra it would not, and the listing branch would also run; confirm
    # against the respond_with helper in use.
    app.get '/files/*/?:id?', provides: [:html, :json] do
      respond_with :editor, file: fc.get(params[:id]) unless params[:id].nil?
      path = params[:splat].first
      path = path.empty? ? '.' : File.join('.', path)
      respond_with :files, files: fc.ls(path)
    end
    # UPDATE (handler not yet implemented)
    app.put '/files/?' do
    end
    # DELETE (handler not yet implemented)
    app.delete '/files/?' do
    end
  end
end
|
module Voltron
  # FIX: the class was misnamed `Encrypt`; everything this generator
  # installs (the "Voltron Notify Configuration" initializer block, the
  # notification migrations) belongs to Voltron Notify.
  class Notify
    module Generators
      # `rails generate voltron:notify:install` — injects the Notify
      # configuration into the Voltron initializer and copies migrations.
      class InstallGenerator < Rails::Generators::Base
        source_root File.expand_path("../../../../../spec/railsapp", __FILE__)
        desc "Add Voltron Notify initializer"
        # Inject the Notify configuration block into
        # config/initializers/voltron.rb. If the initializer is missing,
        # first run `rails generate voltron:install` to create it; returns
        # false when that fails. Skips injection when the marker comment is
        # already present.
        def inject_initializer
          voltron_initializer_path = Rails.root.join("config", "initializers", "voltron.rb")
          unless File.exist? voltron_initializer_path
            unless system("cd #{Rails.root.to_s} && rails generate voltron:install")
              puts "Voltron initializer does not exist. Please ensure you have the 'voltron' gem installed and run `rails g voltron:install` to create it"
              return false
            end
          end
          current_initializer = File.read voltron_initializer_path
          # NOTE(review): the /^\s#/ guard expects exactly one leading
          # whitespace before the marker — confirm it matches the indentation
          # of the injected content below.
          unless current_initializer.match(Regexp.new(/^\s# === Voltron Notify Configuration ===\n/))
            inject_into_file(voltron_initializer_path, after: "Voltron.setup do |config|\n") do
              <<-CONTENT
  # === Voltron Notify Configuration ===
  # Whether or not to use the ActiveJob queue to handle sending email/sms messages
  # A queue is still only used if configured via config.active_job.queue_adapter
  # config.notify.use_queue = false
  # How long to delay sending email/sms messages. Use this in conjunction with config.notify.use_queue
  # config.notify.delay = 0.seconds
  # Twilio account id number
  # config.notify.sms_account_sid = ""
  # Twilio authentication token
  # config.notify.sms_auth_token = ""
  # Default from phone number. Must be the number provided by Twilio.
  # Avoid the overhead of pre-formatting the number by entering in the format "+1234567890"
  # config.notify.sms_from = ""
  # Default from email address. If not specified the default from in the mailer or the :from param on mail() is used
  # config.notify.email_from = "no-reply@example.com"
              CONTENT
            end
          end
        end
        # Copy the notification migrations into db/migrate, timestamping
        # each file name.
        # NOTE(review): Time.now is formatted once per file — two files
        # generated within the same second get identical migration version
        # numbers, which Rails rejects; confirm/serialize if this bites.
        def copy_migrations
          copy_file "db/migrate/create_voltron_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_sms_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_sms_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_email_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_email_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_sms_notification_attachments.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_sms_notification_attachments.rb")
        end
      end
    end
  end
end
Fixes typo in generator
module Voltron
  class Notify
    module Generators
      # `rails generate voltron:notify:install` — injects the Notify
      # configuration into the Voltron initializer and copies migrations.
      class InstallGenerator < Rails::Generators::Base
        source_root File.expand_path("../../../../../spec/railsapp", __FILE__)
        desc "Add Voltron Notify initializer"
        # Inject the Notify configuration block into
        # config/initializers/voltron.rb. If the initializer is missing,
        # first run `rails generate voltron:install` to create it; returns
        # false when that fails. Skips injection when the marker comment is
        # already present.
        def inject_initializer
          voltron_initialzer_path = Rails.root.join("config", "initializers", "voltron.rb")
          unless File.exist? voltron_initialzer_path
            unless system("cd #{Rails.root.to_s} && rails generate voltron:install")
              puts "Voltron initializer does not exist. Please ensure you have the 'voltron' gem installed and run `rails g voltron:install` to create it"
              return false
            end
          end
          current_initiailzer = File.read voltron_initialzer_path
          # NOTE(review): the /^\s#/ guard expects exactly one leading
          # whitespace before the marker — confirm it matches the indentation
          # of the injected content below.
          unless current_initiailzer.match(Regexp.new(/^\s# === Voltron Notify Configuration ===\n/))
            inject_into_file(voltron_initialzer_path, after: "Voltron.setup do |config|\n") do
              <<-CONTENT
  # === Voltron Notify Configuration ===
  # Whether or not to use the ActiveJob queue to handle sending email/sms messages
  # A queue is still only used if configured via config.active_job.queue_adapter
  # config.notify.use_queue = false
  # How long to delay sending email/sms messages. Use this in conjunction with config.notify.use_queue
  # config.notify.delay = 0.seconds
  # Twilio account id number
  # config.notify.sms_account_sid = ""
  # Twilio authentication token
  # config.notify.sms_auth_token = ""
  # Default from phone number. Must be the number provided by Twilio.
  # Avoid the overhead of pre-formatting the number by entering in the format "+1234567890"
  # config.notify.sms_from = ""
  # Default from email address. If not specified the default from in the mailer or the :from param on mail() is used
  # config.notify.email_from = "no-reply@example.com"
              CONTENT
            end
          end
        end
        # Copy the notification migrations into db/migrate, timestamping
        # each file name.
        # NOTE(review): Time.now is formatted once per file — two files
        # generated within the same second get identical migration version
        # numbers, which Rails rejects; confirm/serialize if this bites.
        def copy_migrations
          copy_file "db/migrate/create_voltron_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_sms_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_sms_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_email_notifications.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_email_notifications.rb")
          copy_file "db/migrate/create_voltron_notification_sms_notification_attachments.rb", Rails.root.join("db", "migrate", "#{Time.now.strftime("%Y%m%d%H%M%S")}_create_voltron_notification_sms_notification_attachments.rb")
        end
      end
    end
  end
end |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.