CombinedText stringlengths 4 3.42M |
|---|
# Imports PBCore XML documents into the local database: builds Item records
# (with digital instances and audio files) inside a target Collection.
class PBCoreImporter
  # File extensions treated as audio when deciding whether to ingest a URL.
  # Expanded from the original mp3/wav/mp2/aac set to cover common formats.
  AUDIO_EXTENSIONS = %w[aac aif aiff alac flac m4a m4p mp2 mp3 mp4 ogg raw spx wav wma].freeze

  attr_accessor :file, :collection

  # options[:file]          - path to a PBCore XML file (must exist, non-empty)
  # options[:collection_id] - id of the Collection imported items attach to
  def initialize(options={})
    PBCore.config[:date_formats] = ['%m/%d/%Y', '%Y-%m-%d']
    # File.size? returns nil for a missing or empty file, so to_i > 0 covers both.
    raise "File missing or 0 length: #{options[:file]}" unless (File.size?(options[:file]).to_i > 0)
    self.collection = Collection.find(options[:collection_id])
    self.file = File.open(options[:file])
  end

  # Parses a single PBCore description document and persists it as an Item.
  def import_omeka_description_document
    doc = PBCore::V2::DescriptionDocument.parse(file)
    item_for_omeka_doc(doc).save!
  end

  # Parses a PBCore collection document and persists each contained
  # description document as an Item.
  def import_omeka_collection
    pbc_collection = PBCore::V2::Collection.parse(file)
    pbc_collection.description_documents.each do |doc|
      # Throttle: pause between items (presumably to be polite to the remote
      # host serving the referenced media — TODO confirm).
      sleep(2)
      item_for_omeka_doc(doc).save!
    end
  end

  # True when the URL's path ends in a recognized audio file extension.
  # Returns false (instead of raising, as the old code did) for nil, empty,
  # or unparseable URLs — detect_element can legitimately return nil.
  def is_audio_file?(url)
    puts "is_audio_file? url:#{url}"
    return false if url.to_s.empty?
    uri = URI.parse(url)
    ext = (File.extname(uri.path)[1..-1] || "").downcase
    AUDIO_EXTENSIONS.include?(ext)
  rescue URI::InvalidURIError
    false
  end

  # Maps a PBCore description document onto a new (unsaved) Item, including
  # digital instances and their audio files. Physical instantiations and
  # non-audio URLs are skipped.
  def item_for_omeka_doc(doc)
    item = Item.new
    item.collection = collection
    # match_value ['created', nil] accepts explicitly-typed and untyped dates.
    item.date_created = doc.detect_element(:asset_dates, match_value: ['created', nil], value: :date)
    item.identifier = doc.detect_element(:identifiers)
    # default_first: false — only use an explicitly matching title, no fallback.
    item.episode_title = doc.detect_element(:titles, match_value: 'episode', default_first: false)
    item.series_title = doc.detect_element(:titles, match_value: 'series', default_first: false)
    item.title = doc.detect_element(:titles)
    item.tags = doc.subjects.collect{|s| s.value}.compact
    item.description = doc.detect_element(:descriptions)
    item.physical_location = doc.detect_element(:coverages, match_value: 'spatial', value: :info, default_first: false).try(:value)
    item.creators = doc.creators.collect{|c| Person.for_name(c.name.value)}
    item.contributions = doc.contributors.collect{|c| Contribution.new(person:Person.for_name(c.name.value), role:c.role.value)}
    # Flatten all rights statements (summary, link, embedded) into one string.
    item.rights = doc.rights.collect{|r| [r.summary.try(:value), r.link.try(:value), r.embedded.try(:value)].compact.join("\n") }.compact.join("\n")
    item.notes = doc.detect_element(:annotations, match_value: 'notes', default_first: false)
    item.transcription = doc.detect_element(:annotations, match_value: 'transcript', default_first: false)
    # process each instance
    doc.instantiations.each do |pbcInstance|
      next if pbcInstance.physical
      instance = item.instances.build
      instance.digital = true
      instance.format = pbcInstance.try(:digital).try(:value)
      instance.identifier = pbcInstance.detect_element(:identifiers)
      instance.location = pbcInstance.location
      if pbcInstance.parts.blank?
        # Single-file instantiation: the URL lives on the instantiation itself.
        url = pbcInstance.detect_element(:identifiers, match_attr: :source, match_value: ['URL', nil])
        next unless is_audio_file?(url)
        audio = AudioFile.new
        instance.audio_files << audio
        item.audio_files << audio
        audio.identifier = url
        audio.remote_file_url = url
        audio.format = instance.format
        audio.size = pbcInstance.file_size.try(:value).to_i
      else
        # Multi-part instantiation: one AudioFile per part.
        pbcInstance.parts.each do |pbcPart|
          url = pbcPart.detect_element(:identifiers, match_attr: :source, match_value: ['URL', nil])
          next unless is_audio_file?(url)
          audio = AudioFile.new
          instance.audio_files << audio
          item.audio_files << audio
          audio.identifier = url
          audio.remote_file_url = url
          # Prefer the part's own format, falling back to the instance's.
          audio.format = pbcPart.try(:digital).try(:value) || instance.format
          audio.size = pbcPart.file_size.try(:value).to_i
        end
      end
      item.instances << instance
    end
    item
  end
end
add file extensions
# Imports PBCore XML documents into the local database: builds Item records
# (with digital instances and audio files) inside a target Collection.
class PBCoreImporter
attr_accessor :file, :collection
# options[:file]          - path to a PBCore XML file (must exist, non-empty)
# options[:collection_id] - id of the Collection imported items attach to
def initialize(options={})
PBCore.config[:date_formats] = ['%m/%d/%Y', '%Y-%m-%d']
# File.size? returns nil for a missing or empty file, so to_i > 0 covers both.
raise "File missing or 0 length: #{options[:file]}" unless (File.size?(options[:file]).to_i > 0)
self.collection = Collection.find(options[:collection_id])
self.file = File.open(options[:file])
end
# Parses a single PBCore description document and persists it as an Item.
def import_omeka_description_document
doc = PBCore::V2::DescriptionDocument.parse(file)
item_for_omeka_doc(doc).save!
end
# Parses a PBCore collection document and persists each contained
# description document as an Item.
def import_omeka_collection
pbc_collection = PBCore::V2::Collection.parse(file)
pbc_collection.description_documents.each do |doc|
# Throttle: pause between items (presumably to be polite to the remote
# host serving the referenced media — TODO confirm).
sleep(2)
item_for_omeka_doc(doc).save!
end
end
# True when the URL's path ends in a recognized audio file extension.
# NOTE(review): raises if url is nil or unparseable; detect_element (the
# usual caller input) may return nil — verify upstream guarantees.
def is_audio_file?(url)
puts "is_audio_file? url:#{url}"
uri = URI.parse(url)
ext = (File.extname(uri.path)[1..-1] || "").downcase
['aac', 'aif', 'aiff', 'alac', 'flac', 'm4a', 'm4p', 'mp2', 'mp3', 'mp4', 'ogg', 'raw', 'spx', 'wav', 'wma'].include?(ext)
end
# Maps a PBCore description document onto a new (unsaved) Item, including
# digital instances and their audio files. Physical instantiations and
# non-audio URLs are skipped.
def item_for_omeka_doc(doc)
item = Item.new
item.collection = collection
# match_value ['created', nil] accepts explicitly-typed and untyped dates.
item.date_created = doc.detect_element(:asset_dates, match_value: ['created', nil], value: :date)
item.identifier = doc.detect_element(:identifiers)
# default_first: false — only use an explicitly matching title, no fallback.
item.episode_title = doc.detect_element(:titles, match_value: 'episode', default_first: false)
item.series_title = doc.detect_element(:titles, match_value: 'series', default_first: false)
item.title = doc.detect_element(:titles)
item.tags = doc.subjects.collect{|s| s.value}.compact
item.description = doc.detect_element(:descriptions)
item.physical_location = doc.detect_element(:coverages, match_value: 'spatial', value: :info, default_first: false).try(:value)
item.creators = doc.creators.collect{|c| Person.for_name(c.name.value)}
item.contributions = doc.contributors.collect{|c| Contribution.new(person:Person.for_name(c.name.value), role:c.role.value)}
# Flatten all rights statements (summary, link, embedded) into one string.
item.rights = doc.rights.collect{|r| [r.summary.try(:value), r.link.try(:value), r.embedded.try(:value)].compact.join("\n") }.compact.join("\n")
item.notes = doc.detect_element(:annotations, match_value: 'notes', default_first: false)
item.transcription = doc.detect_element(:annotations, match_value: 'transcript', default_first: false)
# process each instance
doc.instantiations.each do |pbcInstance|
next if pbcInstance.physical
instance = item.instances.build
instance.digital = true
instance.format = pbcInstance.try(:digital).try(:value)
instance.identifier = pbcInstance.detect_element(:identifiers)
instance.location = pbcInstance.location
if pbcInstance.parts.blank?
# Single-file instantiation: the URL lives on the instantiation itself.
url = pbcInstance.detect_element(:identifiers, match_attr: :source, match_value: ['URL', nil])
next unless is_audio_file?(url)
audio = AudioFile.new
instance.audio_files << audio
item.audio_files << audio
audio.identifier = url
audio.remote_file_url = url
audio.format = instance.format
audio.size = pbcInstance.file_size.try(:value).to_i
else
# Multi-part instantiation: one AudioFile per part.
pbcInstance.parts.each do |pbcPart|
url = pbcPart.detect_element(:identifiers, match_attr: :source, match_value: ['URL', nil])
next unless is_audio_file?(url)
audio = AudioFile.new
instance.audio_files << audio
item.audio_files << audio
audio.identifier = url
audio.remote_file_url = url
# Prefer the part's own format, falling back to the instance's.
audio.format = pbcPart.try(:digital).try(:value) || instance.format
audio.size = pbcPart.file_size.try(:value).to_i
end
end
item.instances << instance
end
item
end
end
|
#
# Author:: Christopher Walters (<cw@opscode.com>)
# Author:: Mark Anderson (<mark@opscode.com>)
# Copyright:: Copyright (c) 2010-2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'securerandom'
require 'dep_selector/exceptions'
begin
require "dep_gecode"
rescue LoadError
path = File.expand_path("../../../ext/dep_gecode", __FILE__)
$:.unshift(path)
require "dep_gecode"
end
module DepSelector
  # Thin Ruby wrapper around the dep_gecode native extension. Each instance
  # owns a native VersionProblem; a finalizer releases the C++ object.
  class GecodeWrapper
    attr_reader :gecode_problem
    attr_reader :debug_logs_on

    # -1 means "don't care": the variable does not need to be assigned.
    DontCareConstraint = -1
    # -2 is never a valid assignment, so constraining to it forces failure.
    NoMatchConstraint = -2
    # Statistics dumping disabled for now; this should become configurable.
    DumpStatistics = false

    # This ensures that we properly deallocate the C++ class at the heart of
    # dep_gecode; modeled after
    # http://www.mikeperham.com/2010/02/24/the-trouble-with-ruby-finalizers/
    #
    # Accepts either a package count (creates a fresh native problem) or an
    # already-created native problem (used by #solve to wrap its result).
    def initialize(problem_or_package_count, debug=false)
      if (problem_or_package_count.is_a?(Numeric))
        logId = SecureRandom.uuid
        @debug_logs_on = debug
        @gecode_problem = Dep_gecode.VersionProblemCreate(problem_or_package_count, DumpStatistics, debug, logId)
      else
        @gecode_problem = problem_or_package_count
      end
      ObjectSpace.define_finalizer(self, self.class.finalize(@gecode_problem))
    end

    # The finalizer proc is built in a class method so it cannot capture
    # self, which would prevent the instance from being collected.
    def self.finalize(gecode_problem)
      proc { Dep_gecode.VersionProblemDestroy(gecode_problem) }
    end

    # Raises unless package_id is a valid variable index for this problem.
    def check_package_id(package_id, param_name)
      raise "Gecode #{param_name} is out of range #{package_id}" unless (package_id >= 0 && package_id < self.size())
    end

    # Number of variables in the native problem.
    def size()
      raise "Gecode internal failure" if gecode_problem.nil?
      Dep_gecode.VersionProblemSize(gecode_problem)
    end

    # Number of packages registered in the native problem.
    def package_count()
      raise "Gecode internal failure" if gecode_problem.nil?
      Dep_gecode.VersionProblemPackageCount(gecode_problem)
    end

    # Registers a package variable with version domain [min, max].
    def add_package(min, max, current_version)
      raise "Gecode internal failure" if gecode_problem.nil?
      Dep_gecode.AddPackage(gecode_problem, min, max, current_version)
    end

    # Declares: if package_id is at version, then dependent_package_id must
    # be within [min_dependent_version, max_dependent_version].
    def add_version_constraint(package_id, version, dependent_package_id, min_dependent_version, max_dependent_version)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      check_package_id(dependent_package_id, "dependent_package_id")
      # Valid package versions are between -1 and its max (-1 means
      # don't care, meaning it doesn't need to be assigned). To
      # indicate constraints that match no versions, -2 is used, since
      # it's not a valid assignment of the variable; thus, any branch
      # that assigns -2 will fail.
      #
      # This mechanism is also used when a dependent package has no
      # versions, which only happens if the dependency's package is
      # auto-vivified when creating the parent PackageVersion's
      # dependency but with no corresponding set of PackageVersions
      # (i.e. it's an invalid dependency, because it does not exist in
      # the dependency graph). Again, we won't abort immediately, but
      # we'll add a constraint to the package that makes exploring
      # that portion of the solution space unsatisfiable. Thus it is
      # impossible to find solutions dependent on non-existent
      # packages.
      min = min_dependent_version || NoMatchConstraint
      max = max_dependent_version || NoMatchConstraint
      Dep_gecode.AddVersionConstraint(gecode_problem, package_id, version, dependent_package_id, min, max)
      # if the package was constrained to no versions, hint to the
      # solver that in the event of failure, it should prefer to
      # indicate constraints on dependent_package_id as the culprit
      if min == NoMatchConstraint && max == NoMatchConstraint
        Dep_gecode.MarkPackageSuspicious(gecode_problem, dependent_package_id)
      end
    end

    # The solved version assignment for a package.
    def get_package_version(package_id)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageVersion(gecode_problem, package_id)
    end

    # Whether the solver disabled this package to make the problem solvable.
    def is_package_disabled?(package_id)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageDisabledState(gecode_problem, package_id)
    end

    # Upper bound of a package variable's current domain.
    def get_package_max(package_id)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageMax(gecode_problem, package_id)
    end

    # Lower bound of a package variable's current domain.
    def get_package_min(package_id)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageMin(gecode_problem, package_id)
    end

    # Debugging: dump the whole native problem.
    def dump()
      raise "Gecode internal failure" if gecode_problem.nil?
      Dep_gecode.VersionProblemDump(gecode_problem)
    end

    # Debugging: print a single package variable.
    def dump_package_var(package_id)
      raise "Gecode internal failure" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.VersionProblemPrintPackageVar(gecode_problem, package_id)
    end

    # Number of packages the solver had to disable; > 0 means no clean solution.
    def package_disabled_count
      raise "Gecode internal failure (package disabled count)" if gecode_problem.nil?
      Dep_gecode.GetDisabledVariableCount(gecode_problem)
    end

    # Marks a package as required in any solution.
    def mark_required(package_id)
      raise "Gecode internal failure (mark_required)" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.MarkPackageRequired(gecode_problem, package_id)
    end

    # Adds a weighted preference for the package being at its latest version.
    def mark_preferred_to_be_at_latest(package_id, weight)
      raise "Gecode internal failure (mark_preferred_to_be_at_latest)" if gecode_problem.nil?
      check_package_id(package_id, "package_id")
      Dep_gecode.MarkPackagePreferredToBeAtLatest(gecode_problem, package_id, weight)
    end

    # Runs the solver and wraps the resulting native problem in a new
    # GecodeWrapper. Raises Exceptions::NoSolutionFound if any package had
    # to be disabled to reach a solution.
    def solve()
      raise "Gecode internal failure (solve)" if gecode_problem.nil?
      solution = GecodeWrapper.new(Dep_gecode.Solve(gecode_problem), debug_logs_on)
      raise "Gecode internal failure (no solution found)" if (solution.nil?)
      raise Exceptions::NoSolutionFound.new(solution) if solution.package_disabled_count > 0
      solution
    end
  end
end
disable stats logging for now
#
# Author:: Christopher Walters (<cw@opscode.com>)
# Author:: Mark Anderson (<mark@opscode.com>)
# Copyright:: Copyright (c) 2010-2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'securerandom'
require 'dep_selector/exceptions'
begin
require "dep_gecode"
rescue LoadError
path = File.expand_path("../../../ext/dep_gecode", __FILE__)
$:.unshift(path)
require "dep_gecode"
end
module DepSelector
  # Thin Ruby wrapper around the dep_gecode native extension. Each instance
  # owns a native VersionProblem; a finalizer releases the C++ object.
  class GecodeWrapper
    attr_reader :gecode_problem
    attr_reader :debug_logs_on

    # -1 means "don't care": the variable does not need to be assigned.
    DontCareConstraint = -1
    # -2 is never a valid assignment, so constraining to it forces failure.
    NoMatchConstraint = -2
    # This should be configurable...
    DumpStatistics = false

    # Ensures the C++ object at the heart of dep_gecode is deallocated;
    # modeled after
    # http://www.mikeperham.com/2010/02/24/the-trouble-with-ruby-finalizers/
    #
    # Accepts either a package count (creates a fresh native problem) or an
    # already-created native problem (used by #solve to wrap its result).
    def initialize(problem_or_package_count, debug=false)
      if problem_or_package_count.is_a?(Numeric)
        @debug_logs_on = debug
        @gecode_problem = Dep_gecode.VersionProblemCreate(
          problem_or_package_count, DumpStatistics, debug, SecureRandom.uuid
        )
      else
        @gecode_problem = problem_or_package_count
      end
      ObjectSpace.define_finalizer(self, self.class.finalize(@gecode_problem))
    end

    # The finalizer proc is built in a class method so it cannot capture
    # self, which would prevent the instance from being collected.
    def self.finalize(gecode_problem)
      proc { Dep_gecode.VersionProblemDestroy(gecode_problem) }
    end

    # Raises unless package_id is a valid variable index for this problem.
    def check_package_id(package_id, param_name)
      in_range = package_id >= 0 && package_id < size
      raise "Gecode #{param_name} is out of range #{package_id}" unless in_range
    end

    # Number of variables in the native problem.
    def size
      guard_problem!
      Dep_gecode.VersionProblemSize(gecode_problem)
    end

    # Number of packages registered in the native problem.
    def package_count
      guard_problem!
      Dep_gecode.VersionProblemPackageCount(gecode_problem)
    end

    # Registers a package variable with version domain [min, max].
    def add_package(min, max, current_version)
      guard_problem!
      Dep_gecode.AddPackage(gecode_problem, min, max, current_version)
    end

    # Declares: if package_id is at version, then dependent_package_id must
    # lie within [min_dependent_version, max_dependent_version].
    def add_version_constraint(package_id, version, dependent_package_id, min_dependent_version, max_dependent_version)
      guard_problem!
      check_package_id(package_id, "package_id")
      check_package_id(dependent_package_id, "dependent_package_id")
      # Valid package versions run from -1 (don't care) up to the package's
      # max. A constraint that matches no versions is encoded as -2, which
      # is never a valid assignment, so any branch assigning it fails.
      #
      # The same encoding handles a dependent package with no versions at
      # all — which happens when the dependency's package was auto-vivified
      # while creating the parent PackageVersion's dependency but has no
      # corresponding PackageVersions (i.e. an invalid dependency that does
      # not exist in the dependency graph). Rather than aborting
      # immediately, the added constraint makes that region of the solution
      # space unsatisfiable, so no solution can depend on a non-existent
      # package.
      min = min_dependent_version || NoMatchConstraint
      max = max_dependent_version || NoMatchConstraint
      Dep_gecode.AddVersionConstraint(gecode_problem, package_id, version, dependent_package_id, min, max)
      # When the package was constrained to no versions, hint the solver
      # that on failure it should blame constraints on dependent_package_id.
      if min == NoMatchConstraint && max == NoMatchConstraint
        Dep_gecode.MarkPackageSuspicious(gecode_problem, dependent_package_id)
      end
    end

    # The solved version assignment for a package.
    def get_package_version(package_id)
      guard_problem!
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageVersion(gecode_problem, package_id)
    end

    # Whether the solver disabled this package to make the problem solvable.
    def is_package_disabled?(package_id)
      guard_problem!
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageDisabledState(gecode_problem, package_id)
    end

    # Upper bound of a package variable's current domain.
    def get_package_max(package_id)
      guard_problem!
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageMax(gecode_problem, package_id)
    end

    # Lower bound of a package variable's current domain.
    def get_package_min(package_id)
      guard_problem!
      check_package_id(package_id, "package_id")
      Dep_gecode.GetPackageMin(gecode_problem, package_id)
    end

    # Debugging: dump the whole native problem.
    def dump
      guard_problem!
      Dep_gecode.VersionProblemDump(gecode_problem)
    end

    # Debugging: print a single package variable.
    def dump_package_var(package_id)
      guard_problem!
      check_package_id(package_id, "package_id")
      Dep_gecode.VersionProblemPrintPackageVar(gecode_problem, package_id)
    end

    # Number of packages the solver had to disable; > 0 means no clean solution.
    def package_disabled_count
      guard_problem!("Gecode internal failure (package disabled count)")
      Dep_gecode.GetDisabledVariableCount(gecode_problem)
    end

    # Marks a package as required in any solution.
    def mark_required(package_id)
      guard_problem!("Gecode internal failure (mark_required)")
      check_package_id(package_id, "package_id")
      Dep_gecode.MarkPackageRequired(gecode_problem, package_id)
    end

    # Adds a weighted preference for the package being at its latest version.
    def mark_preferred_to_be_at_latest(package_id, weight)
      guard_problem!("Gecode internal failure (mark_preferred_to_be_at_latest)")
      check_package_id(package_id, "package_id")
      Dep_gecode.MarkPackagePreferredToBeAtLatest(gecode_problem, package_id, weight)
    end

    # Runs the solver and wraps the resulting native problem in a new
    # GecodeWrapper. Raises Exceptions::NoSolutionFound if any package had
    # to be disabled to reach a solution.
    def solve
      guard_problem!("Gecode internal failure (solve)")
      solution = GecodeWrapper.new(Dep_gecode.Solve(gecode_problem), debug_logs_on)
      raise "Gecode internal failure (no solution found)" if solution.nil?
      raise Exceptions::NoSolutionFound.new(solution) if solution.package_disabled_count > 0
      solution
    end

    private

    # All wrapper calls require a live native problem handle.
    def guard_problem!(message = "Gecode internal failure")
      raise message if gecode_problem.nil?
    end
  end
end
|
# Copyright © 2010 Brighter Planet.
# See LICENSE for details.
# Contact Brighter Planet for dual-license arrangements.
require 'weighted_average'
module BrighterPlanet
  module Pet
    # Decision-tree carbon model for a pet. Computes emission (kg CO2e) from
    # diet size, diet emission intensity, and the active timeframe, with
    # species/breed/gender-based fallbacks for each input.
    module CarbonModel
      def self.included(base)
        base.decide :emission, :with => :characteristics do
          # kg CO2e: food-related emissions over the active period plus a
          # prorated share of the fixed annual veterinary emissions.
          committee :emission do
            quorum 'from diet size', :needs => [:diet_size, :diet_emission_intensity, :active_subtimeframe] do |characteristics, timeframe|
              characteristics[:active_subtimeframe].days * characteristics[:diet_size] * characteristics[:diet_emission_intensity] + ANNUAL_VETERINARY_EMISSION * (characteristics[:active_subtimeframe] / timeframe.year)
            end
          end
          committee :diet_emission_intensity do # kg/joule
            quorum 'from species', :needs => :species do |characteristics|
              characteristics[:species].diet_emission_intensity
            end
            # Model-wide default when species is unknown.
            quorum 'default' do
              base.fallback.diet_emission_intensity
            end
          end
          committee :diet_size do # joules
            quorum 'from weight', :needs => [:weight, :marginal_dietary_requirement, :fixed_dietary_requirement] do |characteristics|
              characteristics[:weight] * characteristics[:marginal_dietary_requirement] + characteristics[:fixed_dietary_requirement]
            end
          end
          committee :marginal_dietary_requirement do # joules/kg
            quorum 'from species', :needs => :species do |characteristics|
              characteristics[:species].marginal_dietary_requirement
            end
            quorum 'default' do
              base.fallback.marginal_dietary_requirement
            end
          end
          committee :fixed_dietary_requirement do # joules
            quorum 'from species', :needs => :species do |characteristics|
              characteristics[:species].fixed_dietary_requirement
            end
            quorum 'default' do
              base.fallback.fixed_dietary_requirement
            end
          end
          committee :weight do # kg
            # Most specific first: breed+gender, then breed, then species,
            # then the model-wide default.
            quorum 'from breed and gender', :needs => [:breed, :gender] do |characteristics|
              # FIX: the finder expects name strings; the characteristics
              # carry breed/gender objects, so unwrap them with .name —
              # passing the objects themselves made the lookup always miss.
              if breed_gender = BreedGender.find_by_breed_name_and_gender_name(characteristics[:breed].name, characteristics[:gender].name)
                breed_gender.weight
              end
            end
            quorum 'from breed', :needs => :breed do |characteristics|
              characteristics[:breed].weight
            end
            quorum 'from species', :needs => :species do |characteristics|
              characteristics[:species].weight
            end
            quorum 'default' do
              base.fallback.weight
            end
          end
          # The portion of the requested timeframe during which the pet was owned.
          committee :active_subtimeframe do
            quorum 'from acquisition and retirement', :needs => [:acquisition, :retirement] do |characteristics, timeframe|
              Timeframe.constrained_new characteristics[:acquisition].to_date, characteristics[:retirement].to_date, timeframe
            end
          end
          committee :acquisition do
            quorum 'from retirement', :appreciates => :retirement do |characteristics, timeframe|
              [ timeframe.from, characteristics[:retirement] ].compact.min
            end
          end
          committee :retirement do
            quorum 'from acquisition', :appreciates => :acquisition do |characteristics, timeframe|
              [ timeframe.to, characteristics[:acquisition] ].compact.max
            end
          end
        end
      end
      ANNUAL_VETERINARY_EMISSION = 0.00437.tons.to :kilograms # kg CO2e per https://brighterplanet.sifterapp.com/projects/30/issues/846/comments
    end
  end
end
A change to the gem dependencies made the BreedGender lookup malfunction: the finder expects name strings, so pass breed.name and gender.name instead of the objects themselves.
# Copyright © 2010 Brighter Planet.
# See LICENSE for details.
# Contact Brighter Planet for dual-license arrangements.
require 'weighted_average'
module BrighterPlanet
module Pet
# Decision-tree carbon model for a pet. Computes emission (kg CO2e) from
# diet size, diet emission intensity, and the active timeframe, with
# species/breed/gender-based fallbacks for each input.
module CarbonModel
def self.included(base)
base.decide :emission, :with => :characteristics do
# kg CO2e: food-related emissions over the active period plus a
# prorated share of the fixed annual veterinary emissions.
committee :emission do
quorum 'from diet size', :needs => [:diet_size, :diet_emission_intensity, :active_subtimeframe] do |characteristics, timeframe|
characteristics[:active_subtimeframe].days * characteristics[:diet_size] * characteristics[:diet_emission_intensity] + ANNUAL_VETERINARY_EMISSION * (characteristics[:active_subtimeframe] / timeframe.year)
end
end
committee :diet_emission_intensity do # kg/joule
quorum 'from species', :needs => :species do |characteristics|
characteristics[:species].diet_emission_intensity
end
# Model-wide default when species is unknown.
quorum 'default' do
base.fallback.diet_emission_intensity
end
end
committee :diet_size do # joules
quorum 'from weight', :needs => [:weight, :marginal_dietary_requirement, :fixed_dietary_requirement] do |characteristics|
characteristics[:weight] * characteristics[:marginal_dietary_requirement] + characteristics[:fixed_dietary_requirement]
end
end
committee :marginal_dietary_requirement do # joules/kg
quorum 'from species', :needs => :species do |characteristics|
characteristics[:species].marginal_dietary_requirement
end
quorum 'default' do
base.fallback.marginal_dietary_requirement
end
end
committee :fixed_dietary_requirement do # joules
quorum 'from species', :needs => :species do |characteristics|
characteristics[:species].fixed_dietary_requirement
end
quorum 'default' do
base.fallback.fixed_dietary_requirement
end
end
committee :weight do # kg
# Most specific first: breed+gender, then breed, then species, then default.
quorum 'from breed and gender', :needs => [:breed, :gender] do |characteristics|
# The finder takes name strings, so the breed/gender objects are
# unwrapped with .name before lookup.
if breed_gender = BreedGender.find_by_breed_name_and_gender_name(characteristics[:breed].name, characteristics[:gender].name)
breed_gender.weight
end
end
quorum 'from breed', :needs => :breed do |characteristics|
characteristics[:breed].weight
end
quorum 'from species', :needs => :species do |characteristics|
characteristics[:species].weight
end
quorum 'default' do
base.fallback.weight
end
end
# The portion of the requested timeframe during which the pet was owned.
committee :active_subtimeframe do
quorum 'from acquisition and retirement', :needs => [:acquisition, :retirement] do |characteristics, timeframe|
Timeframe.constrained_new characteristics[:acquisition].to_date, characteristics[:retirement].to_date, timeframe
end
end
committee :acquisition do
quorum 'from retirement', :appreciates => :retirement do |characteristics, timeframe|
[ timeframe.from, characteristics[:retirement] ].compact.min
end
end
committee :retirement do
quorum 'from acquisition', :appreciates => :acquisition do |characteristics, timeframe|
[ timeframe.to, characteristics[:acquisition] ].compact.max
end
end
end
end
ANNUAL_VETERINARY_EMISSION = 0.00437.tons.to :kilograms # kg CO2e per https://brighterplanet.sifterapp.com/projects/30/issues/846/comments
end
end
end
|
# frozen_string_literal: true
module PhModel
# Gem version string (semantic versioning).
VERSION = '1.0.2'.freeze
end
1.1.0
# frozen_string_literal: true
module PhModel
# Gem version string (semantic versioning).
VERSION = '1.1.0'.freeze
end
|
#private String postKey;
#private Message error;
# Response value object for a create call; mirrors the Java-style fields
# documented in the comments above (postKey, error).
class CreateResponse
  attr_accessor :postKey, :error

  # Builds a CreateResponse from a parsed JSON hash with string keys
  # ("postKey", "error"). Missing keys leave the attribute nil.
  def self.from_json(json)
    response = new
    response.postKey = json["postKey"]
    response.error = json["error"]
    response
  end
end
Add the method from_json
#private String postKey;
#private Message error;
# Response value object for a create call; mirrors the Java-style fields
# documented in the comments above (postKey, error).
class CreateResponse
  attr_accessor :postKey, :error

  # Deserializes a string-keyed hash (parsed JSON) into a CreateResponse.
  # Missing keys leave the corresponding attribute nil.
  def self.from_json(json)
    new.tap do |response|
      response.postKey = json["postKey"]
      response.error = json["error"]
    end
  end
end
|
require "elasticsearch_wrapper"
require "logger"
require "json"
# Administrative operations (index and mapping management) against an
# elasticsearch cluster, performed through ElasticsearchWrapper::Client.
class ElasticsearchAdminWrapper
  # settings - connection settings passed to the client
  # schema   - hash with "index" (settings payload) and "mapping" sections
  # logger   - optional; defaults to a null logger
  def initialize(settings, schema, logger = nil)
    @client = ElasticsearchWrapper::Client.new(settings, logger)
    @schema = schema
    @logger = logger || Logger.new("/dev/null")
  end

  # True if the index this client is scoped to already exists on the server.
  def index_exists?
    server_status = JSON.parse(@client.get("/_status"))
    server_status["indices"].keys.include? @client.index_name
  end

  # Create the elasticsearch index if it does not exist.
  # If it does exist, close the index, apply the updated analysis settings,
  # reopen it, and wait for the cluster to recover.
  # Returns :created or :updated.
  def ensure_index
    index_payload = @schema["index"]
    if index_exists?
      @logger.info "Index already exists: updating settings"
      # A brief pause before closing prevents intermittent shard-restore
      # failures on reopen in multi-shard configurations (observed in CI and
      # on dev machines with ~20 shards). Neither a flush nor a refresh
      # avoids the problem; the root cause is not understood — TODO find a
      # more principled fix.
      sleep 1
      @logger.debug @client.post("_close", nil)
      @logger.debug @client.put("_settings", index_payload["settings"].to_json)
      @logger.debug @client.post("_open", nil)
      wait_until_ready
      @logger.info "Settings updated"
      return :updated
    else
      @client.put("", index_payload.to_json)
      @logger.info "Index created"
      return :created
    end
  end

  # Destructive variant: drops the index first, then recreates it.
  def ensure_index!
    delete_index
    ensure_index
  end

  # Deletes the index; returns :deleted, or :absent if it didn't exist.
  def delete_index
    begin
      @logger.info "Deleting index"
      @client.delete ""
      return :deleted
    rescue RestClient::ResourceNotFound
      @logger.info "Index didn't exist"
      return :absent
    end
  end

  # Create or update the mappings in the current index, one document type at
  # a time. HTTP errors are logged (with the response body) and re-raised.
  def put_mappings
    @schema["mapping"].each do |mapping_type, mapping|
      @logger.info "Setting mapping for the '#{mapping_type}' type"
      @logger.debug({mapping_type => mapping}.to_json)
      begin
        @client.put(
          "#{mapping_type}/_mapping",
          {mapping_type => mapping}.to_json
        )
      rescue RestClient::Exception => e
        @logger.info e.http_body
        raise
      end
    end
  end

  private

  # Wait until the cluster is back up and running: useful when updating and
  # reopening an index.
  # The timeout of ten seconds is a semi-arbitrary figure, but the cluster
  # usually reinitialises within a second, so if it gets as far as ten then
  # something's most likely gone wrong.
  # A note on states: "red" means the cluster isn't back up; "yellow" means
  # the primary is back, but not all the replicas; "green" means everything is
  # back up. So long as the primary is back up, we should be fine. See
  # <http://www.elasticsearch.org/guide/reference/api/admin-cluster-health.html>
  def wait_until_ready(timeout=10)
    health_params = { wait_for_status: "yellow", timeout: "#{timeout}s" }
    response = @client.get "/_cluster/health", params: health_params
    health = JSON.parse(response)
    if health["timed_out"] || ! ["green", "yellow"].include?(health["status"])
      @logger.error "Failed to restore search. Response: #{response}"
      raise RuntimeError, "Failed to restore search"
    end
  end
end
Horrific hack to fix some indices not restoring.
In Jenkins and on development machines with ~20 or more shards, the
`test_should_return_symbol_if_index_exists` test fails (intermittently
on Jenkins; consistently on the development machines) when waiting for
the index to reopen, as some shards fail to restore and are left
unassigned so the cluster remains in a "red" state.
Pausing briefly before closing the index fixes this, but I don't have a
clear idea why.
require "elasticsearch_wrapper"
require "logger"
require "json"
# Administrative operations (index and mapping management) against an
# elasticsearch cluster, performed through ElasticsearchWrapper::Client.
class ElasticsearchAdminWrapper
# settings - connection settings passed to the client
# schema   - hash with "index" (settings payload) and "mapping" sections
# logger   - optional; defaults to a null logger
def initialize(settings, schema, logger = nil)
@client = ElasticsearchWrapper::Client.new(settings, logger)
@schema = schema
@logger = logger || Logger.new("/dev/null")
end
# True if the index this client is scoped to already exists on the server.
def index_exists?
server_status = JSON.parse(@client.get("/_status"))
server_status["indices"].keys.include? @client.index_name
end
# Returns :created or :updated.
def ensure_index
# Create the elasticsearch index if it does not exist
# If it does exist, close the index and apply the updated analysis settings
index_payload = @schema["index"]
if index_exists?
@logger.info "Index already exists: updating settings"
# For no readily discernible reason, a short delay here prevents the
# tests (and potentially Rake tasks) causing some shards to fail when
# reopening the index in some configurations (occasionally in Jenkins;
# reliably on dev machines configured to use 20 shards per index).
# Neither a `flush` nor a `refresh` gets around this problem, otherwise I
# would much prefer those. I would love to find a more sensible way to
# achieve this.
sleep 1
@logger.debug @client.post("_close", nil)
@logger.debug @client.put("_settings", index_payload["settings"].to_json)
@logger.debug @client.post("_open", nil)
wait_until_ready
@logger.info "Settings updated"
return :updated
else
@client.put("", index_payload.to_json)
@logger.info "Index created"
return :created
end
end
# Destructive variant: drops the index first, then recreates it.
def ensure_index!
delete_index
ensure_index
end
# Deletes the index; returns :deleted, or :absent if it didn't exist.
def delete_index
begin
@logger.info "Deleting index"
@client.delete ""
return :deleted
rescue RestClient::ResourceNotFound
@logger.info "Index didn't exist"
return :absent
end
end
# Create or update the mappings in the current index, one document type at
# a time. HTTP errors are logged (with the response body) and re-raised.
def put_mappings
# Create or update the mappings in the current index
@schema["mapping"].each do |mapping_type, mapping|
@logger.info "Setting mapping for the '#{mapping_type}' type"
@logger.debug({mapping_type => mapping}.to_json)
begin
@client.put(
"#{mapping_type}/_mapping",
{mapping_type => mapping}.to_json
)
rescue RestClient::Exception => e
@logger.info e.http_body
raise
end
end
end
private
def wait_until_ready(timeout=10)
# Wait until the cluster is back up and running: useful when updating and
# reopening an index.
# The timeout of ten seconds is a semi-arbitrary figure, but the cluster
# usually reinitialises within a second, so if it gets as far as ten then
# something's most likely gone wrong.
# A note on states: "red" means the cluster isn't back up; "yellow" means
# the primary is back, but not all the replicas; "green" means everything is
# back up. So long as the primary is back up, we should be fine. See
# <http://www.elasticsearch.org/guide/reference/api/admin-cluster-health.html>
health_params = { wait_for_status: "yellow", timeout: "#{timeout}s" }
response = @client.get "/_cluster/health", params: health_params
health = JSON.parse(response)
if health["timed_out"] || ! ["green", "yellow"].include?(health["status"])
@logger.error "Failed to restore search. Response: #{response}"
raise RuntimeError, "Failed to restore search"
end
end
end
|
require 'epub/ocf/physical_container/zipruby'
require 'epub/ocf/physical_container/file'
module EPUB
class OCF
# Abstraction over the physical EPUB container. A class-level adapter
# (default: Zipruby) does the actual archive access; subclasses of
# PhysicalContainer act as their own adapter.
class PhysicalContainer
@adapter = Zipruby
class << self
# Returns the configured adapter class.
# Only valid on PhysicalContainer itself; subclasses raise NoMethodError.
def adapter
if self == PhysicalContainer
@adapter
else
raise NoMethodError.new("undefined method `#{__method__}' for #{self}")
end
end
# Sets the adapter: accepts a Class directly, or a name resolved via
# const_get under this class. Only valid on PhysicalContainer itself.
def adapter=(adapter)
if self == PhysicalContainer
@adapter = adapter.instance_of?(Class) ? adapter : const_get(adapter)
else
raise NoMethodError.new("undefined method `#{__method__}' for #{self}")
end
end
# Opens the container at container_path with the effective adapter and
# yields the opened container.
def open(container_path)
_adapter.new(container_path).open do |container|
yield container
end
end
# Convenience: opens the container and reads a single entry.
def read(container_path, path_name)
open(container_path) {|container|
container.read(path_name)
}
end
private
# The base class delegates to the configured adapter; subclasses are
# their own adapter.
def _adapter
(self == PhysicalContainer) ? @adapter : self
end
end
def initialize(container_path)
@container_path = container_path
end
end
end
end
Remove unnecessary blank line
require 'epub/ocf/physical_container/zipruby'
require 'epub/ocf/physical_container/file'
module EPUB
class OCF
# Facade over the physical EPUB container (the archive on disk). The
# concrete archive implementation is pluggable via .adapter=; Zipruby is
# the default.
class PhysicalContainer
# Class-instance variable (not @@) so the default adapter lives only on
# the base class.
@adapter = Zipruby
class << self
# Returns the configured adapter class. Raises NoMethodError when called
# on a subclass: subclasses *are* adapters and have no adapter of their own.
def adapter
if self == PhysicalContainer
@adapter
else
raise NoMethodError.new("undefined method `#{__method__}' for #{self}")
end
end
# Sets the adapter: accepts a Class directly, or a constant name
# (symbol/string) resolved under PhysicalContainer.
def adapter=(adapter)
if self == PhysicalContainer
@adapter = adapter.instance_of?(Class) ? adapter : const_get(adapter)
else
raise NoMethodError.new("undefined method `#{__method__}' for #{self}")
end
end
# Opens the container at container_path and yields the opened container.
def open(container_path)
_adapter.new(container_path).open do |container|
yield container
end
end
# Convenience: opens the container and reads the single entry at path_name.
def read(container_path, path_name)
open(container_path) {|container|
container.read(path_name)
}
end
private
# The class to instantiate: the configured adapter when called on the base
# class, otherwise the receiver itself (an adapter subclass).
def _adapter
(self == PhysicalContainer) ? @adapter : self
end
end
def initialize(container_path)
@container_path = container_path
end
end
end
end
|
require 'erb'
module ESC_POS
module Specifications
# Abstract base for ESC/POS printer specifications. Subclasses register
# their control codes via .set and inherit the formatting helpers below.
class Base
# Shared registry of control codes, keyed by symbol.
# NOTE(review): @@specifications is a class variable, so it is shared by
# the entire subclass hierarchy -- all specifications see the same codes.
def self.specifications
@@specifications ||= {}
end
# Registers a single control code under +name+.
def self.set(name, value)
specifications[name] = value
end
def initialize
raise NoMethodError, 'This is just a base class, use it to create custom specifications.'
end
# Builds printable text preceded by font/color selection sequences
# (defaulting to :font_b / :color_black) and an optional alignment
# sequence. A nil +txt+ emits only the selector sequences.
def text(txt, options = {})
font = options.fetch(:font, :font_b)
color = options.fetch(:color, :color_black)
formatted_text = ''
formatted_text << set_font(font)
formatted_text << set_alignment(options[:align_type]) if options[:align_type]
formatted_text << set_color(color)
if txt
formatted_text << re_encode_text(txt)
end
formatted_text
end
# Full printer reset sequence (ESC + reset code).
def reset_printer
"#{get_value(:esc_code)}#{get_value(:reset_printer_code)}"
end
# Advances the paper by +lines+ lines; a single line needs no command.
def feed_lines(lines)
return '' if lines == 1
"#{get_value(:esc_code)}\x64" << (lines - 1).chr
end
# Renders an ERB template named after the class (or options[:template])
# from Settings.templates_path, evaluated against this instance.
def render(options = {})
template_filename = options.fetch(:template, self.class.to_s.underscore)
template = File.read(File.join(Settings.templates_path, "#{template_filename}.esc_pos.erb"))
erb = ERB.new(template, 0, '%<>-')
erb.result(binding)
end
# Selector sequences: ESC + selector code + the registered value for +key+.
def set_font(font)
"#{get_value(:esc_code)}#{get_value(:font_selector_code)}#{get_value(font)}"
end
def set_alignment(alignment)
"#{get_value(:esc_code)}#{get_value(:alignment_selector_code)}#{get_value(alignment)}"
end
def set_color(color)
"#{get_value(:esc_code)}#{get_value(:color_selector_code)}#{get_value(color)}"
end
def set_international_character_set(character_set)
"#{get_value(:esc_code)}#{get_value(:international_character_selector_code)}#{get_value(character_set).chr}"
end
def set_character_code_table(character_code)
"#{get_value(:esc_code)}#{get_value(:character_table_selector_code)}#{get_value(character_code).chr}"
end
# A horizontal rule spanning the configured :width, printed in :font_b.
def split_line(char = '-')
text(char * get_value(:width), :font => :font_b)
end
# Feeds enough lines to bring the print head to the cutting position.
def go_to_cut
feed_lines(get_value(:lines_to_cut_line))
end
# GS V cut command, optionally feeding +spaces_after+ units first.
def go_to_cut_and_cut(spaces_after = 0)
"#{get_value(:gs_code)}V#{65.chr}#{spaces_after}\r"
end
# ESC m (partial) or ESC i (full) immediate paper cut.
def cut_paper(partial_cut = false)
if partial_cut
"#{get_value(:esc_code)}m"
else
"#{get_value(:esc_code)}i"
end
end
# Looks up a registered control code.
def get_value(key)
self.class.specifications[key]
end
# Transcodes text to :special_encoding when configured; characters with no
# mapping fall back to a raw force_encoding (bytes kept, label changed).
def re_encode_text(txt)
return txt unless get_value(:special_encoding)
txt.encode(get_value(:special_encoding))
rescue Encoding::UndefinedConversionError
txt.force_encoding(get_value(:special_encoding))
end
end
end
end
Remove default font and color.
require 'erb'

module ESC_POS
  module Specifications
    # Abstract base for ESC/POS printer specifications. Subclasses register
    # their control codes via .set and inherit the formatting helpers below.
    class Base
      # Shared registry of control codes, keyed by symbol.
      # NOTE(review): @@specifications is a class variable, shared by the
      # entire subclass hierarchy.
      def self.specifications
        @@specifications ||= {}
      end

      # Registers a single control code under +name+.
      def self.set(name, value)
        specifications[name] = value
      end

      def initialize
        raise NoMethodError, 'This is just a base class, use it to create custom specifications.'
      end

      # Builds printable text, optionally preceded by font/alignment/color
      # selection sequences. Options left out (or nil) emit nothing; a nil
      # +txt+ emits only the requested selector sequences.
      def text(txt, options = {})
        font  = options.fetch(:font, nil)
        color = options.fetch(:color, nil)
        pieces = []
        pieces << set_font(font) if font
        pieces << set_alignment(options[:align_type]) if options[:align_type]
        pieces << set_color(color) if color
        pieces << re_encode_text(txt) if txt
        pieces.join
      end

      # Full printer reset sequence (ESC + reset code).
      def reset_printer
        "#{get_value(:esc_code)}#{get_value(:reset_printer_code)}"
      end

      # Advances the paper by +lines+ lines; a single line needs no command.
      def feed_lines(lines)
        return '' if lines == 1
        command = "#{get_value(:esc_code)}\x64"
        command << (lines - 1).chr
      end

      # Renders an ERB template named after the class (or options[:template])
      # from Settings.templates_path, evaluated against this instance.
      def render(options = {})
        name = options.fetch(:template, self.class.to_s.underscore)
        path = File.join(Settings.templates_path, "#{name}.esc_pos.erb")
        ERB.new(File.read(path), 0, '%<>-').result(binding)
      end

      # Selector sequences: ESC + selector code + the registered value.
      def set_font(font)
        escape_sequence(:font_selector_code, get_value(font))
      end

      def set_alignment(alignment)
        escape_sequence(:alignment_selector_code, get_value(alignment))
      end

      def set_color(color)
        escape_sequence(:color_selector_code, get_value(color))
      end

      def set_international_character_set(character_set)
        escape_sequence(:international_character_selector_code, get_value(character_set).chr)
      end

      def set_character_code_table(character_code)
        escape_sequence(:character_table_selector_code, get_value(character_code).chr)
      end

      # A horizontal rule spanning the configured :width.
      def split_line(char = '-', font = nil)
        text(char * get_value(:width), :font => font)
      end

      # Feeds enough lines to bring the print head to the cutting position.
      def go_to_cut
        feed_lines(get_value(:lines_to_cut_line))
      end

      # GS V cut command, optionally feeding +spaces_after+ units first.
      def go_to_cut_and_cut(spaces_after = 0)
        "#{get_value(:gs_code)}V#{65.chr}#{spaces_after}\r"
      end

      # ESC m (partial) or ESC i (full) immediate paper cut.
      def cut_paper(partial_cut = false)
        partial_cut ? "#{get_value(:esc_code)}m" : "#{get_value(:esc_code)}i"
      end

      # Looks up a registered control code.
      def get_value(key)
        self.class.specifications[key]
      end

      # Transcodes text to :special_encoding when configured; characters with
      # no mapping fall back to a raw force_encoding (bytes kept, label
      # changed).
      def re_encode_text(txt)
        encoding = get_value(:special_encoding)
        return txt unless encoding
        begin
          txt.encode(encoding)
        rescue Encoding::UndefinedConversionError
          txt.force_encoding(encoding)
        end
      end

      private

      # ESC + registered selector code + payload.
      def escape_sequence(selector_key, value)
        "#{get_value(:esc_code)}#{get_value(selector_key)}#{value}"
      end
    end
  end
end
|
require 'time'
require 'json'
require 'plines/redis_objects'
require 'plines/indifferent_hash'
require 'plines/lua'
module Plines
# Represents a group of jobs that are enqueued together as a batch,
# based on the step dependency graph.
class JobBatch < Struct.new(:pipeline, :id)
include Plines::RedisObjectsHelpers
JobNotPendingError = Class.new(ArgumentError)
set :pending_job_jids
set :completed_job_jids
set :timed_out_external_deps
hash_key :meta
# user_data is a redis hash that can be updated by applications (external to plines).
hash_key :user_data
attr_reader :qless, :redis
def initialize(qless, pipeline, id)
@qless = qless
@redis = qless.redis
@allowed_to_add_external_deps = false
super(pipeline, id)
yield self if block_given?
end
BATCH_DATA_KEY = "batch_data"
EXT_DEP_KEYS_KEY = "ext_dep_keys"
CREATE_OPTIONS_KEY = "create_options"
# We use find/create in place of new for both
# so that the semantics of the two cases are clear.
private_class_method :new
CannotFindExistingJobBatchError = Class.new(StandardError)
def self.find(qless, pipeline, id)
new(qless, pipeline, id) do |inst|
unless inst.creation_started_at
raise CannotFindExistingJobBatchError,
"Cannot find an existing job batch for #{pipeline} / #{id}"
end
yield inst if block_given?
end
end
JobBatchAlreadyCreatedError = Class.new(StandardError)
AddingExternalDependencyNotAllowedError = Class.new(StandardError)
def self.create(qless, pipeline, id, batch_data, options = {}, &block)
new(qless, pipeline, id) do |inst|
inst.send(:initialize_new_batch, batch_data, options, &block)
end
end
def populate_external_deps_meta
@allowed_to_add_external_deps = true
yield
ext_deps = external_deps | newly_added_external_deps.to_a
meta[EXT_DEP_KEYS_KEY] = JSON.dump(ext_deps)
ensure
@allowed_to_add_external_deps = false
end
def newly_added_external_deps
@newly_added_external_deps ||= []
end
def external_deps
if keys = meta[EXT_DEP_KEYS_KEY]
decode(keys)
else
[]
end
end
def add_job(jid, *external_dependencies)
pending_job_jids << jid
unless @allowed_to_add_external_deps || external_dependencies.none?
raise AddingExternalDependencyNotAllowedError, "You cannot add jobs " +
"with external dependencies after creating the job batch."
else
external_dependencies.each do |dep|
newly_added_external_deps << dep
external_dependency_sets[dep] << jid
end
EnqueuedJob.create(qless, pipeline, jid, *external_dependencies)
end
end
def job_jids
pending_job_jids | completed_job_jids
end
def jobs
job_jids.map { |jid| EnqueuedJob.new(qless, pipeline, jid) }
end
def job_repository
qless.jobs
end
def pending_qless_jobs
pending_job_jids.map do |jid|
job_repository[jid]
end.compact
end
def qless_jobs
job_jids.map do |jid|
job_repository[jid]
end.compact
end
def complete_job(qless_job)
qless_job.note_state_change(:complete) do
lua.complete_job(self, qless_job)
end
end
def complete?
!!completed_at
end
def resolve_external_dependency(dep_name)
jids = external_dependency_sets[dep_name]
update_external_dependency \
dep_name, :resolve_external_dependency, jids
cancel_timeout_job_jid_set_for(dep_name)
end
def timeout_external_dependency(dep_name, jids)
update_external_dependency \
dep_name, :timeout_external_dependency, Array(jids)
timed_out_external_deps << dep_name
end
def has_unresolved_external_dependency?(dep_name)
external_dependency_sets[dep_name].any? do |jid|
EnqueuedJob.new(qless, pipeline, jid)
.unresolved_external_dependencies.include?(dep_name)
end
end
def timed_out_external_dependencies
timed_out_external_deps.to_a
end
module InconsistentStateError
def self.===(exn)
Qless::LuaScriptError === exn &&
exn.message.include?('InconsistentTimeoutState')
end
end
def awaiting_external_dependency?(dep_name)
lua.job_batch_awaiting_external_dependency?(
job_batch: self,
dependency_name: dep_name
)
rescue InconsistentStateError
raise NotImplementedError, "External dependency #{dep_name} is in a " +
"hybrid state in which it has timed out for some but not all jobs. " +
"We don't support this state yet and may change the plines data " +
"model so this state is no longer possible in the future."
end
def creation_started_at
time_from("creation_started_at") || time_from("created_at")
end
# Alias for backwards compatibility
alias created_at creation_started_at
def creation_completed_at
time_from "creation_completed_at"
end
def completed_at
time_from "completed_at"
end
def cancelled_at
time_from "cancelled_at"
end
def cancelled?
!!cancelled_at || (meta["cancelled"] == "1")
end
def creation_in_progress?
meta_values = meta.bulk_get(:creation_completed_at,
:created_at,
:creation_in_progress)
if meta_values[:created_at]
# This job batch was created before we updated how we tracked creation
# in progress. The old way is through the creation_in_progress flag.
!!meta_values[:creation_in_progress]
else
# The new way uses creation_started_at/creation_completed_at; if the
# latter is set then creation is complete.
!meta_values[:creation_completed_at]
end
end
def cancellation_reason
meta["cancellation_reason"]
end
def creation_reason
meta["creation_reason"]
end
def timeout_reduction
@timeout_reduction ||= meta["timeout_reduction"].to_i
end
def spawned_from
return @spawned_from if defined?(@spawned_from)
if id = spawned_from_id
@spawned_from = self.class.find(qless, pipeline, id)
else
@spawned_from = nil
end
end
def spawned_from_id
meta['spawned_from_id']
end
def in_terminal_state?
cancelled? || complete?
end
CannotDeleteError = Class.new(StandardError)
def delete
unless in_terminal_state?
raise CannotDeleteError,
"JobBatch #{id} is not in a terminal state and cannot be deleted"
end
lua.delete!(self)
end
def delete!
cancel
lua.delete!(self)
end
CannotCancelError = Class.new(StandardError)
def cancel!(options = {})
if complete?
raise CannotCancelError,
"JobBatch #{id} is already complete and cannot be cancelled"
end
perform_cancellation(options)
end
def cancel(options = {})
return false if complete?
perform_cancellation(options)
true
end
def data
data = decode(meta[BATCH_DATA_KEY])
data && IndifferentHash.from(data)
end
def create_options
options = decode(meta[CREATE_OPTIONS_KEY])
options && IndifferentHash.from(options)
end
def track_timeout_job(dep_name, jid)
timeout_job_jid_sets[dep_name] << jid
end
def timeout_job_jid_sets
@timeout_job_jid_sets ||= Hash.new do |hash, dep|
key = [key_prefix, "timeout_job_jids", dep].join(':')
hash[dep] = Redis::Set.new(key, redis)
end
end
SpawnOptions = Struct.new(:data_overrides, :timeout_reduction, :reason)
def spawn_copy
options = SpawnOptions.new({})
yield options if block_given?
overrides = JSON.parse(JSON.dump options.data_overrides)
pipeline.enqueue_jobs_for(data.merge(overrides), {
spawned_from_id: id,
timeout_reduction: options.timeout_reduction || 0,
reason: options.reason
})
end
def user_data_keys
user_data.keys
end
def user_data_get keys
if keys.size > 0
user_data.bulk_get *keys
else
user_data.all
end
end
def user_data_set hash
user_data.bulk_set hash
end
private
# we manage these keys and don't want them in `create_options`
SPECIAL_OPTION_KEYS = [:timeout_reduction, :spawned_from_id, :reason]
def populate_meta_for_create(batch_data, options)
opts_to_store = options.reject { |k, v| SPECIAL_OPTION_KEYS.include?(k) }
metadata = {
creation_started_at: Time.now.getutc.iso8601,
timeout_reduction: options.fetch(:timeout_reduction, 0),
BATCH_DATA_KEY => JSON.dump(batch_data),
CREATE_OPTIONS_KEY => JSON.dump(opts_to_store),
}
if (reason = options[:reason])
metadata[:creation_reason] = reason
end
if (spawned_from_id = options[:spawned_from_id])
metadata[:spawned_from_id] = spawned_from_id
end
meta.bulk_set(metadata)
@timeout_reduction = metadata.fetch(:timeout_reduction)
end
SomeJobsFailedToCancelError = Class.new(StandardError)
CreationInStillInProgressError = Class.new(StandardError)
def perform_cancellation(options)
return true if cancelled?
if creation_in_progress? && !creation_appears_to_be_stuck?
raise CreationInStillInProgressError,
"#{id} is still being created (started " +
"#{Time.now - creation_started_at} seconds ago)"
end
qless.bulk_cancel(job_jids)
verify_all_jobs_cancelled
external_deps.each do |key|
cancel_timeout_job_jid_set_for(key)
end
meta["cancellation_reason"] = options[:reason] if options.key?(:reason)
meta["cancelled_at"] = Time.now.getutc.iso8601
set_expiration!
pipeline.configuration.notify(:after_job_batch_cancellation, self)
end
STUCK_BATCH_CREATION_TIMEOUT = 60 * 60 # 1 hour
def creation_appears_to_be_stuck?
age_in_seconds = Time.now - creation_started_at
age_in_seconds >= STUCK_BATCH_CREATION_TIMEOUT
end
def verify_all_jobs_cancelled
jobs = qless_jobs.reject { |j| j.state == "complete" }
return if jobs.none?
raise SomeJobsFailedToCancelError,
"#{jobs.size} jobs failed to cancel: #{jobs.inspect}"
end
def update_external_dependency(dep_name, meth, jids)
jids.each do |jid|
EnqueuedJob.new(qless, pipeline, jid).send(meth, dep_name)
end
end
def time_from(meta_entry)
date_string = meta[meta_entry]
Time.iso8601(date_string) if date_string
end
def set_expiration!
lua.expire_job_batch(self)
end
def external_dependency_sets
@external_dependency_sets ||= Hash.new do |hash, dep|
key = [key_prefix, "ext_deps", dep].join(':')
hash[dep] = Redis::Set.new(key, redis)
end
end
def decode(string)
string && JSON.parse(string)
end
def cancel_timeout_job_jid_set_for(dep_name)
timeout_job_jid_set = timeout_job_jid_sets[dep_name]
timeout_job_jid_set.each { |jid| gracefully_cancel(jid) }
timeout_job_jid_set.del
end
def gracefully_cancel(jid)
job = job_repository[jid]
job && job.cancel
end
def lua
@lua ||= Plines::Lua.new(qless)
end
def with_batch_creation_exception_logging
yield
rescue Exception => e
pipeline.configuration.logger.error(
"Aborting creation of plines JobBatch #{pipeline.name} #{id}: " \
"#{e.class.name}: #{e.message} (#{e.backtrace.first})"
)
raise
end
def initialize_new_batch(batch_data, options)
if creation_started_at
raise JobBatchAlreadyCreatedError,
"Job batch #{pipeline} / #{id} already exists"
end
with_batch_creation_exception_logging do
populate_meta_for_create(batch_data, options)
populate_external_deps_meta { yield self if block_given? }
meta[:creation_completed_at] = Time.now.getutc.iso8601
end
end
end
end
Fix lint offense: wrap overlong comment line
require 'time'
require 'json'
require 'plines/redis_objects'
require 'plines/indifferent_hash'
require 'plines/lua'
module Plines
# Represents a group of jobs that are enqueued together as a batch,
# based on the step dependency graph.
class JobBatch < Struct.new(:pipeline, :id)
include Plines::RedisObjectsHelpers
JobNotPendingError = Class.new(ArgumentError)
# Redis-backed collections (declared via RedisObjectsHelpers), scoped to
# this batch.
set :pending_job_jids
set :completed_job_jids
set :timed_out_external_deps
hash_key :meta
# user_data is a redis hash that can be updated by applications
# (external to plines).
hash_key :user_data
attr_reader :qless, :redis
def initialize(qless, pipeline, id)
@qless = qless
@redis = qless.redis
@allowed_to_add_external_deps = false
super(pipeline, id)
yield self if block_given?
end
# Keys within the `meta` redis hash.
BATCH_DATA_KEY = "batch_data"
EXT_DEP_KEYS_KEY = "ext_dep_keys"
CREATE_OPTIONS_KEY = "create_options"
# We use find/create in place of new for both
# so that the semantics of the two cases are clear.
private_class_method :new
CannotFindExistingJobBatchError = Class.new(StandardError)
# Looks up an existing batch; raises unless it was previously created.
def self.find(qless, pipeline, id)
new(qless, pipeline, id) do |inst|
unless inst.creation_started_at
raise CannotFindExistingJobBatchError,
"Cannot find an existing job batch for #{pipeline} / #{id}"
end
yield inst if block_given?
end
end
JobBatchAlreadyCreatedError = Class.new(StandardError)
AddingExternalDependencyNotAllowedError = Class.new(StandardError)
# Creates and persists a brand-new batch; raises if it already exists.
def self.create(qless, pipeline, id, batch_data, options = {}, &block)
new(qless, pipeline, id) do |inst|
inst.send(:initialize_new_batch, batch_data, options, &block)
end
end
# Temporarily allows add_job to register external dependencies while the
# block runs, then persists the union of old and new names into meta.
def populate_external_deps_meta
@allowed_to_add_external_deps = true
yield
ext_deps = external_deps | newly_added_external_deps.to_a
meta[EXT_DEP_KEYS_KEY] = JSON.dump(ext_deps)
ensure
@allowed_to_add_external_deps = false
end
def newly_added_external_deps
@newly_added_external_deps ||= []
end
# External dependency names recorded in meta (JSON array), or [].
def external_deps
if keys = meta[EXT_DEP_KEYS_KEY]
decode(keys)
else
[]
end
end
# Registers a job jid (and any external dependency names) with this batch.
# External dependencies may only be added during batch creation.
def add_job(jid, *external_dependencies)
pending_job_jids << jid
unless @allowed_to_add_external_deps || external_dependencies.none?
raise AddingExternalDependencyNotAllowedError, "You cannot add jobs " +
"with external dependencies after creating the job batch."
else
external_dependencies.each do |dep|
newly_added_external_deps << dep
external_dependency_sets[dep] << jid
end
EnqueuedJob.create(qless, pipeline, jid, *external_dependencies)
end
end
def job_jids
pending_job_jids | completed_job_jids
end
def jobs
job_jids.map { |jid| EnqueuedJob.new(qless, pipeline, jid) }
end
def job_repository
qless.jobs
end
# Qless job objects still pending; purged jids are dropped.
def pending_qless_jobs
pending_job_jids.map do |jid|
job_repository[jid]
end.compact
end
def qless_jobs
job_jids.map do |jid|
job_repository[jid]
end.compact
end
# Atomically (via the lua script) moves a job from pending to completed.
def complete_job(qless_job)
qless_job.note_state_change(:complete) do
lua.complete_job(self, qless_job)
end
end
def complete?
!!completed_at
end
# Resolves the named dependency for all waiting jobs and cancels its
# timeout jobs.
def resolve_external_dependency(dep_name)
jids = external_dependency_sets[dep_name]
update_external_dependency \
dep_name, :resolve_external_dependency, jids
cancel_timeout_job_jid_set_for(dep_name)
end
def timeout_external_dependency(dep_name, jids)
update_external_dependency \
dep_name, :timeout_external_dependency, Array(jids)
timed_out_external_deps << dep_name
end
def has_unresolved_external_dependency?(dep_name)
external_dependency_sets[dep_name].any? do |jid|
EnqueuedJob.new(qless, pipeline, jid)
.unresolved_external_dependencies.include?(dep_name)
end
end
def timed_out_external_dependencies
timed_out_external_deps.to_a
end
# Case-matcher for the lua error raised when a dependency has timed out
# for some jobs but not others.
module InconsistentStateError
def self.===(exn)
Qless::LuaScriptError === exn &&
exn.message.include?('InconsistentTimeoutState')
end
end
def awaiting_external_dependency?(dep_name)
lua.job_batch_awaiting_external_dependency?(
job_batch: self,
dependency_name: dep_name
)
rescue InconsistentStateError
raise NotImplementedError, "External dependency #{dep_name} is in a " +
"hybrid state in which it has timed out for some but not all jobs. " +
"We don't support this state yet and may change the plines data " +
"model so this state is no longer possible in the future."
end
# Falls back to the legacy "created_at" meta key for old batches.
def creation_started_at
time_from("creation_started_at") || time_from("created_at")
end
# Alias for backwards compatibility
alias created_at creation_started_at
def creation_completed_at
time_from "creation_completed_at"
end
def completed_at
time_from "completed_at"
end
def cancelled_at
time_from "cancelled_at"
end
# Checks the cancelled_at timestamp plus the legacy "cancelled" flag.
def cancelled?
!!cancelled_at || (meta["cancelled"] == "1")
end
def creation_in_progress?
meta_values = meta.bulk_get(:creation_completed_at,
:created_at,
:creation_in_progress)
if meta_values[:created_at]
# This job batch was created before we updated how we tracked creation
# in progress. The old way is through the creation_in_progress flag.
!!meta_values[:creation_in_progress]
else
# The new way uses creation_started_at/creation_completed_at; if the
# latter is set then creation is complete.
!meta_values[:creation_completed_at]
end
end
def cancellation_reason
meta["cancellation_reason"]
end
def creation_reason
meta["creation_reason"]
end
def timeout_reduction
@timeout_reduction ||= meta["timeout_reduction"].to_i
end
# The batch this one was spawned from (see spawn_copy), or nil. Memoized,
# including the nil case.
def spawned_from
return @spawned_from if defined?(@spawned_from)
if id = spawned_from_id
@spawned_from = self.class.find(qless, pipeline, id)
else
@spawned_from = nil
end
end
def spawned_from_id
meta['spawned_from_id']
end
def in_terminal_state?
cancelled? || complete?
end
CannotDeleteError = Class.new(StandardError)
# Deletes all redis data; only allowed once cancelled or complete.
def delete
unless in_terminal_state?
raise CannotDeleteError,
"JobBatch #{id} is not in a terminal state and cannot be deleted"
end
lua.delete!(self)
end
# Forces deletion by cancelling first.
def delete!
cancel
lua.delete!(self)
end
CannotCancelError = Class.new(StandardError)
# Cancels the batch, raising if it is already complete.
def cancel!(options = {})
if complete?
raise CannotCancelError,
"JobBatch #{id} is already complete and cannot be cancelled"
end
perform_cancellation(options)
end
# Non-raising variant: returns false when the batch is already complete.
def cancel(options = {})
return false if complete?
perform_cancellation(options)
true
end
# The batch data this batch was created with, as an indifferent hash.
def data
data = decode(meta[BATCH_DATA_KEY])
data && IndifferentHash.from(data)
end
def create_options
options = decode(meta[CREATE_OPTIONS_KEY])
options && IndifferentHash.from(options)
end
def track_timeout_job(dep_name, jid)
timeout_job_jid_sets[dep_name] << jid
end
# Per-dependency redis sets of timeout-job jids, created lazily.
def timeout_job_jid_sets
@timeout_job_jid_sets ||= Hash.new do |hash, dep|
key = [key_prefix, "timeout_job_jids", dep].join(':')
hash[dep] = Redis::Set.new(key, redis)
end
end
SpawnOptions = Struct.new(:data_overrides, :timeout_reduction, :reason)
# Enqueues a new batch whose data is this batch's data merged with the
# (JSON round-tripped, hence string-keyed) overrides from the block.
def spawn_copy
options = SpawnOptions.new({})
yield options if block_given?
overrides = JSON.parse(JSON.dump options.data_overrides)
pipeline.enqueue_jobs_for(data.merge(overrides), {
spawned_from_id: id,
timeout_reduction: options.timeout_reduction || 0,
reason: options.reason
})
end
def user_data_keys
user_data.keys
end
# Fetches the given user_data keys, or the entire hash when none given.
def user_data_get keys
if keys.size > 0
user_data.bulk_get *keys
else
user_data.all
end
end
def user_data_set hash
user_data.bulk_set hash
end
private
# we manage these keys and don't want them in `create_options`
SPECIAL_OPTION_KEYS = [:timeout_reduction, :spawned_from_id, :reason]
# Writes the initial meta hash for a newly created batch.
def populate_meta_for_create(batch_data, options)
opts_to_store = options.reject { |k, v| SPECIAL_OPTION_KEYS.include?(k) }
metadata = {
creation_started_at: Time.now.getutc.iso8601,
timeout_reduction: options.fetch(:timeout_reduction, 0),
BATCH_DATA_KEY => JSON.dump(batch_data),
CREATE_OPTIONS_KEY => JSON.dump(opts_to_store),
}
if (reason = options[:reason])
metadata[:creation_reason] = reason
end
if (spawned_from_id = options[:spawned_from_id])
metadata[:spawned_from_id] = spawned_from_id
end
meta.bulk_set(metadata)
@timeout_reduction = metadata.fetch(:timeout_reduction)
end
SomeJobsFailedToCancelError = Class.new(StandardError)
CreationInStillInProgressError = Class.new(StandardError)
# Cancels every job in the batch, cleans up timeout jobs, records the
# cancellation in meta and notifies observers. Idempotent. Refuses to run
# while creation is still in progress (unless it appears stuck).
def perform_cancellation(options)
return true if cancelled?
if creation_in_progress? && !creation_appears_to_be_stuck?
raise CreationInStillInProgressError,
"#{id} is still being created (started " +
"#{Time.now - creation_started_at} seconds ago)"
end
qless.bulk_cancel(job_jids)
verify_all_jobs_cancelled
external_deps.each do |key|
cancel_timeout_job_jid_set_for(key)
end
meta["cancellation_reason"] = options[:reason] if options.key?(:reason)
meta["cancelled_at"] = Time.now.getutc.iso8601
set_expiration!
pipeline.configuration.notify(:after_job_batch_cancellation, self)
end
STUCK_BATCH_CREATION_TIMEOUT = 60 * 60 # 1 hour
# Creation older than an hour is presumed dead, permitting cancellation.
def creation_appears_to_be_stuck?
age_in_seconds = Time.now - creation_started_at
age_in_seconds >= STUCK_BATCH_CREATION_TIMEOUT
end
def verify_all_jobs_cancelled
jobs = qless_jobs.reject { |j| j.state == "complete" }
return if jobs.none?
raise SomeJobsFailedToCancelError,
"#{jobs.size} jobs failed to cancel: #{jobs.inspect}"
end
def update_external_dependency(dep_name, meth, jids)
jids.each do |jid|
EnqueuedJob.new(qless, pipeline, jid).send(meth, dep_name)
end
end
# Parses an ISO-8601 timestamp out of meta, or nil when absent.
def time_from(meta_entry)
date_string = meta[meta_entry]
Time.iso8601(date_string) if date_string
end
def set_expiration!
lua.expire_job_batch(self)
end
# Per-dependency redis sets mapping dependency name -> waiting jids.
def external_dependency_sets
@external_dependency_sets ||= Hash.new do |hash, dep|
key = [key_prefix, "ext_deps", dep].join(':')
hash[dep] = Redis::Set.new(key, redis)
end
end
def decode(string)
string && JSON.parse(string)
end
# Cancels and discards the timeout jobs tracked for a dependency.
def cancel_timeout_job_jid_set_for(dep_name)
timeout_job_jid_set = timeout_job_jid_sets[dep_name]
timeout_job_jid_set.each { |jid| gracefully_cancel(jid) }
timeout_job_jid_set.del
end
# Cancels a job only if qless still knows about it.
def gracefully_cancel(jid)
job = job_repository[jid]
job && job.cancel
end
def lua
@lua ||= Plines::Lua.new(qless)
end
# Logs (and re-raises) any exception raised during batch creation so a
# half-created batch leaves an audit trail.
def with_batch_creation_exception_logging
yield
rescue Exception => e
pipeline.configuration.logger.error(
"Aborting creation of plines JobBatch #{pipeline.name} #{id}: " \
"#{e.class.name}: #{e.message} (#{e.backtrace.first})"
)
raise
end
# Writes meta, runs the caller's job-enqueueing block inside the external
# dependency window, then stamps creation as complete.
def initialize_new_batch(batch_data, options)
if creation_started_at
raise JobBatchAlreadyCreatedError,
"Job batch #{pipeline} / #{id} already exists"
end
with_batch_creation_exception_logging do
populate_meta_for_create(batch_data, options)
populate_external_deps_meta { yield self if block_given? }
meta[:creation_completed_at] = Time.now.getutc.iso8601
end
end
end
end
module Firehose
  module Rack
    # Rack endpoint accepting HTTP PUT requests and publishing their bodies
    # to the Firehose channel named by the request path. Responds
    # asynchronously (202 on success, 500 on failure); any other verb gets 501.
    class PublisherApp
      include Firehose::Rack::Helpers

      def call(env)
        req    = env['parsed_request'] ||= ::Rack::Request.new(env)
        path   = req.path
        method = req.request_method

        # Parse out cache control directives from the Cache-Control header.
        # BUG FIX: Rack exposes request headers as upcased, HTTP_-prefixed env
        # keys, so the header must be read from HTTP_CACHE_CONTROL --
        # env['Cache-Control'] is never set, which silently disabled the TTL.
        cache_control = {}
        if cache_control_header = env['HTTP_CACHE_CONTROL']
          cache_control = cache_control_header.split(',').map(&:strip).inject({}) do |memo, directive|
            key, value = directive.split('=')
            memo[key.downcase] = value
            memo
          end
        end

        if method == 'PUT'
          EM.next_tick do
            body = env['rack.input'].read
            Firehose.logger.debug "HTTP published `#{body}` to `#{path}`"
            publisher.publish(path, body, :ttl => cache_control['max-age']).callback do
              # BUG FIX: the async callback was previously invoked twice here
              # (once with a raw response triple and once via the helper),
              # violating the Rack async contract. Respond exactly once.
              env['async.callback'].call response(202, '', 'Content-Type' => 'text/plain')
            end.errback do |e|
              Firehose.logger.debug "Error publishing: #{e.inspect}"
              env['async.callback'].call response(500, 'Error when trying to publish', 'Content-Type' => 'text/plain')
            end
          end
          # Tell the web server that this will be an async response.
          ASYNC_RESPONSE
        else
          Firehose.logger.debug "HTTP #{method} not supported"
          msg = "#{method} not supported."
          [501, {'Content-Type' => 'text/plain', 'Content-Length' => msg.size.to_s}, [msg]]
        end
      end

      private

      # Memoized publisher shared across requests.
      def publisher
        @publisher ||= Firehose::Publisher.new
      end
    end
  end
end
Added logging for TTL since I cannot figure out why it's not working in staging
module Firehose
  module Rack
    # Rack endpoint accepting HTTP PUT requests and publishing their bodies
    # to the Firehose channel named by the request path. Responds
    # asynchronously (202 on success, 500 on failure); any other verb gets 501.
    class PublisherApp
      include Firehose::Rack::Helpers

      def call(env)
        req    = env['parsed_request'] ||= ::Rack::Request.new(env)
        path   = req.path
        method = req.request_method

        # Parse out cache control directives from the Cache-Control header.
        # BUG FIX: Rack exposes request headers as upcased, HTTP_-prefixed env
        # keys, so the header must be read from HTTP_CACHE_CONTROL --
        # env['Cache-Control'] is never set, which is why the TTL never
        # worked in staging.
        cache_control = {}
        if cache_control_header = env['HTTP_CACHE_CONTROL']
          cache_control = cache_control_header.split(',').map(&:strip).inject({}) do |memo, directive|
            key, value = directive.split('=')
            memo[key.downcase] = value
            memo
          end
        end

        # Read the max-age directive from the cache so that we can set a TTL on the redis key. This will
        # prevent stale content from being served up to the client.
        ttl = cache_control['max-age']

        if method == 'PUT'
          EM.next_tick do
            body = env['rack.input'].read
            Firehose.logger.debug "HTTP published #{body.inspect} to #{path.inspect} with ttl #{ttl.inspect}"
            publisher.publish(path, body, :ttl => ttl).callback do
              # BUG FIX: the async callback was previously invoked twice here
              # (once with a raw response triple and once via the helper),
              # violating the Rack async contract. Respond exactly once.
              env['async.callback'].call response(202, '', 'Content-Type' => 'text/plain')
            end.errback do |e|
              Firehose.logger.debug "Error publishing: #{e.inspect}"
              env['async.callback'].call response(500, 'Error when trying to publish', 'Content-Type' => 'text/plain')
            end
          end
          # Tell the web server that this will be an async response.
          ASYNC_RESPONSE
        else
          Firehose.logger.debug "HTTP #{method} not supported"
          msg = "#{method} not supported."
          [501, {'Content-Type' => 'text/plain', 'Content-Length' => msg.size.to_s}, [msg]]
        end
      end

      private

      # Memoized publisher shared across requests.
      def publisher
        @publisher ||= Firehose::Publisher.new
      end
    end
  end
end
|
# encoding: utf-8
# This file is part of the K5 bot project.
# See files README.md and COPYING for copyright and licensing information.

# IRCFirstListener is the first listener that is called and handles
# messages that are important for things to function properly.

require_relative '../../Listener'

class IRCFirstListener
  include BotCore::Listener

  # This method is overridden, so that command-methods can
  # pass back their own return values.
  def receive_message(msg)
    dispatch_message_to_methods(msg)
  end

  # Answers server PING with PONG to keep the connection alive.
  def on_ping(msg)
    msg.bot.send_raw(msg.params ? "PONG :#{msg.params.first}" : 'PONG')
    true # stop further message propagation
  end

  # Numeric 263 (server asked us to retry): resend the last command.
  # BUG FIX: the msg parameter was missing from the signature even though
  # the body references msg, so dispatching this reply raised instead of
  # retrying the last sent command.
  def on_263(msg)
    msg.bot.send_raw(msg.bot.last_sent)
    true # stop further message propagation
  end

  # Replies to CTCP PING queries embedded in PRIVMSGs via a user notice.
  def on_ctcp_privmsg(msg)
    result = nil
    queries = msg.ctcp
    queries.each do |ctcp|
      case ctcp.command
      when :PING
        msg.notice_user(IRCMessage.make_ctcp_message(:PING, ctcp.arguments))
        result = true # stop further message propagation
      end
    end
    result # stop further message propagation, if it was CTCP query that we handled
  end

  FIRST_LISTENER_PRIORITY = -16

  # Runs before all other listeners.
  def listener_priority
    FIRST_LISTENER_PRIORITY
  end
end
Migrate IRCFirstListener from notice_user() to reply()
# encoding: utf-8
# This file is part of the K5 bot project.
# See files README.md and COPYING for copyright and licensing information.

# IRCFirstListener is the first listener that is called and handles
# messages that are important for things to function properly.

require_relative '../../Listener'

class IRCFirstListener
  include BotCore::Listener

  # This method is overridden, so that command-methods can
  # pass back their own return values.
  def receive_message(msg)
    dispatch_message_to_methods(msg)
  end

  # Answers server PING with PONG to keep the connection alive.
  def on_ping(msg)
    msg.bot.send_raw(msg.params ? "PONG :#{msg.params.first}" : 'PONG')
    true # stop further message propagation
  end

  # Numeric 263 (server asked us to retry): resend the last command.
  # BUG FIX: the msg parameter was missing from the signature even though
  # the body references msg, so dispatching this reply raised instead of
  # retrying the last sent command.
  def on_263(msg)
    msg.bot.send_raw(msg.bot.last_sent)
    true # stop further message propagation
  end

  # Replies to CTCP PING queries embedded in PRIVMSGs with a private notice.
  def on_ctcp_privmsg(msg)
    result = nil
    queries = msg.ctcp
    queries.each do |ctcp|
      case ctcp.command
      when :PING
        msg.reply(
          IRCMessage.make_ctcp_message(:PING, ctcp.arguments),
          :notice => true,
          :force_private => true,
        )
        result = true # stop further message propagation
      end
    end
    result # stop further message propagation, if it was CTCP query that we handled
  end

  FIRST_LISTENER_PRIORITY = -16

  # Runs before all other listeners.
  def listener_priority
    FIRST_LISTENER_PRIORITY
  end
end
|
module Fluent
  # Fluentd output plugin that forwards each record to a Let's Chat room
  # through its REST messages endpoint.
  class LetsChatPlugin < Output
    Fluent::Plugin.register_output('lets_chat', self)

    config_param :lcb_host, :string, :default => 'localhost'
    config_param :lcb_port, :string, :default => '5000'
    config_param :lcb_room, :string
    config_param :lcb_user_token, :string
    config_param :lcb_user_password, :string
    config_param :lcb_keys, :string, :default => ''

    def initialize
      super
      # Loaded lazily so the deps are only pulled in when the plugin is used.
      require 'net/http'
      require 'uri'
      require 'json'
    end

    # Builds the HTTP client and a reusable authenticated POST request for
    # the configured room; splits lcb_keys into an array of key paths.
    def configure(conf)
      super
      @http = Net::HTTP.new(@lcb_host, @lcb_port)
      @req = Net::HTTP::Post.new("/rooms/#{@lcb_room}/messages")
      @req.add_field 'Accept', 'application/json'
      @req.add_field 'Content-Type', 'application/json'
      @req.basic_auth @lcb_user_token, @lcb_user_password
      @lcb_keys = @lcb_keys.split(',')
    end

    def start
      super
    end

    def shutdown
      super
    end

    # Sends one chat message per record; failures are logged, never raised,
    # and the chain is always advanced.
    def emit(tag, es, chain)
      es.each do |_time, record|
        begin
          send_message(tag, record)
        rescue => e
          $log.error("Send message Error:", :error_class => e.class, :error => e.message)
        end
      end
      $log.flush
      chain.next
    end

    # Formats the record (all keys, or just the configured lcb_keys, with
    # missing keys rendered blank) and POSTs it as JSON to the room.
    def send_message(tag, record)
      lines = ["Request from #{tag}\n"]
      if @lcb_keys.empty?
        record.each do |key, value|
          lines << " #{key}: #{value}\n"
        end
      else
        @lcb_keys.each do |key|
          value = deep_fetch(record, key) rescue nil
          lines << " #{key}: #{value}\n"
        end
      end
      message = lines.join
      @req.body = {"text" => "#{message}"}.to_json
      $log.info "Let's Chat Request => #{@req.body}\n"
      res = @http.request(@req)
      $log.info "Let's Chat Response => #{res.code}\n"
    end

    # Walks a colon-separated key path into nested structures; raises
    # StandardError when any step resolves to nil.
    def deep_fetch(record, key)
      key.split(":").inject(record) do |node, k|
        node = node[k]
        raise StandardError if node.nil?
        node
      end
    end
  end
end
Support arrays in request JSON: deep_fetch now traverses array values
# Fluentd output plugin that forwards event records to a Let's Chat room
# via its HTTP REST API (POST /rooms/:room/messages).
module Fluent
  class LetsChatPlugin < Output
    Fluent::Plugin.register_output('lets_chat', self)

    config_param :lcb_host, :string, :default => 'localhost'
    config_param :lcb_port, :string, :default => '5000'
    config_param :lcb_room, :string
    config_param :lcb_user_token, :string
    config_param :lcb_user_password, :string
    # Comma-separated list of record keys to include; empty means all keys.
    config_param :lcb_keys, :string, :default => ''

    def initialize
      super
      require 'net/http'
      require 'uri'
      require 'json'
    end

    # Builds the HTTP client and a reusable POST request template.
    def configure(conf)
      super
      @http = Net::HTTP.new(@lcb_host, @lcb_port)
      @req = Net::HTTP::Post.new("/rooms/#{@lcb_room}/messages")
      @req.add_field 'Accept', 'application/json'
      @req.add_field 'Content-Type', 'application/json'
      @req.basic_auth @lcb_user_token, @lcb_user_password
      @lcb_keys = @lcb_keys.split(',')
    end

    def start
      super
    end

    def shutdown
      super
    end

    # Posts one chat message per buffered record; errors are logged and
    # swallowed so a failed post does not break the event chain.
    def emit(tag, es, chain)
      es.each {|time,record|
        begin
          send_message(tag, record)
        rescue => e
          $log.error("Send message Error:", :error_class => e.class, :error => e.message)
        end
      }
      $log.flush
      chain.next
    end

    # Formats the record (all keys, or only @lcb_keys) and posts it.
    def send_message(tag, record)
      message = "Request from #{tag}\n"
      if @lcb_keys.empty?
        record.each do |key, value|
          message << " #{key}: #{value}\n"
        end
      else
        @lcb_keys.each do |key|
          value = deep_fetch(record, key) rescue nil
          message << " #{key}: #{value}\n"
        end
      end
      @req.body = {"text" => "#{message}"}.to_json
      $log.info "Let's Chat Request => #{@req.body}\n"
      res = @http.request(@req)
      $log.info "Let's Chat Response => #{res.code}\n"
    end

    # Resolves a colon-separated key path in the record. Array values are
    # mapped element-wise (skipping nils and bare strings).
    # BUG FIX: the emptiness check used to call #compact on every value,
    # raising NoMethodError for scalar leaves (Integer, Float, ...). The
    # Array-guarded check below keeps the intended "all elements missing"
    # detection without breaking scalar lookups.
    def deep_fetch(record, key)
      rec = record
      key.split(":").each{ |k|
        if rec.is_a?(Array)
          rec = rec.map{|r| r[k] unless (r.nil? || r.is_a?(String))}
        else
          rec = rec[k]
        end
        raise StandardError if rec.nil?
        raise StandardError if rec.is_a?(Array) && rec.compact.empty?
      }
      rec
    end
  end
end
|
require 'downlow/ext/pathname'
# Downlow: fetch an archive/repository from a URL and extract it locally.
module Downlow
  VERSION = '0.1.3'

  # Fetch +url+ and extract the result, returning the extracted path.
  #
  # The first extra argument may be either an options Hash or a
  # destination path; any further Hash arguments are merged into the
  # options. The temporary download is deleted after extraction.
  # NOTE(review): relies on FileUtils being loaded by a dependency —
  # there is no `require 'fileutils'` in view; confirm.
  def self.get(url, *args)
    options = {}
    first = args.shift
    if first.is_a?(Hash)
      # hash as argument means were setting the options
      options = first
    elsif first.to_s != ''
      # string as argument means we're setting the destination
      options[:destination] = first
    end
    # merge the rest as options
    options = args.inject(options) {|o, arg| o = o.merge(arg) } if !args.empty?
    # fetch to a temp dir
    fetch_options = options.dup
    fetch_options.delete(:destination)
    path = fetch(url, fetch_options)
    final_path = extract(path, options)
    FileUtils.rm_r(path) # delete tmp path
    final_path
  end

  # Delegates to Downlow::Fetcher (scheme-specific download).
  def self.fetch(*args)
    Downlow::Fetcher.fetch(*args)
  end

  # Delegates to Downlow::Extractor (format-specific extraction).
  def self.extract(*args)
    Downlow::Extractor.extract(*args)
  end
end
# Global convenience shorthand for Downlow.get.
def DL(*args) Downlow.get(*args); end
require 'downlow/fetcher'
require 'downlow/fetchers/git'
require 'downlow/fetchers/http'
require 'downlow/fetchers/github'
require 'downlow/fetchers/local'
require 'downlow/extractor'
require 'downlow/extractors/tar_gz'
require 'downlow/extractors/zip'
require 'downlow/extractors/dir'
Version bump
require 'downlow/ext/pathname'
# Downlow: fetch a remote resource and extract it to a local path.
module Downlow
  VERSION = '0.1.4'

  # Fetch +url+, extract it, delete the temporary download, and return
  # the extracted path. The first extra argument is either an options
  # Hash or a destination path; remaining Hashes are merged into the
  # options.
  def self.get(url, *args)
    options = {}
    head = args.shift
    case head
    when Hash
      # a leading hash sets the options wholesale
      options = head
    else
      # anything non-empty is treated as the destination
      options[:destination] = head unless head.to_s == ''
    end
    # fold any remaining hashes into the options
    args.each { |extra| options = options.merge(extra) }
    # download to a temporary location first (no destination yet)
    fetch_options = options.dup
    fetch_options.delete(:destination)
    tmp_path = fetch(url, fetch_options)
    final_path = extract(tmp_path, options)
    FileUtils.rm_r(tmp_path) # remove the temporary download
    final_path
  end

  # Delegate fetching to Downlow::Fetcher.
  def self.fetch(*args)
    Downlow::Fetcher.fetch(*args)
  end

  # Delegate extraction to Downlow::Extractor.
  def self.extract(*args)
    Downlow::Extractor.extract(*args)
  end
end
def DL(*args) Downlow.get(*args); end
require 'downlow/fetcher'
require 'downlow/fetchers/git'
require 'downlow/fetchers/http'
require 'downlow/fetchers/github'
require 'downlow/fetchers/local'
require 'downlow/extractor'
require 'downlow/extractors/tar_gz'
require 'downlow/extractors/zip'
require 'downlow/extractors/dir'
|
Fixing placement of { and contents of hash.
|
module Podio
  # API wrapper for the Podio news endpoints.
  module News
    include Podio::ResponseWrapper
    extend self

    # Stream of news entries for the current user.
    def find_stream()
      Podio.connection.get("/news/stream").body
    end

    # Redirect target for a given news entry.
    def get_news_redirect(news_id)
      Podio.connection.get("/news/#{news_id}/redirect").body
    end

    # BUG FIX: the URL referenced the undefined local `news_entry`,
    # raising NameError; it now uses the `news_id` parameter.
    def unsubscribe_entry(news_id)
      Podio.connection.post do |req|
        req.url "/news/#{news_id}/unsubscribe"
      end
    end

    # Creates a news entry; returns its id.
    def create(attributes)
      response = Podio.connection.post do |req|
        req.url '/news/'
        req.body = attributes
      end
      response.body['news_id']
    end

    # Updates a news entry; returns the HTTP status.
    def update(id, attributes)
      response = Podio.connection.put do |req|
        req.url "/news/#{id}"
        req.body = attributes
      end
      response.status
    end

    def find(id, options={})
      member Podio.connection.get("/news/#{id}").body
    end

    def find_visible
      list Podio.connection.get("/news/").body
    end

    def find_all
      list Podio.connection.get("/news/").body
    end

    # BUG FIX: these filters used single-quoted strings, so the #{}
    # query parameters were sent literally instead of interpolated.
    def find_all_by_locale(locale)
      list Podio.connection.get("/news/?locale=#{locale}").body
    end

    def find_all_by_target_group(target_group)
      list Podio.connection.get("/news/?target_group=#{target_group}").body
    end

    # BUG FIX: a second query parameter must be joined with '&', not '?'.
    def find_all_by_locale_and_group(locale, group)
      list Podio.connection.get("/news/?locale=#{locale}&target_group=#{group}").body
    end
  end
end
Formatting
module Podio
  # API wrapper for the Podio news endpoints.
  module News
    include Podio::ResponseWrapper
    extend self

    # Stream of news entries for the current user.
    def find_stream()
      Podio.connection.get("/news/stream").body
    end

    # Redirect target for a given news entry.
    def get_news_redirect(news_id)
      Podio.connection.get("/news/#{news_id}/redirect").body
    end

    # BUG FIX: the URL referenced the undefined local `news_entry`,
    # raising NameError; it now uses the `news_id` parameter.
    def unsubscribe_entry(news_id)
      Podio.connection.post do |req|
        req.url "/news/#{news_id}/unsubscribe"
      end
    end

    # Creates a news entry; returns its id.
    def create(attributes)
      response = Podio.connection.post do |req|
        req.url '/news/'
        req.body = attributes
      end
      response.body['news_id']
    end

    # Updates a news entry; returns the HTTP status.
    def update(id, attributes)
      response = Podio.connection.put do |req|
        req.url "/news/#{id}"
        req.body = attributes
      end
      response.status
    end

    def find(id, options={})
      member Podio.connection.get("/news/#{id}").body
    end

    def find_visible
      list Podio.connection.get("/news/").body
    end

    def find_all
      list Podio.connection.get("/news/").body
    end

    # BUG FIX: these filters used single-quoted strings, so the #{}
    # query parameters were sent literally instead of interpolated.
    def find_all_by_locale(locale)
      list Podio.connection.get("/news/?locale=#{locale}").body
    end

    def find_all_by_target_group(target_group)
      list Podio.connection.get("/news/?target_group=#{target_group}").body
    end

    # BUG FIX: a second query parameter must be joined with '&', not '?'.
    def find_all_by_locale_and_group(locale, group)
      list Podio.connection.get("/news/?locale=#{locale}&target_group=#{group}").body
    end
  end
end
module Podio
  # API wrapper for the Podio user endpoints.
  module User
    include Podio::ResponseWrapper
    extend self

    # Returns the currently authenticated user (wrapped via `member`).
    def current
      member Podio.connection.get("/user/").body
    end
  end
end
add create user call
module Podio
  # API wrapper for the Podio user endpoints.
  module User
    include Podio::ResponseWrapper
    extend self

    # Returns the currently authenticated user (wrapped via `member`).
    def current
      member Podio.connection.get("/user/").body
    end

    # Creates a user from a signup token plus attributes; returns the
    # new user's id.
    def create(token, attributes)
      response = Podio.connection.post do |req|
        req.url '/user/'
        req.body = attributes.merge(:token => token)
      end
      response.body['user_id']
    end
  end
end
|
module Poppins
  # Renumbers Markdown reference-style links so reference labels appear
  # in order of first use, and rewrites the label block accordingly.
  class Document
    # input  - path of the Markdown file to read; nil/empty reads ARGF.
    # output - path to write the result to; nil prints to stdout.
    def initialize(input=nil, output=nil)
      # BUG FIX: a nil input (the default!) used to raise NoMethodError
      # on #empty?; nil now falls through to ARGF like an empty string.
      if input.nil? || input.empty?
        # TODO: Probably need to raise an error if ARGF is nil...
        @input = ARGF.readlines.join
      else
        # File.read closes the handle; the old File.open(...) leaked it.
        @input = File.read(input)
      end
      @output = output
      # RegEx for matching reference links in the text. (Avoid footnotes!)
      @links = /\[([^\]]+)\]\[([^^\]]+)\]/
      # RegEx for matching labels for reference links. (Avoid footnotes!)
      @labels = /^\[([^^\]]+)\]:\s+(.+)$/
    end

    ##
    # Returns an array of [text, label] pairs in document order.
    def links
      @input.scan(@links)
    end

    # Returns a Hash mapping label => URL.
    def labels
      Hash[@input.scan(@labels)]
    end

    # Labels in the order their links first appear in the text.
    def order_of_first_appearance
      order = []
      links.each do |l|
        order.push(l[1]) unless order.include?(l[1])
      end
      order
    end

    # New label lines ("[1]: url", ...) numbered by first appearance.
    def ordinal_references
      references = []
      order_of_first_appearance.each_with_index do |o, i|
        references.push("[#{i+1}]: #{labels[o]}")
      end
      references
    end

    ##
    # Returns an array of the link text and the new reference number when
    # passed links based on the old reference numbering system
    def get_replacement(string)
      md = string.match(@links)
      link_text = md[1].to_s
      reference_number = (order_of_first_appearance.index(md[2]) + 1).to_s
      return [link_text, reference_number]
    end

    ##
    # Produces the clean, formatted version of the Markdown with new
    # reference numbers.
    def clean_and_format
      # Remove old references (TODO: still leaves blank lines behind.)
      result = @input.gsub(@labels, '')
      # Add new references
      ordinal_references.each do |r|
        result += r.to_s + "\n"
      end
      # Replace old reference numbers with the new ones
      result = result.gsub(@links) do |m|
        replacement = get_replacement(m)
        "[#{replacement[0]}][#{replacement[1]}]"
      end
      # Write to the output file when given, otherwise print to stdout.
      if @output
        File.open(@output, 'w') do |f|
          f.write(result)
        end
      else
        puts result
      end
    end
  end
end
remove blank lines when reformatting reference links
module Poppins
  # Renumbers Markdown reference-style links so reference labels appear
  # in order of first use, and rewrites the label block accordingly.
  class Document
    # input  - path of the Markdown file to read; nil/empty reads ARGF.
    # output - path to write the result to; nil prints to stdout.
    def initialize(input=nil, output=nil)
      # BUG FIX: a nil input (the default!) used to raise NoMethodError
      # on #empty?; nil now falls through to ARGF like an empty string.
      if input.nil? || input.empty?
        # TODO: Probably need to raise an error if ARGF is nil...
        @input = ARGF.readlines.join
      else
        # File.read closes the handle; the old File.open(...) leaked it.
        @input = File.read(input)
      end
      @output = output
      # RegEx for matching reference links in the text. (Avoid footnotes!)
      @links = /\[([^\]]+)\]\[([^^\]]+)\]/
      # RegEx for matching labels for reference links. (Avoid footnotes!)
      @labels = /^\[([^^\]]+)\]:\s+(.+)$/
      # Same as @labels but also consumes the trailing newline, so removed
      # label lines do not leave blank lines behind.
      @labels_with_possible_newlines = /^\[([^^\]]+)\]:\s+(.+)(\n)?/
    end

    ##
    # Returns an array of [text, label] pairs in document order.
    def links
      @input.scan(@links)
    end

    # Returns a Hash mapping label => URL.
    def labels
      Hash[@input.scan(@labels)]
    end

    # Labels in the order their links first appear in the text.
    def order_of_first_appearance
      order = []
      links.each do |l|
        order.push(l[1]) unless order.include?(l[1])
      end
      order
    end

    # New label lines ("[1]: url", ...) numbered by first appearance.
    def ordinal_references
      references = []
      order_of_first_appearance.each_with_index do |o, i|
        references.push("[#{i+1}]: #{labels[o]}")
      end
      references
    end

    ##
    # Returns an array of the link text and the new reference number when
    # passed links based on the old reference numbering system
    def get_replacement(string)
      md = string.match(@links)
      link_text = md[1].to_s
      reference_number = (order_of_first_appearance.index(md[2]) + 1).to_s
      return [link_text, reference_number]
    end

    ##
    # Produces the clean, formatted version of the Markdown with new
    # reference numbers.
    def clean_and_format
      # Remove old references, including their trailing newlines.
      result = @input.gsub(@labels_with_possible_newlines, '')
      # Add new references
      ordinal_references.each do |r|
        result += r.to_s + "\n"
      end
      # Replace old reference numbers with the new ones
      result = result.gsub(@links) do |m|
        replacement = get_replacement(m)
        "[#{replacement[0]}][#{replacement[1]}]"
      end
      # Write to the output file when given, otherwise print to stdout.
      if @output
        File.open(@output, 'w') do |f|
          f.write(result)
        end
      else
        puts result
      end
    end
  end
end
|
module Potemkin
  # Runs build/clean commands, optionally wrapped in environment variables.
  class Builder
    # Subclasses override to supply env vars for the build; nil means none.
    def env_vars
    end

    # Will build the actual APK
    def build
      logger.describe "creating build"
      if env_vars
        with_env_vars(env_vars) { Potemkin.run build_command }
      else
        Potemkin.run build_command
      end
    end

    def clean
      logger.describe "cleaning build"
      Potemkin.run clean_command
    end

    # Takes a hash and a block, wrapping the block with the env variables
    # found in the hash. Returns the block's value.
    # BUG FIX: restoration now happens in an `ensure` block, so the
    # original environment is restored even when the block raises.
    def with_env_vars(env_vars)
      old_values = {}
      env_vars.each do |key, new_value|
        old_values[key] = ENV[key]
        ENV[key] = new_value
      end
      begin
        yield
      ensure
        env_vars.each_key do |key|
          ENV[key] = old_values[key]
        end
      end
    end

    # Returns the config, mainly here to mock in tests
    def config
      Potemkin.config
    end

    # Returns the logger, mainly here to mock in tests
    def logger
      Potemkin.logger
    end
  end
end
By default no env vars are set
Seemed logical to me that clean & build were run with env_vars, whether or not there are any...
module Potemkin
  # Runs build/clean commands wrapped in the configured env variables.
  class Builder
    # No environment variables by default
    def env_vars
      {}
    end

    # Will build the actual APK
    def build
      logger.describe "creating build"
      with_env_vars(env_vars) { Potemkin.run build_command }
    end

    def clean
      logger.describe "cleaning build"
      with_env_vars(env_vars) { Potemkin.run clean_command }
    end

    # Takes a hash and a block, wrapping the block with the env variables
    # found in the hash. Returns the block's value.
    # BUG FIX: restoration now happens in an `ensure` block, so the
    # original environment is restored even when the block raises.
    def with_env_vars(env_vars)
      old_values = {}
      env_vars.each do |key, new_value|
        old_values[key] = ENV[key]
        ENV[key] = new_value
      end
      begin
        yield
      ensure
        env_vars.each_key do |key|
          ENV[key] = old_values[key]
        end
      end
    end

    # Returns the config, mainly here to mock in tests
    def config
      Potemkin.config
    end

    # Returns the logger, mainly here to mock in tests
    def logger
      Potemkin.logger
    end
  end
end
|
module Presenter
  module Helper
    include Presenter::Naming

    # BUG FIX: the `def present` line was misplaced inside the
    # conditional, which is a syntax error. Reconstructed so that a
    # collection (anything responding to #map) is mapped to presenters
    # and a single object is delegated to handle_single_object.
    def present(object_or_collection, *args, &block)
      if object_or_collection.respond_to?(:map) # If it is a collection
        object_or_collection.map { |object| presenter_from_model_object(object).new(object) }
      else
        handle_single_object(object_or_collection, *args)
      end
    end

    # Wraps one object in its presenter; arity mismatches are translated
    # into Presenter::Error.
    def handle_single_object(object, *args)
      presenter_from_model_object(object).new(object, *args)
    rescue ArgumentError
      raise Presenter::Error, Presenter::Error::single_object_argument_error_message
    end
  end
end
Handle collections for #present
module Presenter
  module Helper
    include Presenter::Naming

    # Present a single model object or a collection of them. Collections
    # (anything responding to #map) are presented item by item; a block
    # may supply extra per-item presenter arguments.
    def present(object_or_collection, *args, &block)
      unless object_or_collection.respond_to?(:map)
        return handle_single_object(object_or_collection, *args)
      end
      handle_collection(object_or_collection, *args, &block)
    end

    # Wrap one object in its presenter; arity mismatches become
    # Presenter::Error.
    def handle_single_object(object, *args)
      presenter_from_model_object(object).new(object, *args)
    rescue ArgumentError
      raise Presenter::Error, Presenter::Error::single_object_argument_error_message
    end

    # Wrap each member of the collection, appending any block-provided
    # arguments to the shared ones.
    def handle_collection(collection, *args, &block)
      collection.map do |item|
        extra_args = block_given? ? block.call(item) : []
        presenter_from_model_object(item).new(item, *args, *extra_args)
      end
    rescue ArgumentError
      raise Presenter::Error, Presenter::Error::collection_argument_error_message
    end
  end
end
|
Update pitcher.rb
Moving response .txt file to a 'logs' directory
|
require 'plist'
module LionAdmin
  # Remote administration of OS X Server's `serveradmin` tool over ssh.
  class Base
    # user - "user@host" string passed straight to ssh.
    def initialize(user)
      @user_prefix = "ssh #{user}"
      @os_version = %x[#{@user_prefix} defaults read loginwindow SystemVersionStampAsString]
      # 10.7/10.8 moved serveradmin inside Server.app
      if @os_version.match(/10\.7.*|10\.8.*/)
        @serveradmin="/Applications/Server.app/Contents/ServerRoot/usr/sbin/serveradmin"
      else
        @serveradmin="/usr/sbin/serveradmin"
      end
    end

    def version
      version = %x[#{@user_prefix} #{@serveradmin} -v]
    end

    def hostname
      hostname = %x[#{@user_prefix} hostname].chomp
    end

    # Extracts the hardware serial number from system_profiler output.
    def serialnumber
      serial = String.new
      system_profiler = %x[#{@user_prefix} system_profiler SPHardwareDataType]
      system_profiler.each_line do |line|
        if line.match("Serial Number") && line.match("system")
          serial = line.split(":").last.chomp.strip
        end
      end
      return serial
    end

    def services
      services = %x[#{@user_prefix} sudo #{@serveradmin} list].split("\n")
    end

    def fullstatus(service)
      fullstatus = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} fullstatus -x #{service}])
    end

    def status(service)
      status = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} status -x #{service}])
    end

    def settings(service)
      settings = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} settings -x #{service}])
    end

    # BUG FIX: start/stop were the only serveradmin invocations missing
    # `sudo`, so they failed for non-root ssh users; now consistent with
    # every other command in this class.
    def start_service(service)
      if check_if_running(service)
        return "service is already running..."
      else
        puts "starting service: #{service}..."
        %x[#{@user_prefix} sudo #{@serveradmin} start #{service}]
        puts "service: #{service} started!"
      end
    end

    def stop_service(service)
      if check_if_running(service)
        puts "stopping service: #{service}..."
        %x[#{@user_prefix} sudo #{@serveradmin} stop #{service}]
        puts "service: #{service} stopped!"
      else
        return "service is already stopped"
      end
    end

    # NOTE(review): Plist::parse_xml returns a Hash (or nil), which does
    # not respond to #match — this method likely raises NoMethodError.
    # Left as-is pending confirmation of serveradmin's command output.
    def run_command(service,command)
      output = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} command #{service}:command = #{command}])
      if output.match("UNEXPECTED_COMMAND")
        return "received unexpected command"
      else
        return output
      end
    end

    def get_running_services
      running_services = Array.new
      list_of_services = services
      list_of_services.each do |s|
        if check_if_running(s)
          running_services.push(s)
        end
      end
      return running_services
    end

    def get_stopped_services
      stopped_services = Array.new
      list_of_services = services
      list_of_services.each do |s|
        unless check_if_running(s)
          stopped_services.push(s)
        end
      end
      return stopped_services
    end

    def change_settings(service,pref,value)
      output_plist = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} settings #{service}:#{pref} = #{value} -x])
      return output_plist[pref]
    end

    # True when the service's status plist reports state RUNNING.
    def check_if_running(service)
      status = status(service) if !service.nil?
      state = status["state"] if !status.nil?
      if !state.nil? && state.match(/RUNNING/)
        return true
      else
        return false
      end
    end
  end
end
fixed start_service and stop_service, they were missing sudo
require 'plist'
module LionAdmin
  # Remote administration of OS X Server's `serveradmin` tool over ssh.
  class Base
    # user - "user@host" string passed straight to ssh.
    def initialize(user)
      @user_prefix = "ssh #{user}"
      @os_version = %x[#{@user_prefix} defaults read loginwindow SystemVersionStampAsString]
      # 10.7/10.8 moved serveradmin inside Server.app
      if @os_version.match(/10\.7.*|10\.8.*/)
        @serveradmin="/Applications/Server.app/Contents/ServerRoot/usr/sbin/serveradmin"
      else
        @serveradmin="/usr/sbin/serveradmin"
      end
    end

    def version
      version = %x[#{@user_prefix} #{@serveradmin} -v]
    end

    def hostname
      hostname = %x[#{@user_prefix} hostname].chomp
    end

    # Extracts the hardware serial number from system_profiler output.
    def serialnumber
      serial = String.new
      system_profiler = %x[#{@user_prefix} system_profiler SPHardwareDataType]
      system_profiler.each_line do |line|
        if line.match("Serial Number") && line.match("system")
          serial = line.split(":").last.chomp.strip
        end
      end
      return serial
    end

    def services
      services = %x[#{@user_prefix} sudo #{@serveradmin} list].split("\n")
    end

    def fullstatus(service)
      fullstatus = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} fullstatus -x #{service}])
    end

    def status(service)
      status = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} status -x #{service}])
    end

    def settings(service)
      settings = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} settings -x #{service}])
    end

    # Starts the service unless it is already running.
    def start_service(service)
      if check_if_running(service)
        return "service is already running..."
      else
        puts "starting service: #{service}..."
        %x[#{@user_prefix} sudo #{@serveradmin} start #{service}]
        puts "service: #{service} started!"
      end
    end

    # Stops the service if it is currently running.
    def stop_service(service)
      if check_if_running(service)
        puts "stopping service: #{service}..."
        %x[#{@user_prefix} sudo #{@serveradmin} stop #{service}]
        puts "service: #{service} stopped!"
      else
        return "service is already stopped"
      end
    end

    # NOTE(review): Plist::parse_xml returns a Hash (or nil), which does
    # not respond to #match — this method likely raises NoMethodError.
    # Confirm serveradmin's command output format before relying on it.
    def run_command(service,command)
      output = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} command #{service}:command = #{command}])
      if output.match("UNEXPECTED_COMMAND")
        return "received unexpected command"
      else
        return output
      end
    end

    def get_running_services
      running_services = Array.new
      list_of_services = services
      list_of_services.each do |s|
        if check_if_running(s)
          running_services.push(s)
        end
      end
      return running_services
    end

    def get_stopped_services
      stopped_services = Array.new
      list_of_services = services
      list_of_services.each do |s|
        unless check_if_running(s)
          stopped_services.push(s)
        end
      end
      return stopped_services
    end

    def change_settings(service,pref,value)
      output_plist = Plist::parse_xml(%x[#{@user_prefix} sudo #{@serveradmin} settings #{service}:#{pref} = #{value} -x])
      return output_plist[pref]
    end

    # True when the service's status plist reports state RUNNING.
    def check_if_running(service)
      status = status(service) if !service.nil?
      state = status["state"] if !status.nil?
      if !state.nil? && state.match(/RUNNING/)
        return true
      else
        return false
      end
    end
  end
end
require_relative '_yrcad'
# Length-unit helpers; the base unit is the millimetre, so every
# conversion returns a value expressed in mm.
class Numeric
  # millimetres (identity — mm is the base unit)
  def mm
    self
  end

  # centimetres to mm
  def cm
    self * 10.0
  end

  # micrometres to mm
  def um
    self / 1000.0
  end

  # inches to mm (exactly 25.4 mm per inch)
  def in
    self * 25.4
  end
end
# Convert polar coordinates (radius r, angle a in radians) to [x, y].
def to_polar(r, a)
  [Math::cos(a), Math::sin(a)].map { |component| r * component }
end
TOLERANCE = 50.um
# Base class for all solids/profiles. Subclasses either assign @shape in
# initialize or override render to build it lazily. Boolean and
# transform operations return new Shape-derived objects.
class Shape
  # if @shape isn't defined in a Shape's initialize() method, then render()
  # should be overridden to create and return it on-the-fly.
  def render
    @shape
  end

  # Boolean union with another shape.
  def add(shape)
    Union.new(self, shape)
  end

  # Boolean difference (self minus shape).
  def sub(shape)
    Difference.new(self, shape)
  end

  # Boolean intersection.
  def mul(shape)
    Intersection.new(self, shape)
  end

  def move(x, y, z)
    Shape.new # TODO
  end

  def move_x(delta)
    move(delta, 0, 0)
  end

  def move_y(delta)
    move(0, delta, 0)
  end

  def move_z(delta)
    move(0, 0, delta)
  end

  def rotate(angle, axis)
    Shape.new # TODO
  end

  def rot_x(angle)
    rotate(angle, [1, 0, 0])
  end

  def rot_y(angle)
    rotate(angle, [0, 1, 0])
  end

  def rot_z(angle)
    rotate(angle, [0, 0, 1])
  end

  def scale(x, y, z)
    Shape.new # TODO
  end

  def scale_x(factor)
    scale(factor, 1, 1)
  end

  def scale_y(factor)
    scale(1, factor, 1)
  end

  def scale_z(factor)
    scale(1, 1, factor)
  end

  # Linear extrusion of this (2D) profile to the given height.
  def extrude(height, twist=0)
    LinearExtrusion.new(self, height, twist)
  end

  # Solid of revolution of this (2D) profile through +angle+ degrees.
  def revolve(angle=360)
    Revolution.new(self, angle)
  end

  # Axis-aligned bounding box as a [min, max] pair.
  # NOTE(review): not implemented yet — the accessors below will raise
  # NoMethodError on the nil return until it is.
  def bbox
    # TODO
  end

  def min_x
    bbox[0].x
  end

  def min_y
    bbox[0].y
  end

  def min_z
    bbox[0].z
  end

  def max_x
    bbox[1].x
  end

  def max_y
    bbox[1].y
  end

  def max_z
    bbox[1].z
  end

  def x_size
    max_x - min_x
  end

  def y_size
    max_y - min_y
  end

  def z_size
    max_z - min_z
  end
end
# Regular n-gon inscribed in a circle of the given radius, centred at
# the origin. Inherits from Polygon (defined in _yrcad).
class RegularPolygon < Polygon
  attr_reader :sides, :radius

  def initialize(sides, radius)
    @sides = sides
    @radius = radius
    # evenly spaced vertex angles around the full circle
    angles = (1..sides).map { |i| i * 2 * Math::PI / sides }
    points = angles.map { |a| to_polar(radius, a) }
    super(points)
  end
end
class Square < Polygon
def initialize(size)
@shape = nil # TODO
end
end
class Circle < Shape
def initialize(dia)
@shape = nil # TODO
end
end
class Text < Shape
end
class Cube < Box
def initialize(size)
super(size, size, size)
end
end
class Cylinder < Shape
def initialize(height, dia)
@shape = nil # TODO
end
end
class Sphere < Shape
def initialize(dia)
@shape = nil # TODO
end
end
class Polyhedron < Shape
def initialize(points, triangles)
@shape = nil # TODO
end
end
class Torus < Shape
def initialize(dia1, dia2)
@shape = nil # TODO
end
end
class LinearExtrusion < Shape
attr_reader :profile, :height
def initialize(profile, height, twist=0)
@profile = profile
@height = height
@twist = twist
@shape = nil # TODO
end
end
class Revolution < Shape
attr_reader :profile, :angle
def initialize(profile, angle=360)
@profile = profile
@angle = angle
@shape = nil # TODO
end
end
class RegularPrism < LinearExtrusion
attr_reader :sides, :radius
def initialize(sides, radius, height)
@sides = sides
@radius = radius
poly = RegularPolygon.new(sides, radius)
super(poly, height)
end
end
def read_stl(path)
nil # TODO
end
# Defines a global lower-case factory method (e.g. `cube(...)`) that
# forwards its arguments to klass.new (bound via Method#to_proc, so
# arity checking matches the constructor's).
def make_maker(name, klass)
  Object.send(:define_method, name, &klass.method(:new))
end
make_maker :polygon, Polygon
make_maker :reg_poly, RegularPolygon
make_maker :square, Square
make_maker :circle, Circle
make_maker :text, Text
make_maker :box, Box
make_maker :cube, Cube
make_maker :cylinder, Cylinder
make_maker :sphere, Sphere
make_maker :polyhedron, Polyhedron
make_maker :torus, Torus
make_maker :reg_prism, RegularPrism
Remove TODO from LinearExtrusion.initialize.
require_relative '_yrcad'
# Length-unit helpers; the base unit is the millimetre, so every
# conversion returns a value expressed in mm.
class Numeric
  # millimetres (identity — mm is the base unit)
  def mm
    self
  end

  # centimetres to mm
  def cm
    self * 10.0
  end

  # micrometres to mm
  def um
    self / 1000.0
  end

  # inches to mm (exactly 25.4 mm per inch)
  def in
    self * 25.4
  end
end
# Convert polar coordinates (radius r, angle a in radians) to [x, y].
def to_polar(r, a)
  [Math::cos(a), Math::sin(a)].map { |component| r * component }
end
TOLERANCE = 50.um
class Shape
# if @shape isn't defined in a Shape's initialize() method, then render()
# should be overridden to create and return it on-the-fly.
def render
@shape
end
def add(shape)
Union.new(self, shape)
end
def sub(shape)
Difference.new(self, shape)
end
def mul(shape)
Intersection.new(self, shape)
end
def move(x, y, z)
Shape.new # TODO
end
def move_x(delta)
move(delta, 0, 0)
end
def move_y(delta)
move(0, delta, 0)
end
def move_z(delta)
move(0, 0, delta)
end
def rotate(angle, axis)
Shape.new # TODO
end
def rot_x(angle)
rotate(angle, [1, 0, 0])
end
def rot_y(angle)
rotate(angle, [0, 1, 0])
end
def rot_z(angle)
rotate(angle, [0, 0, 1])
end
def scale(x, y, z)
Shape.new # TODO
end
def scale_x(factor)
scale(factor, 1, 1)
end
def scale_y(factor)
scale(1, factor, 1)
end
def scale_z(factor)
scale(1, 1, factor)
end
def extrude(height, twist=0)
LinearExtrusion.new(self, height, twist)
end
def revolve(angle=360)
Revolution.new(self, angle)
end
def bbox
# TODO
end
def min_x
bbox[0].x
end
def min_y
bbox[0].y
end
def min_z
bbox[0].z
end
def max_x
bbox[1].x
end
def max_y
bbox[1].y
end
def max_z
bbox[1].z
end
def x_size
max_x - min_x
end
def y_size
max_y - min_y
end
def z_size
max_z - min_z
end
end
class RegularPolygon < Polygon
attr_reader :sides, :radius
def initialize(sides, radius)
@sides = sides
@radius = radius
angles = (1..sides).map { |i| i * 2 * Math::PI / sides }
points = angles.map { |a| to_polar(radius, a) }
super(points)
end
end
class Square < Polygon
def initialize(size)
@shape = nil # TODO
end
end
class Circle < Shape
def initialize(dia)
@shape = nil # TODO
end
end
class Text < Shape
end
class Cube < Box
def initialize(size)
super(size, size, size)
end
end
class Cylinder < Shape
def initialize(height, dia)
@shape = nil # TODO
end
end
class Sphere < Shape
def initialize(dia)
@shape = nil # TODO
end
end
class Polyhedron < Shape
def initialize(points, triangles)
@shape = nil # TODO
end
end
class Torus < Shape
def initialize(dia1, dia2)
@shape = nil # TODO
end
end
# Extrudes a 2D profile vertically by +height+, optionally twisting it.
class LinearExtrusion < Shape
  attr_reader :profile, :height

  def initialize(profile, height, twist=0)
    @profile = profile
    @height = height
    @twist = twist
    # NOTE(review): @shape is never assigned here, so the inherited
    # render returns nil — confirm whether render should be overridden.
  end
end
class Revolution < Shape
attr_reader :profile, :angle
def initialize(profile, angle=360)
@profile = profile
@angle = angle
@shape = nil # TODO
end
end
class RegularPrism < LinearExtrusion
attr_reader :sides, :radius
def initialize(sides, radius, height)
@sides = sides
@radius = radius
poly = RegularPolygon.new(sides, radius)
super(poly, height)
end
end
def read_stl(path)
nil # TODO
end
def make_maker(name, klass)
Object.send(:define_method, name, &klass.method(:new))
end
make_maker :polygon, Polygon
make_maker :reg_poly, RegularPolygon
make_maker :square, Square
make_maker :circle, Circle
make_maker :text, Text
make_maker :box, Box
make_maker :cube, Cube
make_maker :cylinder, Cylinder
make_maker :sphere, Sphere
make_maker :polyhedron, Polyhedron
make_maker :torus, Torus
make_maker :reg_prism, RegularPrism
|
#!/usr/bin/env ruby
require "active_support"
require 'peatio_client'
require_relative "config"
require_relative "myfunc"
def new_yunbi_client
client = PeatioAPI::Client.new endpoint: 'https://yunbi.com', access_key: my_yunbi_access_key, secret_key: my_yunbi_secret_key
end
def new_yunbi_pub_client
client_public = PeatioAPI::Client.new endpoint: 'https://yunbi.com'
end
def fetch_yunbi (quote="bts", base="cny", max_orders=5)
yunbi_fetch quote:quote, base:base, max_orders:max_orders
end
# Fetch the public order book for quote/base from Yunbi and normalize it
# to {source, base, quote, asks, bids}, where each entry keeps only
# "price" and "volume". Asks are sorted ascending, bids descending.
def yunbi_fetch (quote:"bts", base:"cny", max_orders:5)
  client_public = new_yunbi_pub_client
  # Yunbi still uses the legacy "btsx" market symbol for bts
  new_quote = (quote == "bts" ? "btsx" : quote)
  market = new_quote + base
  order_book = client_public.get_public '/api/v2/order_book', {"market":market, "asks_limit":max_orders, "bids_limit":max_orders}
  slim = lambda do |entries|
    entries.map { |e| {"price"=>e["price"], "volume"=>e["remaining_volume"]} }
  end
  sorted_asks = order_book["asks"].sort_by { |e| e["price"].to_f }
  sorted_bids = order_book["bids"].sort_by { |e| e["price"].to_f }.reverse
  {
    "source"=>"yunbi",
    "base"=>base,
    "quote"=>quote,
    "asks"=>slim.call(sorted_asks),
    "bids"=>slim.call(sorted_bids)
  }
end
def yunbi_balance
client = new_yunbi_client
me = client.get '/api/v2/members/me'
my_accounts = me["accounts"]
my_balance=Hash.new
my_accounts.each { |e| my_balance.store e["currency"],e["balance"].to_f - e["locked"].to_f }
if my_balance["bts"].nil?
my_balance.store "bts", my_balance["btsx"]
end
return my_balance
end
#for test
#def orders_yunbi (options={})
#default_options = {:base=>"cny", :quote=>"bts", :type=>"all"}
#options = default_options.merge options
#end
def yunbi_orders (quote:"bts", base:"cny", type:"all")
client = new_yunbi_client
new_quote = (quote == "bts" ? "btsx" : quote)
market = new_quote + base
orders = client.get '/api/v2/orders', {"market":market}
need_ask = ("all" == type or "ask" == type)
need_bid = ("all" == type or "bid" == type)
asks_new=[]
bids_new=[]
orders.each do |e|
if "buy" == e["side"] and need_bid
item = {"id"=>e["id"],"price"=>e["price"],"volume"=>e["remaining_volume"]}
bids_new.push item
elsif "sell" == e["side"] and need_ask
item = {"id"=>e["id"],"price"=>e["price"],"volume"=>e["remaining_volume"]}
asks_new.push item
end
end
asks_new.sort_by! {|e| e["price"].to_f}
bids_new.sort_by! {|e| e["price"].to_f}.reverse!
#return
{
"source"=>"yunbi",
"base"=>base,
"quote"=>quote,
"asks"=>asks_new,
"bids"=>bids_new
}
end
# Cancels a single open order by id via the authenticated API.
def yunbi_cancel_order (id)
  client = new_yunbi_client
  client.post '/api/v2/order/delete', {"id":id}
end
def yunbi_cancel_orders (ids)
client = new_yunbi_client
ids.each { |id| client.post '/api/v2/order/delete', {"id":id} }
end
def yunbi_cancel_orders_by_type (quote:"bts", base:"cny", type:"all")
orders = yunbi_orders quote:quote, base:base, type:type
orders["asks"].each {|e| yunbi_cancel_order e["id"]}
orders["bids"].each {|e| yunbi_cancel_order e["id"]}
end
def yunbi_cancel_all_orders
client = new_yunbi_client
orders = client.post '/api/v2/orders/clear'
end
def yunbi_new_order (quote:"bts", base:"cny", type:nil, price:nil, volume:nil)
if type.nil? or price.nil? or volume.nil?
return
end
new_quote = (quote == "bts" ? "btsx" : quote)
market = new_quote + base
new_type = ((type == "bid" or type == "buy") ? "buy" : "sell")
client = new_yunbi_client
orders = client.post '/api/v2/orders', {"market":market, "side":new_type, "price":price, "volume":volume}
end
def yunbi_bid (quote:"bts", base:"cny", price:nil, volume:nil)
yunbi_new_order quote:quote, base:base, type:"bid", price:price, volume:volume
end
def yunbi_ask (quote:"bts", base:"cny", price:nil, volume:nil)
yunbi_new_order quote:quote, base:base, type:"ask", price:price, volume:volume
end
#main
if __FILE__ == $0
if ARGV[0]
ob = fetch_yunbi ARGV[0], ARGV[1]
else
ob = fetch_yunbi
end
print_order_book ob
end
Change Yunbi trade API wrappers for compatibility with the btc38 API
#!/usr/bin/env ruby
require "active_support"
require 'peatio_client'
require_relative "config"
require_relative "myfunc"
def new_yunbi_client
client = PeatioAPI::Client.new endpoint: 'https://yunbi.com', access_key: my_yunbi_access_key, secret_key: my_yunbi_secret_key
end
def new_yunbi_pub_client
client_public = PeatioAPI::Client.new endpoint: 'https://yunbi.com'
end
def fetch_yunbi (quote="bts", base="cny", max_orders=5)
yunbi_fetch quote:quote, base:base, max_orders:max_orders
end
def yunbi_fetch (quote:"bts", base:"cny", max_orders:5)
client_public = new_yunbi_pub_client
new_quote = (quote == "bts" ? "btsx" : quote)
market = new_quote + base
order_book = client_public.get_public '/api/v2/order_book', {"market":market, "asks_limit":max_orders, "bids_limit":max_orders}
ob = order_book
asks = ob["asks"].sort_by {|e| e["price"].to_f}
bids = ob["bids"].sort_by {|e| e["price"].to_f}.reverse
asks_new=[]
bids_new=[]
asks.each do |e|
item = {"price"=>e["price"],"volume"=>e["remaining_volume"]}
asks_new.push item
end
bids.each do |e|
item = {"price"=>e["price"],"volume"=>e["remaining_volume"]}
bids_new.push item
end
#return
{
"source"=>"yunbi",
"base"=>base,
"quote"=>quote,
"asks"=>asks_new,
"bids"=>bids_new
}
end
def yunbi_balance
client = new_yunbi_client
me = client.get '/api/v2/members/me'
my_accounts = me["accounts"]
my_balance=Hash.new
my_accounts.each { |e| my_balance.store e["currency"],e["balance"].to_f - e["locked"].to_f }
if my_balance["bts"].nil?
my_balance.store "bts", my_balance["btsx"]
end
return my_balance
end
#for test
#def orders_yunbi (options={})
#default_options = {:base=>"cny", :quote=>"bts", :type=>"all"}
#options = default_options.merge options
#end
# List my open orders on the quote/base market in the common order-book
# format. type selects which side(s) to include: "ask", "bid" or "all".
# Returns {"source","base","quote","asks","bids"}; asks ascending, bids
# descending by price; entries carry "id", "price" and "volume".
def yunbi_orders (quote:"bts", base:"cny", type:"all")
  client = new_yunbi_client
  new_quote = (quote == "bts" ? "btsx" : quote)
  market = new_quote + base
  orders = client.get '/api/v2/orders', {"market":market}
  # `||` instead of `or`: `or` has lower precedence than `=` and is a
  # well-known precedence trap in boolean expressions.
  need_ask = ("all" == type || "ask" == type)
  need_bid = ("all" == type || "bid" == type)
  slim = lambda { |e| {"id"=>e["id"], "price"=>e["price"], "volume"=>e["remaining_volume"]} }
  # Build each side with select/map instead of manual each+push, and use
  # non-destructive sort_by/reverse instead of chaining bang methods
  # (sort_by!…reverse! relied on the bang call returning the receiver).
  asks_new = need_ask ? orders.select { |e| "sell" == e["side"] }.map(&slim).sort_by { |e| e["price"].to_f } : []
  bids_new = need_bid ? orders.select { |e| "buy" == e["side"] }.map(&slim).sort_by { |e| e["price"].to_f }.reverse : []
  {
    "source"=>"yunbi",
    "base"=>base,
    "quote"=>quote,
    "asks"=>asks_new,
    "bids"=>bids_new
  }
end
# parameter base is to be compatible with btc38
# Cancel a single order by id. Returns nil without calling the API when
# id is 0 (the "no order" default). The base keyword exists only for
# signature compatibility with the btc38 wrappers and is ignored.
def yunbi_cancel_order (id:0, base:nil)
  return nil if 0 == id
  api = new_yunbi_client
  api.post '/api/v2/order/delete', {"id":id}
end
# parameter base is to be compatible with btc38
# Cancel several orders by id, one API call per id. base is accepted only
# for btc38 signature compatibility and is ignored.
def yunbi_cancel_orders (ids:[], base:nil)
  api = new_yunbi_client
  ids.each do |order_id|
    api.post '/api/v2/order/delete', {"id":order_id}
  end
end
# Cancel all my open orders of the given side ("ask", "bid" or "all")
# on the quote/base market; asks are cancelled first, then bids.
def yunbi_cancel_orders_by_type (quote:"bts", base:"cny", type:"all")
  book = yunbi_orders quote: quote, base: base, type: type
  (book["asks"] + book["bids"]).each do |order|
    yunbi_cancel_order id: order["id"]
  end
end
# Cancel every open order on the account in a single API call.
# quote/base are accepted only for btc38 signature compatibility; the
# Yunbi clear endpoint is account-wide, not per-market.
def yunbi_cancel_all_orders (quote:nil, base:nil)
  # Unused local `orders` removed; the API response is the implicit return.
  new_yunbi_client.post '/api/v2/orders/clear'
end
# Place a limit order on the quote/base market.
# type:: "bid"/"buy" maps to a buy; anything else maps to a sell.
# Returns nil without touching the API when type, price or volume is missing.
def yunbi_new_order (quote:"bts", base:"cny", type:nil, price:nil, volume:nil)
  # Guard clause with `||` (the original used the low-precedence `or`).
  return if type.nil? || price.nil? || volume.nil?
  new_quote = (quote == "bts" ? "btsx" : quote)
  market = new_quote + base
  side = ((type == "bid" || type == "buy") ? "buy" : "sell")
  client = new_yunbi_client
  # Unused local `orders` removed; the API response is the implicit return.
  client.post '/api/v2/orders', {"market":market, "side":side, "price":price, "volume":volume}
end
# Convenience wrapper: place a buy (bid) limit order via yunbi_new_order.
def yunbi_bid (quote:"bts", base:"cny", price:nil, volume:nil)
  yunbi_new_order(quote: quote, base: base, type: "bid", price: price, volume: volume)
end
# Convenience wrapper: place a sell (ask) limit order via yunbi_new_order.
def yunbi_ask (quote:"bts", base:"cny", price:nil, volume:nil)
  yunbi_new_order(quote: quote, base: base, type: "ask", price: price, volume: volume)
end
# main: when run as a script, fetch the order book (optionally for the
# quote/base given on the command line) and print it.
if __FILE__ == $0
  book = ARGV[0] ? fetch_yunbi(ARGV[0], ARGV[1]) : fetch_yunbi
  print_order_book book
end
|
require 'formula'
# Homebrew formula: Google App Engine SDK for Java 1.5.1.
# The SDK is installed whole into libexec; bin/ gets small bash shims that
# forward to the real scripts so the SDK's relative paths keep working.
class AppEngineJavaSdk < Formula
  url 'http://googleappengine.googlecode.com/files/appengine-java-sdk-1.5.1.zip'
  homepage 'http://code.google.com/appengine/docs/java/overview.html'
  sha1 'f7eda9d0b53d1193ee77a911fba7efd6ef32b181'

  # Bash wrapper forwarding all arguments to the real script in libexec.
  # "$@" (not $*): $* re-word-splits the arguments, breaking any argument
  # that contains spaces; "$@" forwards each argument intact.
  def shim_script target
    <<-EOS.undent
      #!/bin/bash
      #{libexec}/bin/#{target} "$@"
    EOS
  end

  def install
    # Drop Windows batch files; they are useless on OS X.
    rm Dir['bin/*.cmd']
    libexec.install Dir['*']
    Dir["#{libexec}/bin/*"].each do |b|
      n = Pathname.new(b).basename
      (bin+n).write shim_script(n)
    end
  end
end
appengine-java-sdk 1.5.3
Signed-off-by: Adam Vandenberg <34c2b6407fd5a10249a15d699d40f9ed1782e98c@gmail.com>
require 'formula'
# Homebrew formula: Google App Engine SDK for Java 1.5.3.
# The SDK is installed whole into libexec; bin/ gets small bash shims that
# forward to the real scripts so the SDK's relative paths keep working.
class AppEngineJavaSdk < Formula
  url 'http://googleappengine.googlecode.com/files/appengine-java-sdk-1.5.3.zip'
  homepage 'http://code.google.com/appengine/docs/java/overview.html'
  sha1 '9e4a2ba96931e51ed49fee119b2dceeeec015665'

  # Bash wrapper forwarding all arguments to the real script in libexec.
  # "$@" (not $*): $* re-word-splits the arguments, breaking any argument
  # that contains spaces; "$@" forwards each argument intact.
  def shim_script target
    <<-EOS.undent
      #!/bin/bash
      #{libexec}/bin/#{target} "$@"
    EOS
  end

  def install
    # Drop Windows batch files; they are useless on OS X.
    rm Dir['bin/*.cmd']
    libexec.install Dir['*']
    Dir["#{libexec}/bin/*"].each do |b|
      n = Pathname.new(b).basename
      (bin+n).write shim_script(n)
    end
  end
end
|
# Homebrew formula for gtk-mac-integration 2.0.8: a library that lets
# GTK applications integrate with the OS X desktop (menu bar, dock, etc.).
class GtkMacIntegration < Formula
desc "API to integrate GTK OS X applications with the Mac desktop"
homepage "https://wiki.gnome.org/Projects/GTK+/OSX/Integration"
url "https://download.gnome.org/sources/gtk-mac-integration/2.0/gtk-mac-integration-2.0.8.tar.xz"
sha256 "74fce9dbc5efe4e3d07a20b24796be1b1d6c3ac10a0ee6b1f1d685c809071b79"
bottle do
sha256 "d12a21fddf5ed6e18ea4025ebe480fd5b99929c234423cf29c634f4925d14156" => :yosemite
sha256 "40bdabc52178c159b9a0e2e35f74525cf36d20c7d59a28f71d8312a5518e97f8" => :mavericks
sha256 "8c8fb5c90fb42ee5ff1f646daacb0bc571d86f35b149df4da42d9e3e4ee74edd" => :mountain_lion
end
depends_on "pkg-config" => :build
depends_on "gtk+"
depends_on "gtk+3" => :recommended
depends_on "gobject-introspection"
depends_on "pygtk"
# Standard autotools build; GTK+2 support is always on, GTK+3 support
# follows the (default-on) "gtk+3" option.
def install
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
--with-gtk2
--enable-python=yes
--enable-introspection=yes
]
args << ((build.without? "gtk+3") ? "--without-gtk3" : "--with-gtk3")
system "./configure", *args
system "make", "install"
end
# Compiles and runs a minimal C program against the installed library to
# verify headers and linkage. The long flag list reproduces what
# pkg-config would emit for gtk+-2.0 plus this formula's own paths.
test do
(testpath/"test.c").write <<-EOS.undent
#include <gtkosxapplication.h>
int main(int argc, char *argv[]) {
gchar *bundle = gtkosx_application_get_bundle_path();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
gtkx = Formula["gtk+"]
libpng = Formula["libpng"]
pango = Formula["pango"]
pixman = Formula["pixman"]
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{gtkx.opt_include}/gtk-2.0
-I#{gtkx.opt_lib}/gtk-2.0/include
-I#{include}/gtkmacintegration
-I#{libpng.opt_include}/libpng16
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-DMAC_INTEGRATION
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{gtkx.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lgdk-quartz-2.0
-lgdk_pixbuf-2.0
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lgtk-quartz-2.0
-lgtkmacintegration-gtk2
-lintl
-lpango-1.0
-lpangocairo-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
gtk-mac-integration: update 2.0.8 bottle.
# Homebrew formula for gtk-mac-integration 2.0.8: a library that lets
# GTK applications integrate with the OS X desktop (menu bar, dock, etc.).
# This revision adds an El Capitan bottle.
class GtkMacIntegration < Formula
desc "API to integrate GTK OS X applications with the Mac desktop"
homepage "https://wiki.gnome.org/Projects/GTK+/OSX/Integration"
url "https://download.gnome.org/sources/gtk-mac-integration/2.0/gtk-mac-integration-2.0.8.tar.xz"
sha256 "74fce9dbc5efe4e3d07a20b24796be1b1d6c3ac10a0ee6b1f1d685c809071b79"
bottle do
sha256 "8f4ea7e3555ad822b049846440746ac785c94c97aea7bd36b12a51e22878644c" => :el_capitan
sha256 "d12a21fddf5ed6e18ea4025ebe480fd5b99929c234423cf29c634f4925d14156" => :yosemite
sha256 "40bdabc52178c159b9a0e2e35f74525cf36d20c7d59a28f71d8312a5518e97f8" => :mavericks
sha256 "8c8fb5c90fb42ee5ff1f646daacb0bc571d86f35b149df4da42d9e3e4ee74edd" => :mountain_lion
end
depends_on "pkg-config" => :build
depends_on "gtk+"
depends_on "gtk+3" => :recommended
depends_on "gobject-introspection"
depends_on "pygtk"
# Standard autotools build; GTK+2 support is always on, GTK+3 support
# follows the (default-on) "gtk+3" option.
def install
args = %W[
--disable-dependency-tracking
--disable-silent-rules
--prefix=#{prefix}
--with-gtk2
--enable-python=yes
--enable-introspection=yes
]
args << ((build.without? "gtk+3") ? "--without-gtk3" : "--with-gtk3")
system "./configure", *args
system "make", "install"
end
# Compiles and runs a minimal C program against the installed library to
# verify headers and linkage. The long flag list reproduces what
# pkg-config would emit for gtk+-2.0 plus this formula's own paths.
test do
(testpath/"test.c").write <<-EOS.undent
#include <gtkosxapplication.h>
int main(int argc, char *argv[]) {
gchar *bundle = gtkosx_application_get_bundle_path();
return 0;
}
EOS
atk = Formula["atk"]
cairo = Formula["cairo"]
fontconfig = Formula["fontconfig"]
freetype = Formula["freetype"]
gdk_pixbuf = Formula["gdk-pixbuf"]
gettext = Formula["gettext"]
glib = Formula["glib"]
gtkx = Formula["gtk+"]
libpng = Formula["libpng"]
pango = Formula["pango"]
pixman = Formula["pixman"]
flags = (ENV.cflags || "").split + (ENV.cppflags || "").split + (ENV.ldflags || "").split
flags += %W[
-I#{atk.opt_include}/atk-1.0
-I#{cairo.opt_include}/cairo
-I#{fontconfig.opt_include}
-I#{freetype.opt_include}/freetype2
-I#{gdk_pixbuf.opt_include}/gdk-pixbuf-2.0
-I#{gettext.opt_include}
-I#{glib.opt_include}/glib-2.0
-I#{glib.opt_lib}/glib-2.0/include
-I#{gtkx.opt_include}/gtk-2.0
-I#{gtkx.opt_lib}/gtk-2.0/include
-I#{include}/gtkmacintegration
-I#{libpng.opt_include}/libpng16
-I#{pango.opt_include}/pango-1.0
-I#{pixman.opt_include}/pixman-1
-DMAC_INTEGRATION
-D_REENTRANT
-L#{atk.opt_lib}
-L#{cairo.opt_lib}
-L#{gdk_pixbuf.opt_lib}
-L#{gettext.opt_lib}
-L#{glib.opt_lib}
-L#{gtkx.opt_lib}
-L#{lib}
-L#{pango.opt_lib}
-latk-1.0
-lcairo
-lgdk-quartz-2.0
-lgdk_pixbuf-2.0
-lgio-2.0
-lglib-2.0
-lgobject-2.0
-lgtk-quartz-2.0
-lgtkmacintegration-gtk2
-lintl
-lpango-1.0
-lpangocairo-1.0
]
system ENV.cc, "test.c", "-o", "test", *flags
system "./test"
end
end
|
mongo-orchestration 0.5 (new formula)
Closes #49478.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
# Homebrew formula for mongo-orchestration 0.5: a Python tool exposing a
# REST API for managing MongoDB server/replica-set/cluster configurations.
# Python dependencies are vendored into libexec via resources.
class MongoOrchestration < Formula
desc "REST API to manage MongoDB configurations on a single host."
homepage "https://github.com/10gen/mongo-orchestration"
url "https://pypi.python.org/packages/source/m/mongo-orchestration/mongo-orchestration-0.5.tar.gz"
sha256 "3d99f1700ba11169d9b25b8196454f4ba91476c5aea23b4cf368bea6bd73a07d"
head "https://github.com/10gen/mongo-orchestration"
depends_on :python if MacOS.version <= :snow_leopard
resource "bottle" do
url "https://pypi.python.org/packages/source/b/bottle/bottle-0.12.9.tar.gz"
sha256 "fe0a24b59385596d02df7ae7845fe7d7135eea73799d03348aeb9f3771500051"
end
resource "pymongo" do
url "https://pypi.python.org/packages/source/p/pymongo/pymongo-3.2.1.tar.gz"
sha256 "57a86ca602b0a4d2da1f9f3afa8c59fd8ca62d829f6d8f467eae0b7cb22ba88a"
end
resource "cherrypy" do
url "https://pypi.python.org/packages/source/C/CherryPy/CherryPy-4.0.0.tar.gz"
sha256 "73ad4f8870b5a3e9988a7778b5d3003a390d440527ec3458a0c7e58865d2611a"
end
def install
# Install each vendored Python resource into libexec/vendor, then the
# package itself into libexec, and wrap the scripts so PYTHONPATH is set.
ENV.prepend_create_path "PYTHONPATH", libexec/"vendor/lib/python2.7/site-packages"
resources.each do |r|
r.stage do
system "python", *Language::Python.setup_install_args(libexec/"vendor")
end
end
ENV.prepend_create_path "PYTHONPATH", libexec/"lib/python2.7/site-packages"
system "python", *Language::Python.setup_install_args(libexec)
bin.install Dir["#{libexec}/bin/*"]
bin.env_script_all_files(libexec/"bin", :PYTHONPATH => ENV["PYTHONPATH"])
end
# launchd service definition: runs the server in the foreground on
# 127.0.0.1:8889. Heredoc layout is significant (it is written verbatim).
def plist; <<-EOS.undent
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>mongo-orchestration</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/mongo-orchestration</string>
<string>-b</string>
<string>127.0.0.1</string>
<string>-p</string>
<string>8889</string>
<string>--no-fork</string>
<string>start</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
# Smoke test: the CLI must at least print its help output.
test do
system "#{bin}/mongo-orchestration", "-h"
end
end
|
# frozen_string_literal: true
require "cask/cmd/abstract_internal_command"
require "tap"
require "utils/formatter"
require "utils/github"
module Cask
class Cmd
# Maintainer-only command that scans open PRs on the official cask taps
# and squash-merges those that (a) passed Travis CI and (b) touch exactly
# one cask file, changing only version and/or sha256 lines.
class Automerge < AbstractInternalCommand
OFFICIAL_CASK_TAPS = [
"homebrew/cask",
"homebrew/cask-drivers",
"homebrew/cask-eid",
"homebrew/cask-fonts",
"homebrew/cask-versions",
].freeze
def run
taps = OFFICIAL_CASK_TAPS.map(&Tap.public_method(:fetch))
# Requires push access to every official tap before doing anything.
access = taps.all? { |tap| GitHub.write_access?(tap.full_name) }
raise "This command may only be run by Homebrew maintainers." unless access
Homebrew.install_gem! "git_diff"
require "git_diff"
failed = []
taps.each do |tap|
open_pull_requests = GitHub.pull_requests(tap.full_name, state: :open, base: "master")
open_pull_requests.each do |pr|
next unless passed_ci(pr)
next unless check_diff(pr)
number = pr["number"]
sha = pr.dig("head", "sha")
print "#{Formatter.url(pr["html_url"])} "
retried = false
begin
GitHub.merge_pull_request(
tap.full_name,
number: number, sha: sha,
merge_method: :squash,
commit_message: "Squashed and auto-merged via `brew cask automerge`."
)
puts "#{Tty.bold}#{Formatter.success("✔")}#{Tty.reset}"
rescue
# One retry after 5s; a second failure records the PR as failed.
unless retried
retried = true
sleep 5
retry
end
puts "#{Tty.bold}#{Formatter.error("✘")}#{Tty.reset}"
failed << pr["html_url"]
end
end
end
return if failed.empty?
$stderr.puts
raise CaskError, "Failed to merge the following PRs:\n#{failed.join("\n")}"
end
# True when the most recent Travis PR status on the head commit is "success".
def passed_ci(pr)
statuses = GitHub.open_api(pr["statuses_url"])
latest_pr_status = statuses.select { |status| status["context"] == "continuous-integration/travis-ci/pr" }
.max_by { |status| Time.parse(status["updated_at"]) }
latest_pr_status&.fetch("state") == "success"
end
# Downloads the PR diff and checks it is a "simple" cask update.
def check_diff(pr)
diff_url = pr["diff_url"]
output, _, status = curl_output("--location", diff_url)
return false unless status.success?
diff = GitDiff.from_string(output)
diff_is_single_cask(diff) && diff_only_version_or_checksum_changed(diff)
end
# Exactly one file changed, not renamed, and located at Casks/<name>.rb.
def diff_is_single_cask(diff)
return false unless diff.files.count == 1
file = diff.files.first
return false unless file.a_path == file.b_path
file.a_path.match?(%r{\ACasks/[^/]+\.rb\Z})
end
# At most two balanced +/- pairs, each being a version or sha256 line.
def diff_only_version_or_checksum_changed(diff)
lines = diff.files.flat_map(&:hunks).flat_map(&:lines)
additions = lines.select(&:addition?)
deletions = lines.select(&:deletion?)
changed_lines = deletions + additions
return false if additions.count != deletions.count
return false if additions.count > 2
changed_lines.all? { |line| diff_line_is_version(line.to_s) || diff_line_is_sha256(line.to_s) }
end
def diff_line_is_sha256(line)
line.match?(/\A[+-]\s*sha256 '[0-9a-f]{64}'\Z/)
end
def diff_line_is_version(line)
line.match?(/\A[+-]\s*version '[^']+'\Z/)
end
def self.help
"automatically merge “simple” Cask pull requests"
end
end
end
end
Remove cask-eid tap from automerge
Since the homebrew-cask-eid tap is no longer in use, it should be removed from the command as well.
# frozen_string_literal: true
require "cask/cmd/abstract_internal_command"
require "tap"
require "utils/formatter"
require "utils/github"
module Cask
class Cmd
# Maintainer-only command that scans open PRs on the official cask taps
# and squash-merges those that (a) passed Travis CI and (b) touch exactly
# one cask file, changing only version and/or sha256 lines.
# (The retired homebrew/cask-eid tap is intentionally absent here.)
class Automerge < AbstractInternalCommand
OFFICIAL_CASK_TAPS = [
"homebrew/cask",
"homebrew/cask-drivers",
"homebrew/cask-fonts",
"homebrew/cask-versions",
].freeze
def run
taps = OFFICIAL_CASK_TAPS.map(&Tap.public_method(:fetch))
# Requires push access to every official tap before doing anything.
access = taps.all? { |tap| GitHub.write_access?(tap.full_name) }
raise "This command may only be run by Homebrew maintainers." unless access
Homebrew.install_gem! "git_diff"
require "git_diff"
failed = []
taps.each do |tap|
open_pull_requests = GitHub.pull_requests(tap.full_name, state: :open, base: "master")
open_pull_requests.each do |pr|
next unless passed_ci(pr)
next unless check_diff(pr)
number = pr["number"]
sha = pr.dig("head", "sha")
print "#{Formatter.url(pr["html_url"])} "
retried = false
begin
GitHub.merge_pull_request(
tap.full_name,
number: number, sha: sha,
merge_method: :squash,
commit_message: "Squashed and auto-merged via `brew cask automerge`."
)
puts "#{Tty.bold}#{Formatter.success("✔")}#{Tty.reset}"
rescue
# One retry after 5s; a second failure records the PR as failed.
unless retried
retried = true
sleep 5
retry
end
puts "#{Tty.bold}#{Formatter.error("✘")}#{Tty.reset}"
failed << pr["html_url"]
end
end
end
return if failed.empty?
$stderr.puts
raise CaskError, "Failed to merge the following PRs:\n#{failed.join("\n")}"
end
# True when the most recent Travis PR status on the head commit is "success".
def passed_ci(pr)
statuses = GitHub.open_api(pr["statuses_url"])
latest_pr_status = statuses.select { |status| status["context"] == "continuous-integration/travis-ci/pr" }
.max_by { |status| Time.parse(status["updated_at"]) }
latest_pr_status&.fetch("state") == "success"
end
# Downloads the PR diff and checks it is a "simple" cask update.
def check_diff(pr)
diff_url = pr["diff_url"]
output, _, status = curl_output("--location", diff_url)
return false unless status.success?
diff = GitDiff.from_string(output)
diff_is_single_cask(diff) && diff_only_version_or_checksum_changed(diff)
end
# Exactly one file changed, not renamed, and located at Casks/<name>.rb.
def diff_is_single_cask(diff)
return false unless diff.files.count == 1
file = diff.files.first
return false unless file.a_path == file.b_path
file.a_path.match?(%r{\ACasks/[^/]+\.rb\Z})
end
# At most two balanced +/- pairs, each being a version or sha256 line.
def diff_only_version_or_checksum_changed(diff)
lines = diff.files.flat_map(&:hunks).flat_map(&:lines)
additions = lines.select(&:addition?)
deletions = lines.select(&:deletion?)
changed_lines = deletions + additions
return false if additions.count != deletions.count
return false if additions.count > 2
changed_lines.all? { |line| diff_line_is_version(line.to_s) || diff_line_is_sha256(line.to_s) }
end
def diff_line_is_sha256(line)
line.match?(/\A[+-]\s*sha256 '[0-9a-f]{64}'\Z/)
end
def diff_line_is_version(line)
line.match?(/\A[+-]\s*version '[^']+'\Z/)
end
def self.help
"automatically merge “simple” Cask pull requests"
end
end
end
end
|
require 'rails_helper'
# Controller specs for Api::V1::WebsitesController: index/create/show/destroy,
# covering JSON payloads, authorization (401 when not logged in) and
# member scoping. render_views is enabled so response bodies are real JSON.
describe Api::V1::WebsitesController, :type => :controller do
let(:member) { create :member }
let(:website) { create :website }
let(:group) {create :grouped_issue, website: website}
let(:subscriber) { create :subscriber, website: website }
# Eager (let!) so the member is always associated with the website.
let!(:website_member) { create :website_member, website: website, member: member }
let(:default_params) { {website_id: website.id, format: :json} }
render_views # this is used so we can check the json response from the controller
describe 'GET #index' do
let(:params) { default_params.merge({}) }
context 'if logged in' do
before { auth_member(member) }
it 'should render json' do
get :index, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'should assign websites' do
get :index, params
expect(assigns(:websites)).to eq([website])
end
it 'should render the right json' do
get :index, params
expect(response).to be_successful
expect(response.body).to eq({
websites: [
{
id: website.id,
title: website.title,
domain: website.domain,
app_id: website.app_id,
app_key: website.app_key,
errors: website.grouped_issues.count,
subscribers: website.subscribers.count,
members: website.members.count
}
]
}.to_json)
end
end
it 'should give error if not logged in' do
get :index, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'POST #create' do
let(:params) { default_params.merge({website: { domain: 'http://www.google.com', title: 'google'} }) }
context 'if logged in' do
before { auth_member(member) }
it 'should render json' do
post :create, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'should create website' do
expect {
post :create, params
}.to change(Website, :count).by(1)
end
# Duplicate domains must not create a second Website record.
it 'should give error if website_exists' do
website = create :website, domain: 'http://www.google.com', title: 'Title for website'
expect {
post :create, { website:{ domain: 'http://www.google.com', title: 'Random title' }, format: :json }
}.to change(Website, :count).by(0)
end
it 'should create a website member' do
expect {
post :create, params
}.to change(WebsiteMember, :count).by(1)
end
# NOTE(review): this expected payload omits app_id/app_key; the later
# revision of this spec includes them.
it 'should render the right json' do
post :create, params
website = Website.find_by_domain('http://www.google.com')
expect(response).to be_successful
expect(response.body).to eq({
id: website.id,
domain: website.domain,
created_at: website.created_at,
updated_at: website.updated_at,
title: website.title
}.to_json)
end
end
it 'should give error if not logged in' do
post :create, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'GET #show' do
let(:params) { default_params.merge({ id: website.id}) }
context 'if logged in' do
before { auth_member(member) }
it 'should assign webiste' do
get :show, params
expect(assigns(:website)).to eq(website)
end
it 'should render json' do
get :show, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'gives a 404 if the website is not found' do
get :show, default_params.merge({id: website.id + 1})
expect(assigns(:website)).to be_nil
end
it 'should render the expected json' do
get :show, params
expect(response).to be_successful
expect(response.body).to eq({
id: website.id,
app_id: website.app_id,
app_key: website.app_key,
domain: website.domain,
title: website.title,
errors: website.grouped_issues.count,
subscribers: website.subscribers.count,
members: website.members.count
}.to_json)
end
end
it 'should give error if not logged in' do
get :show, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'DELETE #destroy' do
let(:params) { default_params.merge({ id: website.id, format: :js}) }
context 'it is logged in' do
before { auth_member(member) }
it 'should delete website' do
expect{
delete :destroy, params
}.to change(Website,:count).by(-1)
end
# A member must not be able to delete websites they do not belong to.
it 'should delete only website from current member' do
member2 = create :member
website2 = create :website, title: 'Title for website', domain: 'http://www.new-website.com'
website_member2 = create :website_member, member: member2, website: website2
expect {
delete :destroy, default_params.merge({ id: website2.id, format: :js })
}.to change(Website, :count).by(0)
end
# With another website remaining the response reloads the page…
it 'should reload page' do
website2 = create :website, title: 'Website title', domain: 'http://www.second-website.com'
website_member2 = create :website_member, member: member, website: website2
delete :destroy, params
expect(response.body).to eq("location.reload();")
expect(response.content_type).to eq('text/javascript')
expect(response).to have_http_status(200)
end
# …otherwise it redirects to the new-website page.
it 'should redirect_to new_website_path' do
delete :destroy, params
expect(response.body).to eq("location.href='/websites/new';")
expect(response.content_type).to eq('text/javascript')
expect(response).to have_http_status(200)
end
end
it 'should give error if not logged in' do
delete :destroy, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
end
Fixed failing test: the expected JSON for POST #create now includes app_key and app_id.
require 'rails_helper'
# Controller specs for Api::V1::WebsitesController: index/create/show/destroy,
# covering JSON payloads, authorization (401 when not logged in) and
# member scoping. render_views is enabled so response bodies are real JSON.
# This revision fixes POST #create's expected JSON (adds app_key/app_id).
describe Api::V1::WebsitesController, :type => :controller do
let(:member) { create :member }
let(:website) { create :website }
let(:group) {create :grouped_issue, website: website}
let(:subscriber) { create :subscriber, website: website }
# Eager (let!) so the member is always associated with the website.
let!(:website_member) { create :website_member, website: website, member: member }
let(:default_params) { {website_id: website.id, format: :json} }
render_views # this is used so we can check the json response from the controller
describe 'GET #index' do
let(:params) { default_params.merge({}) }
context 'if logged in' do
before { auth_member(member) }
it 'should render json' do
get :index, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'should assign websites' do
get :index, params
expect(assigns(:websites)).to eq([website])
end
it 'should render the right json' do
get :index, params
expect(response).to be_successful
expect(response.body).to eq({
websites: [
{
id: website.id,
title: website.title,
domain: website.domain,
app_id: website.app_id,
app_key: website.app_key,
errors: website.grouped_issues.count,
subscribers: website.subscribers.count,
members: website.members.count
}
]
}.to_json)
end
end
it 'should give error if not logged in' do
get :index, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'POST #create' do
let(:params) { default_params.merge({website: { domain: 'http://www.google.com', title: 'google'} }) }
context 'if logged in' do
before { auth_member(member) }
it 'should render json' do
post :create, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'should create website' do
expect {
post :create, params
}.to change(Website, :count).by(1)
end
# Duplicate domains must not create a second Website record.
it 'should give error if website_exists' do
website = create :website, domain: 'http://www.google.com', title: 'Title for website'
expect {
post :create, { website:{ domain: 'http://www.google.com', title: 'Random title' }, format: :json }
}.to change(Website, :count).by(0)
end
it 'should create a website member' do
expect {
post :create, params
}.to change(WebsiteMember, :count).by(1)
end
# Expected payload now includes app_key/app_id (the fix for the
# previously failing test).
it 'should render the right json' do
post :create, params
website = Website.find_by_domain('http://www.google.com')
expect(response).to be_successful
expect(response.body).to eq({
id: website.id,
domain: website.domain,
app_key: website.app_key,
app_id: website.app_id,
created_at: website.created_at,
updated_at: website.updated_at,
title: website.title
}.to_json)
end
end
it 'should give error if not logged in' do
post :create, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'GET #show' do
let(:params) { default_params.merge({ id: website.id}) }
context 'if logged in' do
before { auth_member(member) }
it 'should assign webiste' do
get :show, params
expect(assigns(:website)).to eq(website)
end
it 'should render json' do
get :show, params
expect(response).to be_successful
expect(response.content_type).to eq('application/json')
end
it 'gives a 404 if the website is not found' do
get :show, default_params.merge({id: website.id + 1})
expect(assigns(:website)).to be_nil
end
it 'should render the expected json' do
get :show, params
expect(response).to be_successful
expect(response.body).to eq({
id: website.id,
app_id: website.app_id,
app_key: website.app_key,
domain: website.domain,
title: website.title,
errors: website.grouped_issues.count,
subscribers: website.subscribers.count,
members: website.members.count
}.to_json)
end
end
it 'should give error if not logged in' do
get :show, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
describe 'DELETE #destroy' do
let(:params) { default_params.merge({ id: website.id, format: :js}) }
context 'it is logged in' do
before { auth_member(member) }
it 'should delete website' do
expect{
delete :destroy, params
}.to change(Website,:count).by(-1)
end
# A member must not be able to delete websites they do not belong to.
it 'should delete only website from current member' do
member2 = create :member
website2 = create :website, title: 'Title for website', domain: 'http://www.new-website.com'
website_member2 = create :website_member, member: member2, website: website2
expect {
delete :destroy, default_params.merge({ id: website2.id, format: :js })
}.to change(Website, :count).by(0)
end
# With another website remaining the response reloads the page…
it 'should reload page' do
website2 = create :website, title: 'Website title', domain: 'http://www.second-website.com'
website_member2 = create :website_member, member: member, website: website2
delete :destroy, params
expect(response.body).to eq("location.reload();")
expect(response.content_type).to eq('text/javascript')
expect(response).to have_http_status(200)
end
# …otherwise it redirects to the new-website page.
it 'should redirect_to new_website_path' do
delete :destroy, params
expect(response.body).to eq("location.href='/websites/new';")
expect(response.content_type).to eq('text/javascript')
expect(response).to have_http_status(200)
end
end
it 'should give error if not logged in' do
delete :destroy, params
expect(response.body).to eq({errors: ['Authorized users only.']}.to_json)
expect(response).to have_http_status(401)
end
end
end
# EDSC-32: As a user, I want to see a list of dataset facets so that I may find
# datasets by topic
# Have to manually click on facet titles to open and close facet lists
require "spec_helper"
describe "Dataset Facets", reset: false do
before :all do
Capybara.reset_sessions!
load_page :search, facets: true
end
context "facet listing" do
it "shows the first Project facet" do
find("h3.facet-title", text: 'Project').click
expect(page).to have_content("Project 2009_AN_NASA")
find("h3.facet-title", text: 'Project').click
end
it "shows the first Platforms facet" do
find("h3.facet-title", text: 'Platform').click
expect(page).to have_content("Platform AIRCRAFT")
find("h3.facet-title", text: 'Platform').click
end
it "shows the first Instruments facet" do
find("h3.facet-title", text: 'Instrument').click
expect(page).to have_content("Instrument AIRS")
find("h3.facet-title", text: 'Instrument').click
end
it "shows the first Sensors facet" do
find("h3.facet-title", text: 'Sensor').click
expect(page).to have_content("Sensor AA")
find("h3.facet-title", text: 'Sensor').click
end
it "shows the first 2D Coordinate Name facet" do
find("h3.facet-title", text: '2D Coordinate Name').click
expect(page).to have_content("2D Coordinate Name CALIPSO")
find("h3.facet-title", text: '2D Coordinate Name').click
end
it "shows the first Category Keyword facet" do
find("h3.facet-title", text: 'Category Keyword').click
expect(page).to have_content("Category Keyword ATMOSPHERE")
find("h3.facet-title", text: 'Category Keyword').click
end
it "shows the first Topic Keyword facet" do
find("h3.facet-title", text: 'Topic Keyword').click
expect(page).to have_content("Topic Keyword AGRICULTURE")
find("h3.facet-title", text: 'Topic Keyword').click
end
it "shows the first Term Keyword facet" do
find("h3.facet-title", text: 'Term Keyword').click
expect(page).to have_content("Term Keyword AEROSOLS")
find("h3.facet-title", text: 'Term Keyword').click
end
it "shows the first Variable Level 1 Keyword facet" do
find("h3.facet-title", text: 'Variable Level 1 Keyword').click
expect(page).to have_content("Variable Level 1 Keyword AIR TEMPERATURE")
find("h3.facet-title", text: 'Variable Level 1 Keyword').click
end
it "shows the first Variable Level 2 Keyword facet" do
find("h3.facet-title", text: 'Variable Level 2 Keyword').click
expect(page).to have_content("Variable Level 2 Keyword ALBATROSSES/PETRELS AND ALLIES")
find("h3.facet-title", text: 'Variable Level 2 Keyword').click
end
it "shows the first Variable Level 3 Keyword facet" do
find("h3.facet-title", text: 'Variable Level 3 Keyword').click
expect(page).to have_content("Variable Level 3 Keyword ASH/DUST COMPOSITION")
find("h3.facet-title", text: 'Variable Level 3 Keyword').click
end
it "shows the first Detailed Variable Keyword facet" do
find("h3.facet-title", text: 'Detailed Variable Keyword').click
expect(page).to have_content("Detailed Variable Keyword 2.0 * TB(19V) - TB(21V)")
find("h3.facet-title", text: 'Detailed Variable Keyword').click
end
it "shows the first Processing Level facet" do
find("h3.facet-title", text: 'Processing level').click
expect(page).to have_content("Processing level 0")
find("h3.facet-title", text: 'Processing level').click
end
it "collapses and expands facet lists by type" do
expect(page).to have_css("#collapse2.facets-list-hide")
find("h3.facet-title", text: "Project").click
expect(page).to have_css("#collapse2.facets-list-show")
find("h3.facet-title", text: "Project").click
expect(page).to have_css("#collapse2.facets-list-hide")
end
end
context 'when closing the facet list' do
before :all do
expect(page).to have_no_link('Browse Datasets')
page.find('#master-overlay-parent .master-overlay-hide-parent').click
end
it "displays links to re-open the facet list" do
expect(page).to have_link('Browse Datasets')
end
context 're-opening the facet list' do
before :all do
click_link 'Browse Datasets'
end
it 'hides the link to show facets' do
expect(page).to have_no_link('Browse Datasets')
end
end
end
# Applying and removing facets updates the query, the selected-facets
# panel, the dataset results, and the sibling facet lists.
context "selecting facets" do
  after :each do
    reset_search
  end

  it "shows the user which facets have been applied to the query" do
    # select a project
    find("h3.facet-title", text: 'Project').click
    find(".facets-item", text: "EOSDIS").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("EOSDIS")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # select a platform
    find("h3.facet-title", text: 'Platform').click
    find(".facets-item", text: "FIELD INVESTIGATION").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("FIELD INVESTIGATION")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # select a second project
    find(".facets-item", text: "ESIP").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("EOSDIS and ESIP")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # re-collapse the lists opened above
    find("h3.facet-title", text: 'Project').click
    find("h3.facet-title", text: 'Platform').click
  end

  context "removes facet from query on second click" do
    before :all do
      find("h3.facet-title", text: 'Project').click
    end

    before :each do
      find(".facets-item", text: "EOSDIS").click
    end

    after :all do
      find("h3.facet-title", text: 'Project').click
    end

    it "clicks remove from selected facets" do
      within(:css, '.selected-facets-panel') do
        expect(page).to have_content("EOSDIS")
        expect(page).to have_css(".facets-item.selected")
        find(".facets-item", text: "EOSDIS").click
      end
      expect(page).to have_no_css(".facets-item.selected")
    end

    it "clicks remove from facet lists" do
      find("p.facets-item", text: "EOSDIS").click
      expect(page).to have_no_css(".facets-item.selected")
    end
  end

  it "hides empty facet lists" do
    expect(page).to have_css(".panel.processing-level .panel-heading")
    find("h3.facet-title", text: 'Project').click
    find(".project .facets-item", text: "AQUA").click
    expect(page).to have_no_css(".panel.processing-level .panel-heading")
    find("h3.facet-title", text: 'Project').click
  end

  it "updates the dataset results" do
    expect(page).to have_content("15 Minute Stream Flow Data: USGS (FIFE)")
    expect(page).to have_no_content("AIRS/Aqua Level 1B AMSU (A1/A2) geolocated and calibrated brightness temperatures V005")
    find("h3.facet-title", text: 'Project').click
    find(".project .facets-item", text: "AQUA").click
    expect(page).to have_no_content("15 Minute Stream Flow Data: USGS (FIFE)")
    expect(page).to have_content("AIRS/Aqua Level 1B AMSU (A1/A2) geolocated and calibrated brightness temperatures V005")
    find("h3.facet-title", text: 'Project').click
  end

  it "updates facet lists" do
    find("h3.facet-title", text: 'Project').click
    within(:css, ".project") do
      expect(page).to have_content("AQUA")
      expect(page).to have_content("AURA")
      find(".facets-item", text: "AQUA").click
      expect(page).to have_no_content("AURA")
    end
    find("h3.facet-title", text: 'Project').click
  end

  it "keeps facet lists collapsed after selecting and removing a facet" do
    expect(page).to have_css("#collapse2.facets-list-hide")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-show")
    find("h3.facet-title", text: 'Platform').click
    within(:css, ".platform") do
      find(".facets-item", text: "AIRCRAFT").click
    end
    wait_for_xhr
    expect(page).to have_css("#collapse2.facets-list-show")
    within(:css, ".selected-facets-panel") do
      find(".facets-item", text: "AIRCRAFT").click
    end
    wait_for_xhr
    expect(page).to have_no_css(".selected-facets-panel.facets")
    expect(page).to have_css("#collapse2.facets-list-show")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-hide")
    find("h3.facet-title", text: 'Platform').click
  end
end
# Applied facets remain visible (with a count of 0) even when the
# combined facet + keyword filters match no datasets.
context "when applied facets and search terms filter the datasets list to no results" do
  before(:all) do
    find("h3.facet-title", text: 'Project').click
    find(".facets-item", text: "EOSDIS").click
    fill_in :keywords, with: "somestringthatmatchesnodatasets"
    wait_for_xhr
  end

  after(:all) do
    reset_search
    find("h3.facet-title", text: 'Project').click
  end

  it "continues to display applied facets with counts of 0" do
    within '.selected-facets-panel' do
      expect(page).to have_content("EOSDIS")
    end
    expect(page).to have_content("EOSDIS (0)")
  end
end
# EDSC-622 - We had been displaying duplicate entries with special characters escaped
context "when applying facets containing special characters" do
  before(:all) do
    find("h3.facet-title", text: 'Term Keyword').click
    find(".facets-item", text: "ANIMALS/VERTEBRATES").click
    wait_for_xhr
  end

  after(:all) do
    reset_search
    find("h3.facet-title", text: 'Term Keyword').click
  end

  it "does not display a duplicate entry with special characters escaped" do
    expect(page).to have_no_content("ANIMALS%2FVERTEBRATES")
  end

  it "does displays the selected entry" do
    expect(page).to have_content("ANIMALS/VERTEBRATES")
  end
end
end
EDSC-684 Added a test.
# EDSC-32: As a user, I want to see a list of dataset facets so that I may find
# datasets by topic
# Have to manually click on facet titles to open and close facet lists
require "spec_helper"
describe "Dataset Facets", reset: false do
# One-time setup for the whole suite (the describe uses reset: false):
# start from a fresh browser session with the facet UI enabled.
before :all do
  Capybara.reset_sessions!
  load_page :search, facets: true
end
# Each facet category should render and show its first (alphabetical)
# value; each example opens the list, checks content, and re-collapses.
context "facet listing" do
  it "shows the first Project facet" do
    find("h3.facet-title", text: 'Project').click
    expect(page).to have_content("Project 2009_AN_NASA")
    find("h3.facet-title", text: 'Project').click
  end

  it "shows the first Platforms facet" do
    find("h3.facet-title", text: 'Platform').click
    expect(page).to have_content("Platform AIRCRAFT")
    find("h3.facet-title", text: 'Platform').click
  end

  it "shows the first Instruments facet" do
    find("h3.facet-title", text: 'Instrument').click
    expect(page).to have_content("Instrument AIRS")
    find("h3.facet-title", text: 'Instrument').click
  end

  it "shows the first Sensors facet" do
    find("h3.facet-title", text: 'Sensor').click
    expect(page).to have_content("Sensor AA")
    find("h3.facet-title", text: 'Sensor').click
  end

  it "shows the first 2D Coordinate Name facet" do
    find("h3.facet-title", text: '2D Coordinate Name').click
    expect(page).to have_content("2D Coordinate Name CALIPSO")
    find("h3.facet-title", text: '2D Coordinate Name').click
  end

  it "shows the first Category Keyword facet" do
    find("h3.facet-title", text: 'Category Keyword').click
    expect(page).to have_content("Category Keyword ATMOSPHERE")
    find("h3.facet-title", text: 'Category Keyword').click
  end

  it "shows the first Topic Keyword facet" do
    find("h3.facet-title", text: 'Topic Keyword').click
    expect(page).to have_content("Topic Keyword AGRICULTURE")
    find("h3.facet-title", text: 'Topic Keyword').click
  end

  it "shows the first Term Keyword facet" do
    find("h3.facet-title", text: 'Term Keyword').click
    expect(page).to have_content("Term Keyword AEROSOLS")
    find("h3.facet-title", text: 'Term Keyword').click
  end

  it "shows the first Variable Level 1 Keyword facet" do
    find("h3.facet-title", text: 'Variable Level 1 Keyword').click
    expect(page).to have_content("Variable Level 1 Keyword AIR TEMPERATURE")
    find("h3.facet-title", text: 'Variable Level 1 Keyword').click
  end

  it "shows the first Variable Level 2 Keyword facet" do
    find("h3.facet-title", text: 'Variable Level 2 Keyword').click
    expect(page).to have_content("Variable Level 2 Keyword ALBATROSSES/PETRELS AND ALLIES")
    find("h3.facet-title", text: 'Variable Level 2 Keyword').click
  end

  it "shows the first Variable Level 3 Keyword facet" do
    find("h3.facet-title", text: 'Variable Level 3 Keyword').click
    expect(page).to have_content("Variable Level 3 Keyword ASH/DUST COMPOSITION")
    find("h3.facet-title", text: 'Variable Level 3 Keyword').click
  end

  it "shows the first Detailed Variable Keyword facet" do
    find("h3.facet-title", text: 'Detailed Variable Keyword').click
    expect(page).to have_content("Detailed Variable Keyword 2.0 * TB(19V) - TB(21V)")
    find("h3.facet-title", text: 'Detailed Variable Keyword').click
  end

  it "shows the first Processing Level facet" do
    find("h3.facet-title", text: 'Processing level').click
    expect(page).to have_content("Processing level 0")
    find("h3.facet-title", text: 'Processing level').click
  end

  it "collapses and expands facet lists by type" do
    expect(page).to have_css("#collapse2.facets-list-hide")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-show")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-hide")
  end
end
# Hiding the facet overlay exposes a "Browse Datasets" link that
# re-opens it; the link disappears again once the overlay is shown.
context 'when closing the facet list' do
  before :all do
    expect(page).to have_no_link('Browse Datasets')
    page.find('#master-overlay-parent .master-overlay-hide-parent').click
  end

  it "displays links to re-open the facet list" do
    expect(page).to have_link('Browse Datasets')
  end

  context 're-opening the facet list' do
    before :all do
      click_link 'Browse Datasets'
    end

    it 'hides the link to show facets' do
      expect(page).to have_no_link('Browse Datasets')
    end
  end
end
# Applying and removing facets updates the query, the selected-facets
# panel, the dataset results, and the sibling facet lists.
context "selecting facets" do
  after :each do
    reset_search
  end

  it "shows the user which facets have been applied to the query" do
    # select a project
    find("h3.facet-title", text: 'Project').click
    find(".facets-item", text: "EOSDIS").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("EOSDIS")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # select a platform
    find("h3.facet-title", text: 'Platform').click
    find(".facets-item", text: "FIELD INVESTIGATION").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("FIELD INVESTIGATION")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # select a second project
    find(".facets-item", text: "ESIP").click
    wait_for_xhr
    within(:css, '.selected-facets-panel') do
      expect(page).to have_content("EOSDIS and ESIP")
      expect(page).to have_css(".facets-item.selected")
    end
    expect(page).to have_css("p.facets-item.selected")

    # re-collapse the lists opened above
    find("h3.facet-title", text: 'Project').click
    find("h3.facet-title", text: 'Platform').click
  end

  context "removes facet from query on second click" do
    before :all do
      find("h3.facet-title", text: 'Project').click
    end

    before :each do
      find(".facets-item", text: "EOSDIS").click
    end

    after :all do
      find("h3.facet-title", text: 'Project').click
    end

    it "clicks remove from selected facets" do
      within(:css, '.selected-facets-panel') do
        expect(page).to have_content("EOSDIS")
        expect(page).to have_css(".facets-item.selected")
        find(".facets-item", text: "EOSDIS").click
      end
      expect(page).to have_no_css(".facets-item.selected")
    end

    it "clicks remove from facet lists" do
      find("p.facets-item", text: "EOSDIS").click
      expect(page).to have_no_css(".facets-item.selected")
    end
  end

  it "hides empty facet lists" do
    expect(page).to have_css(".panel.processing-level .panel-heading")
    find("h3.facet-title", text: 'Project').click
    find(".project .facets-item", text: "AQUA").click
    expect(page).to have_no_css(".panel.processing-level .panel-heading")
    find("h3.facet-title", text: 'Project').click
  end

  it "updates the dataset results" do
    expect(page).to have_content("15 Minute Stream Flow Data: USGS (FIFE)")
    expect(page).to have_no_content("AIRS/Aqua Level 1B AMSU (A1/A2) geolocated and calibrated brightness temperatures V005")
    find("h3.facet-title", text: 'Project').click
    find(".project .facets-item", text: "AQUA").click
    expect(page).to have_no_content("15 Minute Stream Flow Data: USGS (FIFE)")
    expect(page).to have_content("AIRS/Aqua Level 1B AMSU (A1/A2) geolocated and calibrated brightness temperatures V005")
    find("h3.facet-title", text: 'Project').click
  end

  it "updates facet lists" do
    find("h3.facet-title", text: 'Project').click
    within(:css, ".project") do
      expect(page).to have_content("AQUA")
      expect(page).to have_content("AURA")
      find(".facets-item", text: "AQUA").click
      expect(page).to have_no_content("AURA")
    end
    find("h3.facet-title", text: 'Project').click
  end

  it "keeps facet lists collapsed after selecting and removing a facet" do
    expect(page).to have_css("#collapse2.facets-list-hide")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-show")
    find("h3.facet-title", text: 'Platform').click
    within(:css, ".platform") do
      find(".facets-item", text: "AIRCRAFT").click
    end
    wait_for_xhr
    expect(page).to have_css("#collapse2.facets-list-show")
    within(:css, ".selected-facets-panel") do
      find(".facets-item", text: "AIRCRAFT").click
    end
    wait_for_xhr
    expect(page).to have_no_css(".selected-facets-panel.facets")
    expect(page).to have_css("#collapse2.facets-list-show")
    find("h3.facet-title", text: "Project").click
    expect(page).to have_css("#collapse2.facets-list-hide")
    find("h3.facet-title", text: 'Platform').click
  end
end
# Applied facets remain visible (with a count of 0) even when the
# combined facet + keyword filters match no datasets.
context "when applied facets and search terms filter the datasets list to no results" do
  before(:all) do
    find("h3.facet-title", text: 'Project').click
    find(".facets-item", text: "EOSDIS").click
    fill_in :keywords, with: "somestringthatmatchesnodatasets"
    wait_for_xhr
  end

  after(:all) do
    reset_search
    find("h3.facet-title", text: 'Project').click
  end

  it "continues to display applied facets with counts of 0" do
    within '.selected-facets-panel' do
      expect(page).to have_content("EOSDIS")
    end
    expect(page).to have_content("EOSDIS (0)")
  end
end
# EDSC-622 - We had been displaying duplicate entries with special characters escaped
context "when applying facets containing special characters" do
  before(:all) do
    find("h3.facet-title", text: 'Term Keyword').click
    find(".facets-item", text: "ANIMALS/VERTEBRATES").click
    wait_for_xhr
  end

  after(:all) do
    reset_search
    find("h3.facet-title", text: 'Term Keyword').click
  end

  it "does not display a duplicate entry with special characters escaped" do
    expect(page).to have_no_content("ANIMALS%2FVERTEBRATES")
  end

  it "does displays the selected entry" do
    expect(page).to have_content("ANIMALS/VERTEBRATES")
  end
end
# A facet value carrying a trailing space should still yield matching
# counts between the selected-facet entry and the dataset results pane.
context "when applying facets containing trailing whitespace" do
  before :all do
    find("h3.facet-title", text: 'Platform').click
    find(".facets-item", text: "AQUARIUS_SAC-D ").click
    wait_for_xhr
  end

  after :all do
    reset_search
    find("h3.facet-title", text: 'Platform').click
  end

  it "displays correct count on dataset list pane" do
    facet_count = 0
    dataset_count = -1
    # get count from facet list
    within '#master-overlay-parent' do
      facet_count = find('h3', text: 'Platform').parent.parent.find('p.facets-item.selected').all('span')[1].text
    end
    # get count from dataset list pane
    within '#dataset-results' do
      dataset_count = find('header').find('h2').find('strong').text
    end
    expect(facet_count).to eq(dataset_count)
  end
end
end
|
# Updates local files with latest from html5-boilerplate github repository.
require 'open-uri'

BASE_DIR = File.dirname(__FILE__)
BASE_URL = 'https://raw.github.com/h5bp/html5-boilerplate/master'

# Downloads +url+ and writes it verbatim (binary-safe) to +local_path+.
def update_file(local_path, url)
  puts "Updating #{local_path}"
  # Kernel#open on a URL string was deprecated in Ruby 2.7 and removed in
  # 3.0; use URI.open (from open-uri) for the remote side and File.open
  # explicitly for the local side.
  File.open(local_path, 'wb') do |file|
    file << URI.open(url).read
  end
end

# Files that live under content/ both in the repo and locally.
%w(
  404.html
  apple-touch-icon-114x114-precomposed.png
  apple-touch-icon-57x57-precomposed.png
  apple-touch-icon-72x72-precomposed.png
  apple-touch-icon-precomposed.png
  apple-touch-icon.png
  crossdomain.xml
  favicon.ico
  humans.txt
  robots.txt
  js/libs/jquery-1.7.1.js
  js/libs/modernizr-2.5.2.min.js
  js/plugins.js
  js/script.js
).each do |file|
  update_file "#{BASE_DIR}/content/#{file}", "#{BASE_URL}/#{file}"
end

# Dotfiles are stored locally with an h5bp prefix (e.g. h5bp.htaccess).
%w(
  .htaccess
  .gitignore
).each do |file|
  update_file "#{BASE_DIR}/h5bp#{file}", "#{BASE_URL}/#{file}"
end
update update
# Updates local files with latest from html5-boilerplate github repository.
require 'open-uri'

BASE_DIR = File.dirname(__FILE__)
BASE_URL = 'https://raw.github.com/h5bp/html5-boilerplate/master'

# Maps repo-relative paths to local paths (relative to this file).
{
  '404.html' => 'content/404.html',
  'apple-touch-icon-114x114-precomposed.png' => 'content/apple-touch-icon-114x114-precomposed.png',
  'apple-touch-icon-57x57-precomposed.png' => 'content/apple-touch-icon-57x57-precomposed.png',
  'apple-touch-icon-72x72-precomposed.png' => 'content/apple-touch-icon-72x72-precomposed.png',
  'apple-touch-icon-precomposed.png' => 'content/apple-touch-icon-precomposed.png',
  'apple-touch-icon.png' => 'content/apple-touch-icon.png',
  'crossdomain.xml' => 'content/crossdomain.xml',
  'favicon.ico' => 'content/favicon.ico',
  'humans.txt' => 'content/humans.txt',
  'robots.txt' => 'content/robots.txt',
  'js/libs/jquery-1.7.1.js' => 'content/js/libs/jquery.js',
  'js/libs/modernizr-2.5.2.min.js' => 'content/js/libs/modernizr.js',
  'js/plugins.js' => 'content/js/_plugins.js',
  'js/script.js' => 'content/js/_script.js',
  '.htaccess' => 'h5bp.htaccess',
  '.gitignore' => 'h5bp.gitignore',
}.each do |repo_file, local_file|
  puts "Updating #{local_file}"
  # Kernel#open on a URL string was deprecated in Ruby 2.7 and removed in
  # 3.0; use URI.open for the download and File.open for the local write.
  File.open("#{BASE_DIR}/#{local_file}", 'wb') do |file|
    file << URI.open("#{BASE_URL}/#{repo_file}").read
  end
end
|
# encoding: UTF-8

# A git project checked out on this worker box.  Knows how to clone or
# update its repository and to run commands (or boot the app) against a
# given commit, posting results back over HTTP.
#
# NOTE(review): repo_name, repo_url, after_commit, push, CLIENT_APP,
# default_local_location and write_commits are not defined in this file;
# presumably ServerFiles or the surrounding app supplies them — verify.
class Project
  include ServerFiles

  # Options that must be supplied (non-nil) to #initialize.
  REQUIRED_OPTIONS = [:name, :url, :commit, :user, :repos_dir, :results_location, :logger]
  # Port the payload app listens on while servicing a request.
  PAYLOAD_PORT = 4005

  attr_accessor *REQUIRED_OPTIONS

  # Builds a project from an options hash; raises if any required option
  # is missing or nil.
  def initialize(opts = {})
    # Assign through the writers.  The original assigned to bare locals
    # (name = opts[:name], ...), which were silently discarded and left
    # every accessor nil.
    REQUIRED_OPTIONS.each { |opt| send("#{opt}=", opts[opt]) }
    # The original used the nonexistent Array#select? and send(:opt),
    # which calls a literal method named "opt".
    missing = REQUIRED_OPTIONS.select { |opt| send(opt).nil? }
    unless missing.empty?
      raise "missing a required option (#{REQUIRED_OPTIONS}) missing: #{missing}"
    end
  end

  # "user/repo" key identifying the project.
  def project_key
    "#{user}/#{repo_name}"
  end

  # "user/repo/sha" key identifying one commit of the project.
  def commit_key
    "#{project_key}/#{commit}"
  end

  # Local checkout path (repos_dir is expected to end with a separator).
  def repo_location
    "#{repos_dir}#{name}"
  end

  # git-pull an existing checkout, or clone a fresh one into repos_dir.
  def create_or_update_repo
    # File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
    if File.exist?(repo_location)
      logger.info("update repo")
      `cd #{repo_location}; git pull`
    else
      logger.info("create repo")
      `cd #{repos_dir}; git clone #{repo_url}`
    end
  end

  # Boots the project's app via foreman in a forked child, POSTs
  # project_request to it, writes the response to results_location, and
  # always kills the child afterwards.  Returns the response body (or a
  # fallback error string).
  def process_request(project_request)
    create_or_update_repo
    results = "error running systemu"
    Dir.chdir(repo_location) do
      cid = fork do
        # Scrub CGI-ish env vars inherited from the web front end so the
        # payload app boots cleanly.
        ENV['REQUEST_METHOD'] = nil
        ENV['REQUEST_URI'] = nil
        ENV['QUERY_STRING'] = nil
        ENV['PWD'] = nil
        ENV['DOCUMENT_ROOT'] = nil
        ENV['BUNDLE_GEMFILE'] = "#{repo_location}/Gemfile"
        full_cmd = "cd #{repo_location}; LC_ALL=en_US.UTF-8 LC_CTYPE=en_US.UTF-8 PORT=#{PAYLOAD_PORT} foreman start > /opt/bitnami/apps/server_responder/log/foreman.log"
        logger.info "running: #{full_cmd}"
        exec(full_cmd)
      end
      puts "running child is #{cid}"
      begin
        logger.info "sleep while app boots"
        sleep(7)
        logger.info "waking up to hit app"
        results = RestClient.post "http://localhost:#{PAYLOAD_PORT}#{project_request}", {}
        logger.error "results: #{results}"
        write_file(results_location, results)
      rescue => error
        error_msg = "error hitting app #{error}"
        logger.error error_msg
        error_trace = "error trace #{error.backtrace.join("\n")}"
        logger.error error_trace
        write_file(results_location, "#{error_msg}\n #{error_trace}")
      ensure
        begin
          logger.info "killing child processes"
          Process.kill '-SIGINT', cid # kill the daemon
        rescue Errno::ESRCH
          logger.error "error killing process likely crashed when running"
        end
      end
    end
    results
  end

  # Runs cmd against this project's commit; the magic commit value
  # 'history' runs it over the last 60 days of daily commits instead.
  def process_cmd(cmd)
    create_or_update_repo
    if commit == 'history'
      ProjectCommands.project_history_for_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    else
      ProjectCommands.project_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    end
  end

  # Handles a GitHub post-receive hook: runs the repo's configured
  # .deferred_server command (default "churn"), records the JSON results,
  # and notifies the client app.
  def process_github_hook
    create_or_update_repo
    deferred_server_config = "#{repo_location}/.deferred_server"
    cmd = "churn"
    if File.exist?(deferred_server_config)
      cmd = File.read(deferred_server_config)
      results = nil
      Dir.chdir(repo_location) do
        if File.exist?("#{repo_location}/Gemfile")
          `chmod +w Gemfile.lock`
          `gem install bundler --no-ri --no-rdoc`
          `BUNDLE_GEMFILE=#{repo_location}/Gemfile && bundle install`
        end
        full_cmd = "BUNDLE_GEMFILE=#{repo_location}/Gemfile && #{cmd}"
        logger.info "dir: #{repo_location} && running: #{full_cmd}"
        results = `#{full_cmd} 2>&1`
      end
    else
      results = `cd #{repo_location}; #{cmd}`
    end
    puts "results: #{results}"
    exit_status = $?.exitstatus
    json_results = {
      :cmd_run => cmd,
      :exit_status => exit_status,
      :results => results
    }
    write_file(commit_key, json_results.to_json)
    write_commits(project_key, after_commit, commit_key, push)
    RestClient.post CLIENT_APP + "/request_complete",
                    {:project_key => project_key, :commit_key => commit_key}
    json_results
  end

  # Runs cmd once per day over the last 60 days of history, one commit
  # per day, recording results for each distinct commit.
  # NOTE(review): 60.days.ago requires ActiveSupport — verify it is loaded.
  def self.project_history_for_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    from_date = 60.days.ago.to_date
    until_date = Date.today
    completed_commits = []
    # from https://github.com/metricfu/metric_fu/issues/107#issuecomment-21747147
    (from_date..until_date).each do |date|
      git_log_cmd = "cd #{repo_location}; git log --max-count=1 --before=#{date} --after=#{date - 1} --format='%H'"
      puts "git_log_cmd: #{git_log_cmd}"
      current_git_commit = `#{git_log_cmd}`.to_s.strip
      puts "commit #{current_git_commit} for date #{date}"
      if current_git_commit != '' && !completed_commits.include?(current_git_commit)
        completed_commits << current_git_commit
        current_commit_key = "#{project_key}/#{current_git_commit}"
        current_results_location = results_location.gsub('_history_', "_#{current_git_commit}_")
        project_command(project_key, repo_location, default_local_location, repo_url, current_git_commit, current_commit_key, cmd, current_results_location)
        RestClient.post("http://churn.picoappz.com/#{project_key}/commits/#{current_git_commit}", :rechurn => 'false')
      end
    end
    {:project_key => project_key, :commit_key => commit_key}
  end

  # Checks out the given commit, runs cmd, records the JSON results to
  # both the commit key and the results location, and notifies the
  # responder app.  Returns the raw command output.
  def self.project_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    if File.exist?(repo_location)
      puts("update repo")
      `cd #{repo_location}; git pull`
    else
      puts("create repo")
      # The original cd'd into the undefined local_repos (a NameError on
      # this path); default_local_location is the otherwise-unused
      # parameter clearly intended to hold the clone parent directory.
      `cd #{default_local_location}; git clone #{repo_url}`
    end
    full_command = "cd #{repo_location}; git checkout #{commit}; #{cmd}"
    puts("running: #{full_command}")
    results = `#{full_command}`
    # temporary hack for the empty results not creating files / valid output
    if results == ''
      results = "cmd #{cmd} completed with no output"
    end
    puts "results: #{results}"
    exit_status = $?.exitstatus
    json_results = {
      :cmd_run => cmd,
      :exit_status => exit_status,
      :results => results
    }
    write_file(commit_key, json_results.to_json)
    write_file(results_location, json_results.to_json)
    RestClient.post "http://git-hook-responder.herokuapp.com" + "/request_complete",
                    {:project_key => project_key, :commit_key => commit_key}
    results
  end
end
this isn't js
# encoding: UTF-8

# A git project checked out on this worker box.  Knows how to clone or
# update its repository and to run commands (or boot the app) against a
# given commit, posting results back over HTTP.
#
# NOTE(review): repo_name, repo_url, after_commit, push, CLIENT_APP,
# default_local_location and write_commits are not defined in this file;
# presumably ServerFiles or the surrounding app supplies them — verify.
class Project
  include ServerFiles

  # Options that must be supplied (non-nil) to #initialize.
  REQUIRED_OPTIONS = [:name, :url, :commit, :user, :repos_dir, :results_location, :logger]
  # Port the payload app listens on while servicing a request.
  PAYLOAD_PORT = 4005

  attr_accessor *REQUIRED_OPTIONS

  # Builds a project from an options hash; raises if any required option
  # is missing or nil.
  def initialize(opts = {})
    # Assign through the writers.  The original assigned to bare locals
    # (name = opts[:name], ...), which were silently discarded and left
    # every accessor nil — so the validation below could never pass.
    REQUIRED_OPTIONS.each { |opt| send("#{opt}=", opts[opt]) }
    # Array#select, not the nonexistent #select?.
    missing = REQUIRED_OPTIONS.select { |opt| send(opt).nil? }
    unless missing.empty?
      raise "missing a required option (#{REQUIRED_OPTIONS}) missing: #{missing}"
    end
  end

  # "user/repo" key identifying the project.
  def project_key
    "#{user}/#{repo_name}"
  end

  # "user/repo/sha" key identifying one commit of the project.
  def commit_key
    "#{project_key}/#{commit}"
  end

  # Local checkout path (repos_dir is expected to end with a separator).
  def repo_location
    "#{repos_dir}#{name}"
  end

  # git-pull an existing checkout, or clone a fresh one into repos_dir.
  def create_or_update_repo
    # File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
    if File.exist?(repo_location)
      logger.info("update repo")
      `cd #{repo_location}; git pull`
    else
      logger.info("create repo")
      `cd #{repos_dir}; git clone #{repo_url}`
    end
  end

  # Boots the project's app via foreman in a forked child, POSTs
  # project_request to it, writes the response to results_location, and
  # always kills the child afterwards.  Returns the response body (or a
  # fallback error string).
  def process_request(project_request)
    create_or_update_repo
    results = "error running systemu"
    Dir.chdir(repo_location) do
      cid = fork do
        # Scrub CGI-ish env vars inherited from the web front end so the
        # payload app boots cleanly.
        ENV['REQUEST_METHOD'] = nil
        ENV['REQUEST_URI'] = nil
        ENV['QUERY_STRING'] = nil
        ENV['PWD'] = nil
        ENV['DOCUMENT_ROOT'] = nil
        ENV['BUNDLE_GEMFILE'] = "#{repo_location}/Gemfile"
        full_cmd = "cd #{repo_location}; LC_ALL=en_US.UTF-8 LC_CTYPE=en_US.UTF-8 PORT=#{PAYLOAD_PORT} foreman start > /opt/bitnami/apps/server_responder/log/foreman.log"
        logger.info "running: #{full_cmd}"
        exec(full_cmd)
      end
      puts "running child is #{cid}"
      begin
        logger.info "sleep while app boots"
        sleep(7)
        logger.info "waking up to hit app"
        results = RestClient.post "http://localhost:#{PAYLOAD_PORT}#{project_request}", {}
        logger.error "results: #{results}"
        write_file(results_location, results)
      rescue => error
        error_msg = "error hitting app #{error}"
        logger.error error_msg
        error_trace = "error trace #{error.backtrace.join("\n")}"
        logger.error error_trace
        write_file(results_location, "#{error_msg}\n #{error_trace}")
      ensure
        begin
          logger.info "killing child processes"
          Process.kill '-SIGINT', cid # kill the daemon
        rescue Errno::ESRCH
          logger.error "error killing process likely crashed when running"
        end
      end
    end
    results
  end

  # Runs cmd against this project's commit; the magic commit value
  # 'history' runs it over the last 60 days of daily commits instead.
  def process_cmd(cmd)
    create_or_update_repo
    if commit == 'history'
      ProjectCommands.project_history_for_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    else
      ProjectCommands.project_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    end
  end

  # Handles a GitHub post-receive hook: runs the repo's configured
  # .deferred_server command (default "churn"), records the JSON results,
  # and notifies the client app.
  def process_github_hook
    create_or_update_repo
    deferred_server_config = "#{repo_location}/.deferred_server"
    cmd = "churn"
    if File.exist?(deferred_server_config)
      cmd = File.read(deferred_server_config)
      results = nil
      Dir.chdir(repo_location) do
        if File.exist?("#{repo_location}/Gemfile")
          `chmod +w Gemfile.lock`
          `gem install bundler --no-ri --no-rdoc`
          `BUNDLE_GEMFILE=#{repo_location}/Gemfile && bundle install`
        end
        full_cmd = "BUNDLE_GEMFILE=#{repo_location}/Gemfile && #{cmd}"
        logger.info "dir: #{repo_location} && running: #{full_cmd}"
        results = `#{full_cmd} 2>&1`
      end
    else
      results = `cd #{repo_location}; #{cmd}`
    end
    puts "results: #{results}"
    exit_status = $?.exitstatus
    json_results = {
      :cmd_run => cmd,
      :exit_status => exit_status,
      :results => results
    }
    write_file(commit_key, json_results.to_json)
    write_commits(project_key, after_commit, commit_key, push)
    RestClient.post CLIENT_APP + "/request_complete",
                    {:project_key => project_key, :commit_key => commit_key}
    json_results
  end

  # Runs cmd once per day over the last 60 days of history, one commit
  # per day, recording results for each distinct commit.
  # NOTE(review): 60.days.ago requires ActiveSupport — verify it is loaded.
  def self.project_history_for_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    from_date = 60.days.ago.to_date
    until_date = Date.today
    completed_commits = []
    # from https://github.com/metricfu/metric_fu/issues/107#issuecomment-21747147
    (from_date..until_date).each do |date|
      git_log_cmd = "cd #{repo_location}; git log --max-count=1 --before=#{date} --after=#{date - 1} --format='%H'"
      puts "git_log_cmd: #{git_log_cmd}"
      current_git_commit = `#{git_log_cmd}`.to_s.strip
      puts "commit #{current_git_commit} for date #{date}"
      if current_git_commit != '' && !completed_commits.include?(current_git_commit)
        completed_commits << current_git_commit
        current_commit_key = "#{project_key}/#{current_git_commit}"
        current_results_location = results_location.gsub('_history_', "_#{current_git_commit}_")
        project_command(project_key, repo_location, default_local_location, repo_url, current_git_commit, current_commit_key, cmd, current_results_location)
        RestClient.post("http://churn.picoappz.com/#{project_key}/commits/#{current_git_commit}", :rechurn => 'false')
      end
    end
    {:project_key => project_key, :commit_key => commit_key}
  end

  # Checks out the given commit, runs cmd, records the JSON results to
  # both the commit key and the results location, and notifies the
  # responder app.  Returns the raw command output.
  def self.project_command(project_key, repo_location, default_local_location, repo_url, commit, commit_key, cmd, results_location)
    if File.exist?(repo_location)
      puts("update repo")
      `cd #{repo_location}; git pull`
    else
      puts("create repo")
      # The original cd'd into the undefined local_repos (a NameError on
      # this path); default_local_location is the otherwise-unused
      # parameter clearly intended to hold the clone parent directory.
      `cd #{default_local_location}; git clone #{repo_url}`
    end
    full_command = "cd #{repo_location}; git checkout #{commit}; #{cmd}"
    puts("running: #{full_command}")
    results = `#{full_command}`
    # temporary hack for the empty results not creating files / valid output
    if results == ''
      results = "cmd #{cmd} completed with no output"
    end
    puts "results: #{results}"
    exit_status = $?.exitstatus
    json_results = {
      :cmd_run => cmd,
      :exit_status => exit_status,
      :results => results
    }
    write_file(commit_key, json_results.to_json)
    write_file(results_location, json_results.to_json)
    RestClient.post "http://git-hook-responder.herokuapp.com" + "/request_complete",
                    {:project_key => project_key, :commit_key => commit_key}
    results
  end
end
|
module Protobuf
  # Version of this gem.
  VERSION = '2.2.5'
  # Version of the protoc compiler this release targets.
  PROTOC_VERSION = '2.4.1'
end
bump to 2.2.6
module Protobuf
  # Version of this gem.
  VERSION = '2.2.6'
  # Version of the protoc compiler this release targets.
  PROTOC_VERSION = '2.4.1'
end
|
module RabidMQ
  # Version of this gem.
  VERSION = "0.1.36"
end
Bump to 0.1.37
module RabidMQ
  # Version of this gem.
  VERSION = "0.1.37"
end
|
require "rack/heroku_meta/version"
module Rack
  # Tiny Rack middleware that serves deployment metadata (process name
  # and git commit hash, read from the environment) as JSON at a
  # configurable route; all other paths fall through to the app.
  class HerokuMeta
    DEFAULT_ROUTE = "/heroku_meta"

    # app     - the downstream Rack app.
    # options - :route, the path to serve metadata on (default "/heroku_meta").
    def initialize(app, options = {})
      @app = app
      @route = options.fetch(:route, DEFAULT_ROUTE)
    end

    # dup per request so instance state never leaks between threads.
    def call(env)
      dup._call(env)
    end

    def _call(env)
      if env["PATH_INFO"] == @route
        # Only GET is served; any other verb on the route is a 404.
        env["REQUEST_METHOD"] == "GET" ? serve_meta : not_found
      else
        @app.call(env)
      end
    end

    private

    # 404 response.  Fixed: the content type was the invalid "plain/text";
    # the correct MIME type is "text/plain".
    def not_found
      [404, { "Content-Type" => "text/plain" }, ["Not Found"]]
    end

    # Hand-rolled JSON (avoids a gem dependency).
    # NOTE(review): values are interpolated unescaped — fine for typical
    # PS/COMMIT_HASH values, but not general-purpose JSON encoding.
    def meta
      ps = ENV["PS"]
      commit_hash = ENV["COMMIT_HASH"]
      %Q({"ps":"#{ps}","commit_hash":"#{commit_hash}"})
    end

    def serve_meta
      [200, { "Content-Type" => "application/json" }, [meta]]
    end
  end
end
Added comment about manual JSON for meta
require "rack/heroku_meta/version"
module Rack
  # Tiny Rack middleware that serves deployment metadata (process name
  # and git commit hash, read from the environment) as JSON at a
  # configurable route; all other paths fall through to the app.
  class HerokuMeta
    DEFAULT_ROUTE = "/heroku_meta"

    # app     - the downstream Rack app.
    # options - :route, the path to serve metadata on (default "/heroku_meta").
    def initialize(app, options = {})
      @app = app
      @route = options.fetch(:route, DEFAULT_ROUTE)
    end

    # dup per request so instance state never leaks between threads.
    def call(env)
      dup._call(env)
    end

    def _call(env)
      if env["PATH_INFO"] == @route
        # Only GET is served; any other verb on the route is a 404.
        env["REQUEST_METHOD"] == "GET" ? serve_meta : not_found
      else
        @app.call(env)
      end
    end

    private

    # 404 response.  Fixed: the content type was the invalid "plain/text";
    # the correct MIME type is "text/plain".
    def not_found
      [404, { "Content-Type" => "text/plain" }, ["Not Found"]]
    end

    def meta
      ps = ENV["PS"]
      commit_hash = ENV["COMMIT_HASH"]
      # Manual JSON to avoid a gem dependency
      # NOTE(review): values are interpolated unescaped — fine for typical
      # PS/COMMIT_HASH values, but not general-purpose JSON encoding.
      %Q({"ps":"#{ps}","commit_hash":"#{commit_hash}"})
    end

    def serve_meta
      [200, { "Content-Type" => "application/json" }, [meta]]
    end
  end
end
|
Initial checkin of ICMPv6 support
git-svn-id: b3d365f95d627ddffb8722c756fee9dc0a59d335@87 64fbf49a-8b99-41c6-b593-eb2128c2192d
# $Id$
#
# Copyright (c) 2008, Jon Hart
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY Jon Hart ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL Jon Hart BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
module Racket
# Internet Control Message Protcol, v6
#
# http://en.wikipedia.org/wiki/ICMPv6
#
# Generic ICMP class from which all ICMP variants spawn. This should never be used directly.
class ICMPv6Generic < RacketPart
  ICMPv6_TYPE_ECHO_REPLY = 129
  ICMPv6_TYPE_DESTINATION_UNREACHABLE = 1
  ICMPv6_TYPE_PACKET_TOO_BIG = 2
  # ICMPv6_TYPE_SOURCE_QUENCH =
  # ICMPv6_TYPE_REDIRECT =
  ICMPv6_TYPE_ECHO_REQUEST = 128
  # ICMPv6_TYPE_MOBILE_IP_ADVERTISEMENT =
  # ICMPv6_TYPE_ROUTER_SOLICITATION =
  ICMPv6_TYPE_TIME_EXCEEDED = 3
  ICMPv6_TYPE_PARAMETER_PROBLEM = 4
  # ICMPv6_TYPE_TIMESTAMP_REQUEST =
  # ICMPv6_TYPE_TIMESTAMP_REPLY =
  # ICMPv6_TYPE_INFO_REQUEST =
  # ICMPv6_TYPE_INFO_REPLY =
  # ICMPv6_TYPE_ADDRESS_MASK_REQUEST =
  # ICMPv6_TYPE_ADDRESS_MASK_REPLY =

  # Type
  unsigned :type, 8
  # Code
  unsigned :code, 8
  # Checksum
  unsigned :checksum, 16
  rest :message

  # Check the checksum for this ICMPv6 packet.  The IPv6 source and
  # destination addresses are required because the ICMPv6 checksum is
  # computed over an IPv6 pseudo-header.  (The previous zero-argument
  # form always raised ArgumentError: compute_checksum needs both
  # addresses.)
  def checksum?(src_ip, dst_ip)
    self.checksum == compute_checksum(src_ip, dst_ip)
  end

  def initialize(*args)
    super(*args)
    @autofix = false
  end

  # compute and set the checksum for this ICMPv6 packet
  def checksum!(src_ip, dst_ip)
    self.checksum = compute_checksum(src_ip, dst_ip)
  end

  # 'fix' this ICMPv6 packet up for sending.
  # (really, just set the checksum)
  def fix!(src_ip, dst_ip)
    self.checksum!(src_ip, dst_ip)
  end

  private

  # Compute the checksum over the IPv6 pseudo-header (RFC 4443 / RFC 8200;
  # the previous comment cited RFC 768, which is UDP).
  def compute_checksum(src_ip, dst_ip)
    # Split each 128-bit address into four 32-bit words for packing as
    # "N".  The previous masks were only 16 bits wide (0xFFFF), which
    # silently dropped the upper half of every word below the top one
    # and produced incorrect checksums.
    s1 = (src_ip >> 96) & 0xFFFFFFFF
    s2 = (src_ip >> 64) & 0xFFFFFFFF
    s3 = (src_ip >> 32) & 0xFFFFFFFF
    s4 = src_ip & 0xFFFFFFFF
    d1 = (dst_ip >> 96) & 0xFFFFFFFF
    d2 = (dst_ip >> 64) & 0xFFFFFFFF
    d3 = (dst_ip >> 32) & 0xFFFFFFFF
    d4 = dst_ip & 0xFFFFFFFF
    # Pseudo-header: src + dst addresses, upper-layer packet length,
    # next header (58 = ICMPv6), then type/code with a zeroed checksum
    # field and the message body.
    pseudo = [ s1, s2, s3, s4, d1, d2, d3, d4, self.length, 58, self.type, self.code, 0, self.message ]
    L3::Misc.checksum(pseudo.pack("NNNNNNNNNNCCna*"))
  end
end
# Send raw ICMP packets of your own design
class ICMPv6 < ICMPv6Generic
  # Everything after the generic type/code/checksum header is free-form.
  rest :payload
end
# ICMP Echo
# Generic ICMPv6 echo, used by request and reply
class ICMPv6Echo < ICMPv6Generic
  # Identifier used to match requests with replies.
  unsigned :id, 16
  # Sequence number used to match requests with replies.
  unsigned :sequence, 16
  rest :payload

  def initialize(*args)
    super(*args)
    # Defaults to an echo request; ICMPv6EchoReply overrides the type.
    self.type = ICMPv6_TYPE_ECHO_REQUEST
    self.code = 0
  end
end
class ICMPv6EchoRequest < ICMPv6Echo
# NOTE(review): the parent class already declares `rest :payload` and
# sets the same type/code in #initialize, so both appear redundant —
# verify against RacketPart's field DSL before removing.
rest :payload
# Stamp the echo-request type (128) and code 0.
def initialize(*args)
super(*args)
self.type = ICMPv6_TYPE_ECHO_REQUEST
self.code = 0
end
end
# ICMP Echo Reply
class ICMPv6EchoReply < ICMPv6Echo
# NOTE(review): `rest :payload` is already declared by the parent class;
# confirm the duplicate declaration is harmless under the field DSL.
rest :payload
# Same layout as ICMPv6Echo but stamped with the echo-reply type (129).
def initialize(*args)
super(*args)
self.type = ICMPv6_TYPE_ECHO_REPLY
self.code = 0
end
end
# ICMP Destination Unreachable Message
# ICMP Destination Unreachable Message (RFC 4443, section 3.1)
class ICMPv6DestinationUnreachable < ICMPv6Generic
  ICMPv6_CODE_NO_ROUTE = 0
  ICMPv6_CODE_ADMIN_PROHIBITED = 1
  ICMPv6_CODE_BEYOND_SCOPE = 2
  ICMPv6_CODE_ADDRESS_UNREACHABLE = 3
  ICMPv6_CODE_PORT_UNREACHABLE = 4
  # BUGFIX: RFC 4443 assigns code 5 to "source address failed
  # ingress/egress policy" and code 6 to "reject route to destination".
  # The previous values (4 and 5) collided with PORT_UNREACHABLE and
  # with each other.
  ICMPv6_CODE_FAILED_POLICY = 5
  ICMPv6_CODE_REJECT_ROUTE = 6
  # This is never used according to the RFC
  unsigned :unused, 32
  # As much of the invoking packet as possible without exceeding the
  # minimum IPv6 MTU
  rest :payload
  def initialize(*args)
    super(*args)
    self.type = ICMPv6_TYPE_DESTINATION_UNREACHABLE
  end
end
# ICMPv6 Packet Too Big Message; sets type 2, code is left to the caller.
class ICMPv6PacketTooBig < ICMPv6Generic
# The Maximum Transmission Unit of the next-hop link
unsigned :mtu, 32
# As much of the invoking packet as fits
rest :payload
def initialize(*args)
super(*args)
self.type = ICMPv6_TYPE_PACKET_TOO_BIG
end
end
# ICMP Time Exceeded Message
class ICMPv6TimeExceeded < ICMPv6Generic
# Hop limit exceeded in transit
ICMPv6_CODE_TTL_EXCEEDED_IN_TRANSIT = 0
# Fragment reassembly time exceeded
ICMPv6_CODE_FRAG_REASSEMBLY_TIME_EXCEEDED = 1
# This is never used according to the RFC
unsigned :unused, 32
# As much of the original ICMPv6 packet without busting MTU
rest :payload
# Sets the Time Exceeded type; the caller picks one of the codes above.
def initialize(*args)
super(*args)
self.type = ICMPv6_TYPE_TIME_EXCEEDED
end
end
# ICMPv6 Parameter Problem Message
class ICMPv6ParameterProblem < ICMPv6Generic
# Erroneous header field encountered
ICMPv6_CODE_ERRONEOUS_HEADER = 0
# Unrecognized Next Header type encountered
ICMPv6_CODE_UNRECOGNIZED_NEXT_HEADER = 1
# Unrecognized IPv6 option encountered
ICMPv6_CODE_UNRECOGNIZED_OPTION = 2
# pointer to the octet where the error was detected
unsigned :pointer, 32
# As much of the original ICMPv6 packet without busting MTU
rest :payload
# Sets the Parameter Problem type; the caller picks one of the codes above.
def initialize(*args)
super(*args)
self.type = ICMPv6_TYPE_PARAMETER_PROBLEM
end
end
end
# vim: set ts=2 et sw=2:
|
module Rack::JSON
  # Request wrapper that adds JSON/collection helpers on top of Rack::Request.
  class Request < Rack::Request
    include Rack::Utils

    # Keep a handle on the raw env before delegating to Rack::Request.
    def initialize(env)
      @env = env
      super(env)
    end

    # Append a raw parameter onto the query string (mutates it in place).
    def add_query_param(param)
      query_string << param
    end

    # First path segment, e.g. "/users/1" -> "users".
    def collection
      _, first_segment = path_info.split('/')
      first_segment || ""
    end

    # True for paths shaped like "/collection".
    def collection_path?
      path_info.match(/^\/\w+$/)
    end

    # True for paths shaped like "/collection/member".
    def member_path?
      path_info.match(/^\/\w+\/\w+$/)
    end

    # Request body as a string; rewinds first so repeated reads work.
    def json
      body.rewind
      body.read
    end

    # Query object lazily parsed from the unescaped query string.
    def query
      @query ||= Rack::JSON::JSONQuery.new(unescape(query_string))
    end

    # Last path segment as a Mongo ObjectID when possible, an Integer
    # when purely numeric, otherwise the raw string.
    def resource_id
      candidate = path_info.split('/').last.to_s
      Mongo::ObjectID.from_string(candidate)
    rescue Mongo::InvalidObjectID
      candidate.match(/^\d+$/) ? candidate.to_i : candidate
    end

    # The Rack session hash, if a session middleware is installed.
    def session
      @env['rack.session']
    end
  end
end
Request can modify the body of the request
module Rack::JSON
class Request < Rack::Request
include Rack::Utils
attr_reader :env
def initialize(env)
@env = env
super(env)
end
def add_query_param(param)
self.query_string << param
end
def collection
self.path_info.split('/')[1] || ""
end
def collection_path?
self.path_info.match /^\/\w+$/
end
def member_path?
self.path_info.match /^\/\w+\/\w+$/
end
def json
self.body.rewind
self.body.read
end
def query
@query ||= Rack::JSON::JSONQuery.new(unescape(query_string))
end
def resource_id
id_string = self.path_info.split('/').last.to_s
begin
Mongo::ObjectID.from_string(id_string)
rescue Mongo::InvalidObjectID
id_string.match(/^\d+$/) ? id_string.to_i : id_string
end
end
def session
@env['rack.session']
end
def set_body json
@env['rack.input'] = StringIO.new(json)
@env['rack.input'].rewind
end
end
end |
module Rails
  module JS
    # Gem release version (Semantic Versioning).  Frozen so the shared
    # constant cannot be mutated by callers.
    VERSION = "0.0.4".freeze
  end
end
bumps version to 0.0.5
module Rails
  module JS
    # Gem release version (Semantic Versioning).  Frozen so the shared
    # constant cannot be mutated by callers.
    VERSION = "0.0.5".freeze
  end
end
|
require 'rake/remote_task'
require "rake/remote_chef/version"
# Define top-level convenience wrappers that forward to the named
# receiver's class method of the same name, e.g. `run_list(...)` calls
# Rake::RemoteChef.run_list(...).  The receiver constant is only resolved
# when the wrapper is invoked, so it need not be defined yet.
# NOTE(review): a :runlist wrapper is generated but no matching class
# method is visible — verify it is intentional.
[
  ["Rake::RemoteChef", :runlist, :run_list]
].each do |methods|
  receiver = methods.shift
  methods.each do |method|
    # Restored "&block;" here — the source text was corrupted at this spot.
    eval "def #{method} *args, &block; #{receiver}.#{method}(*args, &block);end"
  end
end
module Rake
  # Registers per-role Chef run lists and loads recipe/config files for
  # rake-remote_task driven provisioning.
  class RemoteChef
    # Append +runlist+ entries to the list registered for +role+,
    # de-duplicating while preserving insertion order.
    def self.run_list role, runlist
      @runlist ||= {}
      @runlist[role] ||= []
      (@runlist[role] += runlist).uniq!
    end

    # Combined, de-duplicated run list for the given roles; roles without
    # registered entries contribute nothing.
    # BUGFIX: previously raised NoMethodError on nil @runlist when called
    # before any run_list registration.
    def self.run_list_for *roles
      @runlist ||= {}
      roles.map {|r| @runlist[r] }.flatten.uniq.compact
    end

    # Require the recipe files named in +options+ (a config path String is
    # treated as {:config => path}), configure rsync/ruby paths via the
    # rake-remote_task `set` DSL, then load the chef config file(s).
    def self.load options = {}
      options = {:config => options} if String === options
      # :bootstrap and :core always load first, in that order.
      order = [:bootstrap, :core]
      order += options.keys - order
      recipes = {
        :config => 'config/chef.rb',
        :bootstrap => 'bootstrap/ubuntu',
        :core => 'core'
      }.merge(options)
      order.each do |flavor|
        recipe = recipes[flavor]
        next if recipe.nil? or flavor == :config
        require "rake/remote_chef/#{recipe}"
      end
      # `set`/`ssh_flags` come from rake/remote_task.
      set :ruby_path, '/opt/chef'
      set(:rsync_flags) {['-rlptDzP', '--exclude', '.git', '-e', "ssh #{ssh_flags.join(' ')}"]}
      Kernel.load recipes[:config]
      # Environment-specific overrides, e.g. `rake to=staging ...`.
      Kernel.load "config/chef_#{ENV['to']}.rb" if ENV['to']
    end
  end
end
Fixed Rake::RemoteChef.run_list_for so it contains the default run_list.
require 'rake/remote_task'
require "rake/remote_chef/version"
# Define top-level convenience wrappers that forward to the named
# receiver's class method of the same name, e.g. `run_list(...)` calls
# Rake::RemoteChef.run_list(...).  The receiver constant is only resolved
# when the wrapper is invoked, so it need not be defined yet.
# NOTE(review): a :runlist wrapper is generated but no matching class
# method is visible — verify it is intentional.
[
  ["Rake::RemoteChef", :runlist, :run_list]
].each do |methods|
  receiver = methods.shift
  methods.each do |method|
    # Restored "&block;" here — the source text was corrupted at this spot.
    eval "def #{method} *args, &block; #{receiver}.#{method}(*args, &block);end"
  end
end
module Rake
  # Registers per-role Chef run lists and loads recipe/config files for
  # rake-remote_task driven provisioning.
  class RemoteChef
    # Append +runlist+ entries to the list registered for +role+,
    # de-duplicating while preserving insertion order.
    def self.run_list role, runlist
      @runlist ||= {}
      @runlist[role] ||= []
      (@runlist[role] += runlist).uniq!
    end

    # Combined, de-duplicated run list for the given roles.  The :default
    # role's entries are always included first; roles without registered
    # entries contribute nothing.
    # BUGFIX: previously raised NoMethodError on nil @runlist when called
    # before any run_list registration.
    def self.run_list_for *roles
      @runlist ||= {}
      roles.unshift(:default)
      roles.map {|r| @runlist[r] }.flatten.uniq.compact
    end

    # Require the recipe files named in +options+ (a config path String is
    # treated as {:config => path}), configure rsync/ruby paths via the
    # rake-remote_task `set` DSL, then load the chef config file(s).
    def self.load options = {}
      options = {:config => options} if String === options
      # :bootstrap and :core always load first, in that order.
      order = [:bootstrap, :core]
      order += options.keys - order
      recipes = {
        :config => 'config/chef.rb',
        :bootstrap => 'bootstrap/ubuntu',
        :core => 'core'
      }.merge(options)
      order.each do |flavor|
        recipe = recipes[flavor]
        next if recipe.nil? or flavor == :config
        require "rake/remote_chef/#{recipe}"
      end
      # `set`/`ssh_flags` come from rake/remote_task.
      set :ruby_path, '/opt/chef'
      set(:rsync_flags) {['-rlptDzP', '--exclude', '.git', '-e', "ssh #{ssh_flags.join(' ')}"]}
      Kernel.load recipes[:config]
      # Environment-specific overrides, e.g. `rake to=staging ...`.
      Kernel.load "config/chef_#{ENV['to']}.rb" if ENV['to']
    end
  end
end
|
# frozen_string_literal: true
module Recorder
# Gem release version (Semantic Versioning: MAJOR.MINOR.PATCH).
# The literal is frozen via the file's frozen_string_literal pragma.
VERSION = '1.2.0'
end
Bumped patch version
# frozen_string_literal: true
module Recorder
# Gem release version (Semantic Versioning: MAJOR.MINOR.PATCH).
# The literal is frozen via the file's frozen_string_literal pragma.
VERSION = '1.2.1'
end
|
# Copyright (c) 2009-2019 Minero Aoki, Kenshi Muto
# 2002-2008 Minero Aoki
#
# This program is free software.
# You can distribute or modify this program under the terms of
# the GNU LGPL, Lesser General Public License version 2.1.
# For details of the GNU LGPL, see the file "COPYING".
#
require 'review/book/book_unit'
module ReVIEW
module Book
# A Part bundles a sequence of chapters; it may itself be backed by a
# *.re source file (its `name`), in which case it carries content of its own.
class Part < BookUnit
# Build a Part from a catalog name-list file (whitespace-separated file
# names).  Chapters listed in a PREDEF file stay unnumbered; otherwise
# they are numbered starting at 1.
def self.mkpart_from_namelistfile(book, path)
chaps = []
File.read(path, mode: 'rt:BOM|utf-8').split.each_with_index do |name, number|
if path =~ /PREDEF/
chaps << Chapter.mkchap(book, name)
else
chaps << Chapter.mkchap(book, name, number + 1)
end
end
Part.mkpart(chaps)
end
# Build a Part from chapter names, silently skipping names whose source
# files do not exist.
def self.mkpart_from_namelist(book, names)
Part.mkpart(names.map { |name| Chapter.mkchap_ifexist(book, name) }.compact)
end
# Wrap the chapters in an unnumbered Part; nil when there are none.
def self.mkpart(chaps)
chaps.empty? ? nil : Part.new(chaps[0].book, nil, chaps)
end
# if Part is dummy, `number` is nil.
#
# `name` doubles as the path of the part's own *.re file (if any);
# content is taken from `io` when given, otherwise read from that file.
def initialize(book, number, chapters, name = '', io = nil)
@book = book
@number = number
@name = name
@chapters = chapters
@path = name
if io
@content = io.read
elsif @path.present? && File.exist?(File.join(@book.config['contentdir'], @path))
@content = File.read(File.join(@book.config['contentdir'], @path), mode: 'rt:BOM|utf-8')
@name = File.basename(name, '.re')
else
@content = ''
end
# File-backed parts take their title from the content later; otherwise
# the bare name serves as the title.
if file?
@title = nil
else
@title = name
end
@volume = nil
super()
end
attr_reader :number
attr_reader :chapters
attr_reader :name
def each_chapter(&block)
@chapters.each(&block)
end
# Size information of the part's own file; a zero Volume for dummy or
# non-file-backed parts.
def volume
if @number && file?
vol = Volume.count_file(File.join(@book.config['contentdir'], @path))
else
vol = Volume.new(0, 0, 0)
end
vol
end
# True when this part is backed by its own .re source file.
def file?
name.present? and path.end_with?('.re')
end
# Part number formatted for display: the localized "Part N" form when
# `heading` is true, otherwise just the number.
def format_number(heading = true)
if heading
I18n.t('part', @number)
else
@number.to_s
end
end
# Parts never live in the appendix.
def on_appendix?
false
end
# backward compatibility
alias_method :on_APPENDIX?, :on_appendix?
end
end
end
Make indices for Part the same as for Chapter. Closes: #1567
# Copyright (c) 2009-2020 Minero Aoki, Kenshi Muto, Masayoshi Takahashi
# 2002-2008 Minero Aoki
#
# This program is free software.
# You can distribute or modify this program under the terms of
# the GNU LGPL, Lesser General Public License version 2.1.
# For details of the GNU LGPL, see the file "COPYING".
#
require 'review/book/book_unit'
module ReVIEW
module Book
# A Part bundles a sequence of chapters; it may itself be backed by a
# *.re source file (its `name`), in which case it carries content of its own.
class Part < BookUnit
# Build a Part from a catalog name-list file (whitespace-separated file
# names).  Chapters listed in a PREDEF file stay unnumbered; otherwise
# they are numbered starting at 1.
def self.mkpart_from_namelistfile(book, path)
chaps = []
File.read(path, mode: 'rt:BOM|utf-8').split.each_with_index do |name, number|
if path =~ /PREDEF/
chaps << Chapter.mkchap(book, name)
else
chaps << Chapter.mkchap(book, name, number + 1)
end
end
Part.mkpart(chaps)
end
# Build a Part from chapter names, silently skipping names whose source
# files do not exist.
def self.mkpart_from_namelist(book, names)
Part.mkpart(names.map { |name| Chapter.mkchap_ifexist(book, name) }.compact)
end
# Wrap the chapters in an unnumbered Part; nil when there are none.
def self.mkpart(chaps)
chaps.empty? ? nil : Part.new(chaps[0].book, nil, chaps)
end
# if Part is dummy, `number` is nil.
#
# `name` doubles as the path of the part's own *.re file (if any);
# content is taken from `io` when given, otherwise read from that file.
def initialize(book, number, chapters, name = '', io = nil)
@book = book
@number = number
@name = name
@chapters = chapters
@path = name
if io
@content = io.read
elsif @path.present? && File.exist?(File.join(@book.config['contentdir'], @path))
@content = File.read(File.join(@book.config['contentdir'], @path), mode: 'rt:BOM|utf-8')
@name = File.basename(name, '.re')
else
@content = ''
end
# File-backed parts take their title from the content later; otherwise
# the bare name serves as the title.
if file?
@title = nil
else
@title = name
end
@volume = nil
super()
end
# Rebuild indexes from this part's own content and capture the
# image-related ones, so image references inside a part resolve
# (presumably mirroring Chapter — see issue #1567).
def generate_indexes
super
return unless content
@numberless_image_index = @indexes.numberless_image_index
@image_index = @indexes.image_index
@icon_index = @indexes.icon_index
@indepimage_index = @indexes.indepimage_index
end
attr_reader :number
attr_reader :chapters
attr_reader :name
def each_chapter(&block)
@chapters.each(&block)
end
# Size information of the part's own file; a zero Volume for dummy or
# non-file-backed parts.
def volume
if @number && file?
vol = Volume.count_file(File.join(@book.config['contentdir'], @path))
else
vol = Volume.new(0, 0, 0)
end
vol
end
# True when this part is backed by its own .re source file.
def file?
name.present? and path.end_with?('.re')
end
# Part number formatted for display: the localized "Part N" form when
# `heading` is true, otherwise just the number.
def format_number(heading = true)
if heading
I18n.t('part', @number)
else
@number.to_s
end
end
# Parts never live in the appendix.
def on_appendix?
false
end
# backward compatibility
alias_method :on_APPENDIX?, :on_appendix?
end
end
end
|
require 'rest_client' # rest_client 1.6.1
require 'json'
require 'set'
require 'cgi'
# RightApiClient has the generic get/post/delete/put calls that are used
# by resources
class RightApiClient
ROOT_RESOURCE = '/api/session'
ROOT_INSTANCE_RESOURCE = '/api/session/instance'
# permitted parameters for initializing
AUTH_PARAMS = %w(email password account_id api_url api_version cookies instance_token)
# NOTE: instances will be handled in a special case
INSTANCE_ACTIONS = {
:clouds => {:volumes => 'do_get', :volume_types => 'do_get', :volume_attachments => 'do_get', :volume_snapshots => 'do_get', :instances => 'get_new_dummy_class'}
}
#
# Methods shared by the RightApiClient, Resource and resource arrays.
#
module Helper
  # Define a method on the receiver's singleton class, so each
  # client/resource instance grows its own API methods at runtime.
  def define_instance_method(meth, &blk)
    (class << self; self; end).module_eval do
      define_method(meth, &blk)
    end
  end

  # All API methods dynamically defined on this client or resource
  # (singleton methods only).
  def api_methods
    self.methods(false)
  end

  # Define methods that query the API for the associated resources.
  # Some resources have many links sharing the same rel; all hrefs for a
  # rel are captured by one method, which then returns an array.
  def get_associated_resources(client, links, associations)
    # First go through the links and group the rels together.
    rels = {}
    links.each do |link|
      if rels[link['rel'].to_sym] # if we have already seen this rel attribute
        rels[link['rel'].to_sym] << link['href']
      else
        rels[link['rel'].to_sym] = [link['href']]
      end
    end
    # Note: hrefs will be an array, even if there is only one link with that rel.
    rels.each do |rel, hrefs|
      # Add the link to the associations set if present (Resource keeps one).
      associations << rel if associations != nil
      # :tags and :backups are special-cased below.  BUGFIX: the original
      # guard (`rels != :tags || rels != :backups`) compared the whole
      # hash against a symbol and was therefore always true; test the rel
      # itself.  (The net effect is unchanged because the special-case
      # definitions overwrote the generic one.)
      unless [:tags, :backups].include?(rel)
        define_instance_method(rel) do |*args|
          if hrefs.size == 1 # Only one link for the specific rel attribute
            Resource.process(client, *client.do_get(hrefs.first, *args))
          else
            resources = []
            hrefs.each do |href|
              resources << Resource.process(client, *client.do_get(href, *args))
            end
            # return the array of resource objects
            resources
          end
        end
      end
      # Design choice for tags since you cannot query do_get on /api/tags:
      # expose a dummy resource carrying the tag actions instead.
      define_instance_method(rel) do |*params|
        # hrefs will only have one element, namely api/tags
        DummyResource.new(client, hrefs.first, {:by_tag => 'do_post', :by_resource => 'do_post', :multi_add => 'do_post', :multi_delete =>'do_post'})
      end if rel == :tags
      # The backups hack
      add_in_backups(client, hrefs.first) if rel == :backups
    end
  end

  # Specific to backups. A hack :<(
  # client.backups.create(params) must work, but client.backups performs a
  # GET that needs the lineage as a parameter; with args the GET happens,
  # without args a dummy resource exposing create/cleanup is returned.
  def add_in_backups(client, path)
    define_instance_method(:backups) do |*args|
      if args != []
        Resource.process(client, *client.do_get(path, *args))
      else
        DummyResource.new(client, path, {:create => 'do_post', :cleanup => 'do_post'})
      end
    end
  end

  # Append "/<id>" when params carries :id (consumes the key).
  def add_id_to_path(path, params = {})
    path += "/#{params.delete(:id)}" if params.has_key?(:id)
    path
  end
end
include Helper
# The cookies for our client.
attr_reader :cookies
# Build a client from an options hash (see AUTH_PARAMS for accepted keys)
# and log in immediately.  Depending on whether an instance token or
# email/password credentials were supplied, a different set of root
# methods is defined on this instance.
def initialize(args)
# Default params
@api_url, @api_version = 'https://my.rightscale.com', '1.5'
# Initializing all instance variables from hash
args.each { |key,value|
instance_variable_set("@#{key}", value) if value && AUTH_PARAMS.include?(key.to_s)
} if args.is_a? Hash
raise 'This API Client is only compatible with RightScale API 1.5 and upwards.' if (Float(@api_version) < 1.5)
@client = RestClient::Resource.new(@api_url)
# There are three options for login: credentials, instance token, or if the user already has the cookies they can just use those
@cookies ||= login()
if @instance_token
# Instance-facing session: only a limited API surface is reachable.
define_instance_method(:get_instance) do |*params|
Resource.process(self, *self.do_get(ROOT_INSTANCE_RESOURCE, *params))
end
# Like tags, you cannot call api/clouds when using an instance_token
INSTANCE_ACTIONS.each do |dummy_meth, meths|
define_instance_method(dummy_meth) do |*params|
path = add_id_to_path("/api/clouds", *params)
DummyResource.new(self, path, meths)
end
end
# add in the hack for the backups
add_in_backups(self, "/api/backups")
else
# Session is the root resource that has links to all the base resources,
# to the client since they can be accessed directly
define_instance_method(:session) do |*params|
Resource.process(self, *self.do_get(ROOT_RESOURCE, *params))
end
get_associated_resources(self, session.links, nil)
end
end
# Terse client representation; deliberately reveals no credentials,
# cookies, or URLs.
def to_s
  '#<RightApiClient>'
end
# Log HTTP calls to file (file can be STDOUT as well)
# Enable RestClient's global wire logging; affects every RestClient
# call in this process, not just this client.
def log(file)
RestClient.log = file
end
# Users shouldn't need to call the following methods directly
# you can login with username and password or with an instance_token
# Authenticate with either an instance token or email/password and
# return the session cookies from the response.  A 302 answer is
# accepted as-is (the server redirects on successful login); any other
# code goes through RestClient's default handling, which raises for
# error responses.
def login
if @instance_token
params = {
'instance_token' => @instance_token
}
path = ROOT_INSTANCE_RESOURCE
else
params = {
'email' => @email,
'password' => @password,
}
path = ROOT_RESOURCE
end
params['account_href'] = "/api/accounts/#{@account_id}"
response = @client[path].post(params, 'X_API_VERSION' => @api_version) do |response, request, result, &block|
case response.code
when 302
response
else
response.return!(request, result, &block)
end
end
response.cookies
end
# Headers common to every API call: the pinned API version, the session
# cookies obtained at login, and a JSON Accept header.
def headers
  {
    'X_API_VERSION' => @api_version,
    :cookies => @cookies,
    :accept => :json
  }
end
# Generic get
# Generic GET.  Returns [parsed JSON data, resource_type, request path];
# resource_type is derived from the RightScale content type and tells
# Resource.process which methods to attach.  Re-logs-in and retries
# when the session cookie has expired.
def do_get(path, params={})
# Resource id is a special param as it needs to be added to the path
path = add_id_to_path(path, params)
# Normally you would just pass a hash of query params to RestClient,
# but unfortunately it only takes them as a hash, and for filtering
# we need to pass multiple parameters with the same key. The result
# is that we have to build up the query string manually.
filters = params.delete(:filters)
params_string = params.map{|k,v| "#{k.to_s}=#{CGI::escape(v.to_s)}" }.join('&')
if filters && filters.any?
path += "?filter[]=" + filters.map{|f| CGI::escape(f) }.join('&filter[]=')
path += "&#{params_string}"
else
path += "?#{params_string}"
end
# If present, remove ? and & at end of path
path.chomp!('&')
path.chomp!('?')
begin
# Return content type so the resulting resource object knows what kind of resource it is.
resource_type, body = @client[path].get(headers) do |response, request, result, &block|
case response.code
when 200
# Get the resource_type from the content_type, the resource_type will
# be used later to add relevant methods to relevant resources.
type = ''
if result.content_type.index('rightscale')
type = get_resource_type(result.content_type)
end
[type, response.body]
else
raise "Unexpected response #{response.code.to_s}, #{response.body}"
end
end
#Session cookie is expired or invalid
rescue RuntimeError => e
if re_login?(e)
@cookies = login()
retry
else
raise e
end
end
data = JSON.parse(body)
[data, resource_type, path]
end
# Generic post
# Generic POST.  A 201/202 answer carries a Location header pointing at
# the newly created resource, which is fetched and returned.  A 200 with
# a RightScale content type (e.g. the tags endpoints) is wrapped in a
# Resource directly; other 2xx responses fall through to RestClient's
# default handling.  Re-logs-in and retries on session expiry.
def do_post(path, params={})
begin
@client[path].post(params, headers) do |response, request, result, &block|
case response.code
when 201, 202
# Create and return the resource
href = response.headers[:location]
Resource.process(self, *self.do_get(href))
when 200..299
# this is needed for the tags Resource -- which returns a 200 and has a content type
# therefore, a resource object needs to be returned
if response.code == 200 && response.headers[:content_type].index('rightscale')
type = get_resource_type(response.headers[:content_type])
Resource.process(self, JSON.parse(response), type, path)
else
response.return!(request, result, &block)
end
else
raise "Unexpected response #{response.code.to_s}, #{response.body}"
end
end
rescue RuntimeError => e
if re_login?(e)
@cookies = login()
retry
else
raise e
end
end
end
# Generic delete
# Generic DELETE.  Only a 200 answer is accepted (the empty `when`
# branch yields nil); anything else raises.  Re-logs-in and retries on
# session expiry.
def do_delete(path)
begin
@client[path].delete(headers) do |response, request, result, &block|
case response.code
when 200
else
raise "Unexpected response #{response.code.to_s}, #{response.body}"
end
end
rescue RuntimeError => e
if re_login?(e)
@cookies = login()
retry
else
raise e
end
end
end
# Generic put
# Generic PUT.  Only a 204 answer is accepted (the empty `when` branch
# yields nil); anything else raises.  Re-logs-in and retries on session
# expiry.
def do_put(path, params={})
begin
@client[path].put(params, headers) do |response, request, result, &block|
case response.code
when 204
else
raise "Unexpected response #{response.code.to_s}, #{response.body}"
end
end
rescue RuntimeError => e
if re_login?(e)
@cookies = login()
retry
else
raise e
end
end
end
# True when the error looks like a 403 caused by an expired or invalid
# session cookie, i.e. a fresh login should be attempted.
def re_login?(e)
  message = e.message
  message.index('403') && message =~ %r{.*Session cookie is expired or invalid}
end
# returns the resource_type
# Extract the resource type from a RightScale media type, e.g.
# "application/vnd.rightscale.server+json" => "server".
def get_resource_type(content_type)
  captures = content_type.scan(/\.rightscale\.(.*)\+json/)
  captures[0][0]
end
# Given a path returns a RightApiClient::Resource instance.
#
# GET the given path and wrap the response in Resource object(s).
def resource(path,params={})
Resource.process(self, *do_get(path,params))
end
# This is need for resources like tags where the api/tags/ call is not supported.
# This will define a dummy object and its methods
# Stand-in for resources whose collection URL cannot be GET'd directly
# (e.g. /api/tags): each entry of +params+ becomes a method that issues
# the mapped HTTP action against a sub-path of +path+.
class DummyResource
include Helper
# path is the base_resource's href
# params is a hash where:
# key = method name
# value = action that is needed (like do_post, do_get...)
def initialize(client, path, params={})
params.each do |meth, action|
define_instance_method(meth) do |*args|
# do_get does not return a resource object (unlike do_post)
if meth == :instances
# NOTE(review): this reassigns the captured `path` local, so after
# the first :instances call every later method on this object sees
# the extended path — verify this is intentional.
path = path.to_str + add_id_to_path("/instances", *args)
DummyResource.new(client, path, {:live_tasks => 'do_get'})
elsif meth == :live_tasks
Resource.process(client, *client.do_get(path.to_str + '/live/tasks', *args))
elsif action == 'do_get'
Resource.process(client, *client.do_get(path.to_str + '/' + meth.to_s, *args))
elsif meth == :create
client.send action, path, *args
else
# send converts action (a string) into a method call
client.send action, (path.to_str + '/' + meth.to_s), *args
end
end
end
end
end
# Represents resources returned by API calls, this class dynamically adds
# methods and properties to instances depending on what type of resource
# they are.
class Resource
include Helper
# The API does not provide information about the basic actions that can be
# performed on resources so we need to define them
RESOURCE_ACTIONS = {
:create => ['deployment', 'server_array', 'server', 'ssh_key', 'volume', 'volume_snapshot', 'volume_attachment'],
:destroy => ['deployment', 'server_array', 'server', 'ssh_key', 'volume', 'volume_snapshot', 'volume_attachment', 'backup'],
:update => ['deployment', 'instance', 'server_array', 'server', 'backup']
}
attr_reader :client, :attributes, :associations, :actions, :raw, :resource_type
# Insert the given term at the correct place in the path, so
# if there are parameters in the path then insert it before them.
# Insert +term+ as the final path segment, keeping any query-string
# parameters after it (e.g. "/a?x=1" + "b" -> "/a/b?x=1").
def self.insert_in_path(path, term)
  if path.index('?')
    path.sub('?', "/#{term}?")
  else
    "#{path}/#{term}"
  end
end
# Takes some response data from the API
# Returns a single Resource object or a collection if there were many
# Takes some response data from the API.
# Returns a single Resource for a hash, or — for an array — a list of
# Resources extended with collection-level methods (create, the
# instance multi_* actions, input multi_update) appropriate to the
# resource type.
def self.process(client, data, resource_type, path)
if data.kind_of?(Array)
resource_array = data.map { |obj| Resource.new(client, obj, resource_type) }
# Bring in the helper so we can add methods to it before it's returned.
# The next few if statements might be nicer as a case but some
# resources might need multiple methods so we'll keep things as
# separate if statements for now.
resource_array.extend(Helper)
# Add create methods for the relevant resources
if RESOURCE_ACTIONS[:create].include?(resource_type)
resource_array.define_instance_method('create') do |*args|
client.do_post(path, *args)
end
end
# Add multi methods for the instance resource
if ['instance'].include?(resource_type)
['multi_terminate', 'multi_run_executable'].each do |multi_action|
multi_action_path = Resource.insert_in_path(path, multi_action)
resource_array.define_instance_method(multi_action) do |*args|
client.do_post(multi_action_path, *args)
end
end
end
# Add multi_update to input resource
if ['input'].include?(resource_type)
resource_array.define_instance_method('multi_update') do |*args|
multi_update_path = Resource.insert_in_path(path, 'multi_update')
client.do_put(multi_update_path, *args)
end
end
resource_array
else
Resource.new(client, data, resource_type)
end
end
# Compact one-line description; name/resource_uid are included only
# when this particular resource actually defines those attributes.
def inspect
"#<#{self.class.name} " +
"resource_type=\"#{@resource_type}\"" +
"#{', name='+name.inspect if self.respond_to?(:name)}" +
"#{', resource_uid='+resource_uid.inspect if self.respond_to?(:resource_uid)}>"
end
# Wrap one parsed API hash: pull out 'links' and 'actions', turn the
# 'self' link into an `href` attribute, define a method per action
# (POST), per association (via get_associated_resources) and per plain
# attribute, then bolt on type-specific methods (live_tasks, destroy,
# update).
def initialize(client, hash, resource_type)
@client = client
@resource_type = resource_type
@raw = hash.dup
@attributes, @associations, @actions = Set.new, Set.new, Set.new
links = hash.delete('links') || []
raw_actions = hash.delete('actions') || []
# We obviously can't re-define a method called 'self', so pull
# out the 'self' link and make it 'href'.
# (The loop breaks with the matching index, or with nil after the last
# link; with no links at all, `any?` short-circuits to false.)
self_index = links.any? && links.each_with_index do |link, idx|
if link['rel'] == 'self'
break idx
end
if idx == links.size-1
break nil
end
end
if self_index
hash['href'] = links.delete_at(self_index)['href']
end
# Add links to attributes set and create a method that returns the links
attributes << :links
define_instance_method(:links) { return links }
# API doesn't tell us whether a resource action is a GET or a POST, but
# I think they are all post so add them all as posts for now.
raw_actions.each do |action|
action_name = action['rel']
# Add it to the actions set
actions << action_name.to_sym
define_instance_method(action_name.to_sym) do |*args|
href = hash['href'] + "/" + action['rel']
client.do_post(href, *args)
end
end
get_associated_resources(client, links, associations)
hash.each do |k, v|
# If a parent resource is requested with a view then it might return
# extra data that can be used to build child resources here, without
# doing another get request.
if associations.include?(k.to_sym)
# We could use one rescue block rather than these multiple ifs, but
# exceptions are slow and the whole points of this code block is
# optimization so we'll stick to using ifs.
# v might be an array or hash so use include rather than has_key
if v.include?('links')
child_self_link = v['links'].find { |target| target['rel'] == 'self' }
if child_self_link
child_href = child_self_link['href']
if child_href
# Currently, only instances need this optimization, but in the
# future we might like to extract resource_type from child_href
# and not hard-code it.
if child_href.index('instance')
define_instance_method(k) { Resource.process(client, v, 'instance', child_href) }
end
end
end
end
else
# Add it to the attributes set and create a method for it
attributes << k.to_sym
define_instance_method(k) { return v }
end
end
# Some resources are not linked together, so they have to be manually
# added here.
case @resource_type
when 'instance'
define_instance_method('live_tasks') do |*args|
Resource.process(client, *client.do_get(href + '/live/tasks', *args))
end
end
# Add destroy method to relevant resources
if RESOURCE_ACTIONS[:destroy].include?(@resource_type)
define_instance_method('destroy') do
client.do_delete(href)
end
end
# Add update method to relevant resources
if RESOURCE_ACTIONS[:update].include?(@resource_type)
define_instance_method('update') do |*args|
client.do_put(href, *args)
end
end
end
end
end
Refs #9925: starting to refactor and adding in the index call.
require 'rest_client' # rest_client 1.6.1
require 'json'
require 'set'
require 'cgi'
# RightApiClient has the generic get/post/delete/put calls that are used
# by resources
class RightApiClient
ROOT_RESOURCE = '/api/session'
ROOT_INSTANCE_RESOURCE = '/api/session/instance'
# permitted parameters for initializing
AUTH_PARAMS = %w(email password account_id api_url api_version cookies instance_token)
# NOTE: instances will be handled in a special case
INSTANCE_ACTIONS = {
:clouds => {:volumes => 'do_get', :volume_types => 'do_get', :volume_attachments => 'do_get', :volume_snapshots => 'do_get', :instances => 'get_new_dummy_class'}
}
#
# Methods shared by the RightApiClient, Resource and resource arrays.
#
module Helper
  # Define a method on the receiver's singleton class, so each
  # client/resource instance grows its own API methods at runtime.
  def define_instance_method(meth, &blk)
    (class << self; self; end).module_eval do
      define_method(meth, &blk)
    end
  end

  # All API methods dynamically defined on this client or resource
  # (singleton methods only).
  def api_methods
    self.methods(false)
  end

  # Define methods that query the API for the associated resources.
  # Some resources have many links sharing the same rel; all hrefs for a
  # rel are captured by one method, which then returns an array.
  def get_associated_resources(client, links, associations)
    # First go through the links and group the rels together.
    rels = {}
    links.each do |link|
      if rels[link['rel'].to_sym] # if we have already seen this rel attribute
        rels[link['rel'].to_sym] << link['href']
      else
        rels[link['rel'].to_sym] = [link['href']]
      end
    end
    # Note: hrefs will be an array, even if there is only one link with that rel.
    rels.each do |rel, hrefs|
      # Add the link to the associations set if present (Resource keeps one).
      associations << rel if associations != nil
      # :tags and :backups are special-cased below.  BUGFIX: the original
      # guard (`rels != :tags || rels != :backups`) compared the whole
      # hash against a symbol and was therefore always true; test the rel
      # itself.  (The net effect is unchanged because the special-case
      # definitions overwrote the generic one.)
      unless [:tags, :backups].include?(rel)
        define_instance_method(rel) do |*args|
          if hrefs.size == 1 # Only one link for the specific rel attribute
            if has_id(*args)
              # show/update/destroy: address the single resource directly
              Resource.new(client, *client.do_get(hrefs.first, *args))
            else
              # index/create: hand the collection path to a root resource
              # without performing the GET yet (debug output removed)
              path = add_filters_to_path(hrefs.first, *args)
              RootResource.new(client, path, rel)
            end
          else
            # @@ To Do
            resources = []
            hrefs.each do |href|
              resources << Resource.new(client, *client.do_get(href, *args))
            end
            # return the array of resource objects
            resources
          end
        end
      end
      # Design choice for tags since you cannot query do_get on /api/tags:
      # expose a dummy resource carrying the tag actions instead.
      define_instance_method(rel) do |*params|
        # hrefs will only have one element, namely api/tags
        DummyResource.new(client, hrefs.first, {:by_tag => 'do_post', :by_resource => 'do_post', :multi_add => 'do_post', :multi_delete =>'do_post'})
      end if rel == :tags
      # The backups hack
      add_in_backups(client, hrefs.first) if rel == :backups
    end
  end

  # Specific to backups. A hack :<(
  # client.backups.create(params) must work, but client.backups performs a
  # GET that needs the lineage as a parameter; with args the GET happens,
  # without args a dummy resource exposing create/cleanup is returned.
  def add_in_backups(client, path)
    define_instance_method(:backups) do |*args|
      if args != []
        Resource.new(client, *client.do_get(path, *args))
      else
        DummyResource.new(client, path, {:create => 'do_post', :cleanup => 'do_post'})
      end
    end
  end

  # Append "/<id>" when params carries :id (consumes the key).
  def add_id_to_path(path, params = {})
    path += "/#{params.delete(:id)}" if has_id(params)
    path
  end

  # True when the params hash addresses a single resource by :id.
  def has_id(params = {})
    params.has_key?(:id)
  end

  # Build the query string by hand: RestClient's params hash cannot repeat
  # a key, but filtering requires multiple "filter[]" parameters.
  # BUGFIX: removed leftover debug `p` statements that printed the path
  # and params on every request.
  def add_filters_to_path(path, params = {})
    filters = params.delete(:filters)
    params_string = params.map{|k,v| "#{k.to_s}=#{CGI::escape(v.to_s)}" }.join('&')
    if filters && filters.any?
      path += "?filter[]=" + filters.map{|f| CGI::escape(f) }.join('&filter[]=')
      path += "&#{params_string}"
    else
      path += "?#{params_string}"
    end
    # If present, remove ? and & at end of path
    path.chomp!('&')
    path.chomp!('?')
    path
  end
end
include Helper
# The cookies for our client.
attr_reader :cookies
# Build a client from an options hash (see AUTH_PARAMS for accepted keys)
# and log in immediately.  Depending on whether an instance token or
# email/password credentials were supplied, a different set of root
# methods is defined on this instance.
def initialize(args)
# Default params
@api_url, @api_version = 'https://my.rightscale.com', '1.5'
# Initializing all instance variables from hash
args.each { |key,value|
instance_variable_set("@#{key}", value) if value && AUTH_PARAMS.include?(key.to_s)
} if args.is_a? Hash
raise 'This API Client is only compatible with RightScale API 1.5 and upwards.' if (Float(@api_version) < 1.5)
@client = RestClient::Resource.new(@api_url)
# There are three options for login: credentials, instance token, or if the user already has the cookies they can just use those
@cookies ||= login()
if @instance_token
# Instance-facing session: only a limited API surface is reachable.
define_instance_method(:get_instance) do |*params|
Resource.new(self, *self.do_get(ROOT_INSTANCE_RESOURCE, *params))
end
# Like tags, you cannot call api/clouds when using an instance_token
INSTANCE_ACTIONS.each do |dummy_meth, meths|
define_instance_method(dummy_meth) do |*params|
path = add_id_to_path("/api/clouds", *params)
DummyResource.new(self, path, meths)
end
end
# add in the hack for the backups
add_in_backups(self, "/api/backups")
else
# Session is the root resource that has links to all the base resources,
# to the client since they can be accessed directly
define_instance_method(:session) do |*params|
Resource.new(self, *self.do_get(ROOT_RESOURCE, *params))
end
get_associated_resources(self, session.links, nil)
end
end
# Fixed, credential-free description of this client.
def to_s
  '#<RightApiClient>'
end
# Log HTTP calls to file (file can be STDOUT as well)
# Enable RestClient's global wire logging; affects every RestClient
# call in this process, not just this client.
def log(file)
RestClient.log = file
end
# Users shouldn't need to call the following methods directly
# you can login with username and password or with an instance_token
# Authenticate with either an instance token or email/password and
# return the session cookies from the response.  A 302 answer is
# accepted as-is (the server redirects on successful login); any other
# code goes through RestClient's default handling, which raises for
# error responses.
def login
if @instance_token
params = {
'instance_token' => @instance_token
}
path = ROOT_INSTANCE_RESOURCE
else
params = {
'email' => @email,
'password' => @password,
}
path = ROOT_RESOURCE
end
params['account_href'] = "/api/accounts/#{@account_id}"
response = @client[path].post(params, 'X_API_VERSION' => @api_version) do |response, request, result, &block|
case response.code
when 302
response
else
response.return!(request, result, &block)
end
end
response.cookies
end
# Common request headers for every API call: the API version pin, the
# current session cookies, and a JSON accept header.
def headers
  {
    'X_API_VERSION' => @api_version,
    :cookies => @cookies,
    :accept => :json
  }
end
# Generic get
#
# Fetches +path+ and returns +[parsed_json, resource_type]+, where the
# resource type is extracted from a rightscale content-type header (or
# is the empty string otherwise). A 403 "Session cookie is expired or
# invalid" failure triggers one transparent re-login and retry.
#
# NOTE(review): add_id_to_path / add_filters_to_path are defined
# elsewhere in this file — exact param handling lives there.
def do_get(path, params={})
  # Resource id is a special param as it needs to be added to the path
  path = add_id_to_path(path, params)
  # Normally you would just pass a hash of query params to RestClient,
  # but unfortunately it only takes them as a hash, and for filtering
  # we need to pass multiple parameters with the same key. The result
  # is that we have to build up the query string manually.
  path = add_filters_to_path(path, params)
  begin
    # Return content type so the resulting resource object knows what kind of resource it is.
    resource_type, body = @client[path].get(headers) do |response, request, result, &block|
      case response.code
      when 200
        # Get the resource_type from the content_type, the resource_type will
        # be used later to add relevant methods to relevant resources.
        type = ''
        if result.content_type.index('rightscale')
          type = get_resource_type(result.content_type)
        end
        [type, response.body]
      else
        # Non-200 becomes a RuntimeError so the rescue below can inspect it.
        raise "Unexpected response #{response.code.to_s}, #{response.body}"
      end
    end
  #Session cookie is expired or invalid
  rescue RuntimeError => e
    if re_login?(e)
      @cookies = login()
      retry
    else
      raise e
    end
  end
  data = JSON.parse(body)
  [data, resource_type]
end
# Generic post
#
# Posts +params+ to +path+. A 201/202 means a resource was created: the
# Location header is followed with a GET and wrapped in a Resource. A
# 200 with a rightscale content type (the tags resource) is wrapped
# directly. Other 2xx responses are returned as-is; everything else
# raises. Expired sessions trigger one transparent re-login and retry.
def do_post(path, params={})
  begin
    @client[path].post(params, headers) do |response, request, result, &block|
      case response.code
      when 201, 202
        # Create and return the resource
        href = response.headers[:location]
        Resource.new(self, *self.do_get(href))
      when 200..299
        # this is needed for the tags Resource -- which returns a 200 and has a content type
        # therefore, a resource object needs to be returned
        if response.code == 200 && response.headers[:content_type].index('rightscale')
          type = get_resource_type(response.headers[:content_type])
          # NOTE(review): this passes four arguments to Resource.new —
          # confirm Resource#initialize accepts the trailing path/href.
          Resource.new(self, JSON.parse(response), type, path)
        else
          response.return!(request, result, &block)
        end
      else
        raise "Unexpected response #{response.code.to_s}, #{response.body}"
      end
    end
  rescue RuntimeError => e
    if re_login?(e)
      @cookies = login()
      retry
    else
      raise e
    end
  end
end
# Generic delete
#
# Issues a DELETE to +path+. Anything other than a 200 raises; an
# expired/invalid session triggers one transparent re-login and retry.
def do_delete(path)
  begin
    @client[path].delete(headers) do |response, request, result, &block|
      unless response.code == 200
        raise "Unexpected response #{response.code.to_s}, #{response.body}"
      end
    end
  rescue RuntimeError => e
    raise e unless re_login?(e)
    @cookies = login()
    retry
  end
end
# Generic put
#
# Issues a PUT to +path+ with +params+. The API signals success with an
# empty 204; anything else raises. An expired/invalid session triggers
# one transparent re-login and retry.
def do_put(path, params={})
  begin
    @client[path].put(params, headers) do |response, request, result, &block|
      unless response.code == 204
        raise "Unexpected response #{response.code.to_s}, #{response.body}"
      end
    end
  rescue RuntimeError => e
    raise e unless re_login?(e)
    @cookies = login()
    retry
  end
end
# Whether the error raised by one of the do_* helpers indicates an
# expired/invalid session cookie (an HTTP 403 carrying the API's
# session-expiry message), in which case the caller should re-login
# and retry the request.
#
# Previously this returned an Integer match position (or nil) via
# String#index and a regex with a redundant leading `.*`; now it is a
# proper boolean predicate with the same truthiness for every input.
#
# @param e [RuntimeError] error raised by do_get/do_post/do_put/do_delete
# @return [Boolean] true when a re-login should be attempted
def re_login?(e)
  e.message.include?('403') &&
    e.message.include?('Session cookie is expired or invalid')
end
# returns the resource_type
#
# Pulls the resource-type token out of a rightscale content-type header,
# e.g. "application/vnd.rightscale.server+json" => "server".
# (Raises NoMethodError on nil if the header does not match, exactly as
# the previous scan-based implementation did.)
def get_resource_type(content_type)
  content_type.match(/\.rightscale\.(.*)\+json/)[1]
end
# Given a path returns a RightApiClient::Resource instance.
#
def resource(path, params = {})
  data, type = do_get(path, params)
  Resource.new(self, data, type)
end
# This is need for resources like tags where the api/tags/ call is not supported.
# This will define a dummy object and its methods
class DummyResource
  include Helper

  # path is the base_resource's href
  # params is a hash where:
  #   key   = method name
  #   value = action that is needed (like do_post, do_get...)
  def initialize(client, path, params={})
    params.each do |meth, action|
      define_instance_method(meth) do |*args|
        # do_get does not return a resource object (unlike do_post)
        if meth == :instances
          # Bug fix: build the sub-path in a fresh local. The original
          # reassigned `path` here, mutating the variable captured by
          # every method closure, so each call appended to the previous
          # path and corrupted all subsequent calls on this object.
          instances_path = path.to_str + add_id_to_path("/instances", *args)
          DummyResource.new(client, instances_path, {:live_tasks => 'do_get'})
        elsif meth == :live_tasks
          Resource.new(client, *client.do_get(path.to_str + '/live/tasks', *args))
        elsif action == 'do_get'
          Resource.new(client, *client.do_get(path.to_str + '/' + meth.to_s, *args))
        elsif meth == :create
          client.send action, path, *args
        else
          # send converts action (a string) into a method call
          client.send action, (path.to_str + '/' + meth.to_s), *args
        end
      end
    end
  end
end
class RootResource
  include Helper

  # Root resources that support a plain create/index action.
  ROOT_RESOURCE_ACTIONS = {
    :create => [:deployments, :server_arrays, :servers, :ssh_keys, :volumes, :volume_snapshots, :volume_attachments],
    :index => [:clouds],
  }

  # Special-cased actions, keyed by resource type.
  # (The constant keeps its historical misspelling because it is publicly
  # reachable as RootResource::ROOT_RESOURCE_SPECAIL_ACTIONS.)
  ROOT_RESOURCE_SPECAIL_ACTIONS = {
    # Special cases
    :instances => {:multi_terminate => 'do_post', :multi_run_executable => 'do_post'},
    :input => {:multi_update => 'do_put'}
  }

  # @param client [RightApiClient] client used to issue requests
  # @param path [String] href of this root resource
  # @param resource_type [Symbol] which root resource this is
  def initialize(client, path, resource_type)
    # Add create methods for the relevant root resources
    if ROOT_RESOURCE_ACTIONS[:create].include?(resource_type)
      self.define_instance_method('create') do |*args|
        client.do_post(path, *args)
      end
    end

    if ROOT_RESOURCE_ACTIONS[:index].include?(resource_type)
      self.define_instance_method('index') do |*args|
        Resource.new(client, *client.do_get(path, *args))
      end
    end

    # ADDING IN SPECIAL CASES
    # Bug fix: indexing the hash directly meant any resource type without
    # special actions (e.g. :clouds) got nil and raised NoMethodError on
    # #each; default to an empty hash instead.
    ROOT_RESOURCE_SPECAIL_ACTIONS.fetch(resource_type, {}).each do |meth, action|
      action_path = Resource.insert_in_path(path, meth)
      self.define_instance_method(meth) do |*args|
        client.send action, action_path, *args
      end
    end
  end
end
# Represents resources returned by API calls, this class dynamically adds
# methods and properties to instances depending on what type of resource
# they are.
class Resource
  include Helper

  # The API does not provide information about the basic actions that can be
  # performed on resources so we need to define them
  RESOURCE_ACTIONS = {
    :destroy => ['deployment', 'server_array', 'server', 'ssh_key', 'volume', 'volume_snapshot', 'volume_attachment', 'backup'],
    :update => ['deployment', 'instance', 'server_array', 'server', 'backup'],
    :show => ['cloud']
  }

  attr_reader :client, :attributes, :associations, :actions, :raw, :resource_type

  # Insert the given term at the correct place in the path, so
  # if there are parameters in the path then insert it before them.
  def self.insert_in_path(path, term)
    if path.index('?')
      path.sub('?', "/#{term}?")
    else
      "#{path}/#{term}"
    end
  end

  # Compact representation: resource type plus name/resource_uid when the
  # resource happens to expose them.
  def inspect
    "#<#{self.class.name} " +
    "resource_type=\"#{@resource_type}\"" +
    "#{', name='+name.inspect if self.respond_to?(:name)}" +
    "#{', resource_uid='+resource_uid.inspect if self.respond_to?(:resource_uid)}>"
  end

  # Builds a resource from an API response hash, dynamically defining
  # reader methods for attributes, action methods (as POSTs), and child
  # association accessors.
  #
  # Bug fix: two call sites in this file (do_post's tags handling and the
  # embedded-instance optimization below) construct resources with a
  # fourth argument, which previously raised ArgumentError because
  # initialize only accepted three parameters. That argument is now an
  # optional fallback href, used when the payload carries no 'self' link.
  def initialize(client, hash, resource_type, path = nil)
    @client = client
    @resource_type = resource_type
    @raw = hash.dup
    @attributes, @associations, @actions = Set.new, Set.new, Set.new

    links = hash.delete('links') || []
    raw_actions = hash.delete('actions') || []

    # We obviously can't re-define a method called 'self', so pull
    # out the 'self' link and make it 'href'.
    self_index = links.index { |link| link['rel'] == 'self' }
    if self_index
      hash['href'] = links.delete_at(self_index)['href']
    elsif path
      # No 'self' link in the payload; fall back to the caller-supplied
      # path. NOTE(review): previously this argument could never arrive
      # here — confirm callers expect href to be populated from it.
      hash['href'] = path
    end

    # Add links to attributes set and create a method that returns the links
    attributes << :links
    define_instance_method(:links) { return links }

    # API doesn't tell us whether a resource action is a GET or a POST, but
    # I think they are all post so add them all as posts for now.
    raw_actions.each do |action|
      action_name = action['rel']
      # Add it to the actions set
      actions << action_name.to_sym

      define_instance_method(action_name.to_sym) do |*args|
        href = hash['href'] + "/" + action['rel']
        client.do_post(href, *args)
      end
    end

    get_associated_resources(client, links, associations)

    hash.each do |k, v|
      # If a parent resource is requested with a view then it might return
      # extra data that can be used to build child resources here, without
      # doing another get request.
      if associations.include?(k.to_sym)
        # We could use one rescue block rather than these multiple ifs, but
        # exceptions are slow and the whole points of this code block is
        # optimization so we'll stick to using ifs.
        # v might be an array or hash so use include rather than has_key
        if v.include?('links')
          child_self_link = v['links'].find { |target| target['rel'] == 'self' }
          if child_self_link
            child_href = child_self_link['href']
            if child_href
              # Currently, only instances need this optimization, but in the
              # future we might like to extract resource_type from child_href
              # and not hard-code it.
              if child_href.index('instance')
                define_instance_method(k) { Resource.new(client, v, 'instance', child_href) }
              end
            end
          end
        end
      else
        # Add it to the attributes set and create a method for it
        attributes << k.to_sym
        define_instance_method(k) { return v }
      end
    end

    # Some resources are not linked together, so they have to be manually
    # added here.
    case @resource_type
    when 'instance'
      define_instance_method('live_tasks') do |*args|
        Resource.new(client, *client.do_get(href + '/live/tasks', *args))
      end
    end

    if RESOURCE_ACTIONS[:show].include?(@resource_type)
      define_instance_method('show') do |*args|
        client.do_get(href, *args)
      end
    end

    # Add destroy method to relevant resources
    if RESOURCE_ACTIONS[:destroy].include?(@resource_type)
      define_instance_method('destroy') do
        client.do_delete(href)
      end
    end

    # Add update method to relevant resources
    if RESOURCE_ACTIONS[:update].include?(@resource_type)
      define_instance_method('update') do |*args|
        client.do_put(href, *args)
      end
    end
  end
end
end
|
# Loads stored gestures from YAML at startup and connects to the USB
# dongle on COM4.
class Gesturecontroller
  def initialize
    @gestures = Array.new
    File.open('Gestures/gestures.yml', 'r') do |gesture_file|
      YAML.load_documents(gesture_file) { |gesture| @gestures << gesture }
    end
    @dongle = Usbmodem.new()
    @dongle.connect("COM4")
  end

  # Appends a fresh Gesture to the in-memory list.
  def createnewgesture
    newgesture = Gesture.new
    @gestures << newgesture
  end

  # Persists every gesture back to the YAML file.
  #
  # Bug fix: the file was previously opened in the default read-only
  # mode, so YAML.dump could never write — open for writing (truncating
  # the old contents) so the gesture list is actually saved.
  def storeallgestures
    File.open('Gestures/gestures.yml', 'w') { |out| YAML.dump( @gestures, out) }
  end
end
Took the dongle code that was previously in Gesturecontroller out of the class.
# Loads stored gestures from YAML at startup (dongle handling has been
# moved out of this class).
class Gesturecontroller
  def initialize
    @gestures = Array.new
    File.open('Gestures/gestures.yml', 'r') do |gesture_file|
      @gestures = YAML.load(gesture_file)
    end
  end

  # Appends a fresh Gesture to the in-memory list.
  def createnewgesture
    newgesture = Gesture.new
    @gestures << newgesture
  end

  # Persists every gesture back to the YAML file.
  #
  # Bug fix: the file was previously opened in the default read-only
  # mode, so YAML.dump could never write — open for writing (truncating
  # the old contents) so the gesture list is actually saved.
  def storeallgestures
    File.open('Gestures/gestures.yml', 'w') { |out| YAML.dump( @gestures, out) }
  end
end
# Copyright (c) 2010-2015 Arxopia LLC.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Arxopia LLC nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ARXOPIA LLC BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
module Risu
  module Models
    # Item Model
    #
    # One Item is a single scan finding: it joins a Host to a Plugin at a
    # given severity (0=info .. 4=critical). All query helpers below are
    # class methods returning ActiveRecord relations or report artifacts.
    class Item < ActiveRecord::Base
      belongs_to :host
      belongs_to :plugin
      has_many :attachments

      class << self
        # Queries for all risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def risks
          where(:severity => [0,1,2,3,4])
        end

        # Queries for all the critical risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def critical_risks
          where(:severity => 4)
        end

        # Queries for all the high risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def high_risks
          where(:severity => 3)
        end

        # Queries for all the medium risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def medium_risks
          where(:severity => 2)
        end

        # Queries for all the low risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def low_risks
          where(:severity => 1)
        end

        # Queries for all the info risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def info_risks
          where(:severity => 0)
        end

        # Queries for all the unique Critical risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def critical_risks_unique
          where(:severity => 4).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
        end

        # Queries for all the unique high risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def high_risks_unique
          where(:severity => 3).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
        end

        # Queries for all the unique Critical findings and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def critical_risks_unique_sorted
          select("items.*").select("count(*) as count_all").where(:severity => 4).group(:plugin_id).order("count_all DESC")
        end

        # Queries for all the unique high findings and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def high_risks_unique_sorted
          select("items.*").select("count(*) as count_all").where(:severity => 3).group(:plugin_id).order("count_all DESC")
        end

        # Queries for all the unique medium risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def medium_risks_unique
          where(:severity => 2).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
        end

        # Queries for all the unique medium findings and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def medium_risks_unique_sorted
          select("items.*").select("count(*) as count_all").where(:severity => 2).group(:plugin_id).order("count_all DESC")
        end

        # Queries for all the unique low risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def low_risks_unique
          where(:severity => 1).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
        end

        # Queries for all the unique low findings and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def low_risks_unique_sorted
          select("items.*").select("count(*) as count_all").where(:severity => 1).group(:plugin_id).order("count_all DESC")
        end

        # Queries for all the unique info risks in the database
        #
        # @return [ActiveRecord::Relation] with the query results
        def info_risks_unique
          #where(:severity => 0).joins(:plugin).order(:cvss_base_score).group(:plugin_id)
          where(:severity => 0).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
        end

        # Queries for all the unique info findings and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def info_risks_unique_sorted
          select("items.*").select("count(*) as count_all").where(:severity => 0).group(:plugin_id).order("count_all DESC")
        end

        # Queries for all the risks grouped by service type, used for the Vulnerabilities by Service graph
        #
        # @return [ActiveRecord::Relation] with the query results
        def risks_by_service(limit=10)
          select("items.*").select("count(*) as count_all").where("svc_name != 'unknown' and svc_name != 'general'").group(:svc_name).order("count_all DESC").limit(limit)
        end

        # Queries for all the Critical risks by plugin
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def risks_by_plugin(limit=10)
          select("items.*").select("count(*) as count_all").joins(:plugin).where("plugin_id != 1").where(:severity => 4).group(:plugin_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the Critical risks by host
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def risks_by_host(limit=10)
          select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 4).group(:host_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the Critical risks by host
        # (NOTE(review): identical query to risks_by_host above.)
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def critical_risks_by_host(limit=10)
          select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 4).group(:host_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the High risks by host
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def high_risks_by_host(limit=10)
          select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 3).group(:host_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the Medium risks by host
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def medium_risks_by_host(limit=10)
          select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 2).group(:host_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the Low risks by host
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [ActiveRecord::Relation] with the query results
        def low_risks_by_host(limit=10)
          select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 1).group(:host_id).order("count_all DESC").limit(limit)
        end

        # Queries for all the hosts with the Microsoft patch summary plugin (38153)
        #
        # @return [ActiveRecord::Relation] with the query results
        def ms_patches
          where(:plugin_id => 38153).joins(:host)
        end

        # Queries for all host with the Microsoft Update Summary plugin(12028)
        #
        # @return [ActiveRecord::Relation] with the query results
        def ms_update
          where(:plugin_id => 12028).joins(:host)
        end

        # Generates a Graph of all the risks by service
        #
        # @param limit Limits the result to a specific number, default 10
        #
        # @return [StringIO] Object containing the generated PNG image
        def risks_by_service_graph(limit=10)
          # NOTE(review): GRAPH_WIDTH and Risu::GRAPH_COLORS are constants
          # defined elsewhere in the Risu codebase.
          g = Gruff::Pie.new(GRAPH_WIDTH)
          g.title = sprintf "Top %d Services By Vulnerability", Item.risks_by_service(limit).to_a.count
          g.sort = false
          g.marker_count = 1
          g.theme = {
            :colors => Risu::GRAPH_COLORS,
            :background_colors => %w(white white)
          }

          Item.risks_by_service(limit).to_a.each do |service|
            g.data(service.svc_name, Item.all.where(:svc_name => service.svc_name).count)
          end

          StringIO.new(g.to_blob)
        end

        # Generates text for the Risks by Service graph
        #
        # @return [String] Text based on the Risks by Service graph
        def risks_by_service_graph_text
          "This graph is a representation of the findings found by service. This graph can help " +
          "understand what services are running on the network and if they are vulnerable, where " +
          "the risks are and how they should be protected.\n\n"
        end

        # Generates a Graph of all the risks by severity
        #
        # @return [StringIO] Object containing the generated PNG image
        def risks_by_severity_graph
          g = Gruff::Bar.new(GRAPH_WIDTH)
          g.title = "Risks By Severity"
          g.sort = false
          g.marker_count = 1
          g.theme = {
            :colors => Risu::GRAPH_COLORS,
            :background_colors => %w(white white)
          }

          crit = Item.critical_risks.count
          high = Item.high_risks.count
          medium = Item.medium_risks.count
          low = Item.low_risks.count
          #info = Item.info_risks.count

          # Defensive nil guards before feeding the counts to Gruff.
          if crit == nil then crit = 0 end
          if high == nil then high = 0 end
          if medium == nil then medium = 0 end
          if low == nil then low = 0 end
          #if info == nil then info = 0 end

          g.data("Critical", crit)
          g.data("High", high)
          g.data("Medium", medium)
          g.data("Low", low)

          StringIO.new(g.to_blob)
        end

        # Queries for all DISA Stig findings by category
        #
        # @param category The DISA Stig category I, II, III
        #
        # @return [ActiveRecord::Relation] with the query results
        def stig_findings(category="I")
          where('plugin_id IN (:plugins)', :plugins => Plugin.where(:stig_severity => category).select(:id)).order("severity DESC")
        end

        # Generates a Graph of all the risks by severity
        #
        # @return [StringIO] Object containing the generated PNG image
        def stigs_severity_graph
          g = Gruff::Bar.new(GRAPH_WIDTH)
          g.title = "Stigs By Severity"
          g.sort = false
          g.marker_count = 1
          g.theme = {
            :colors => Risu::GRAPH_COLORS,
            :background_colors => %w(white white)
          }

          i = Item.stig_findings("I").count
          ii = Item.stig_findings("II").count
          iii = Item.stig_findings("III").count

          if i == nil then i = 0 end
          if ii == nil then ii = 0 end
          if iii == nil then iii = 0 end

          g.data("Cat I", i)
          g.data("Cat II", ii)
          g.data("Cat III", iii)

          StringIO.new(g.to_blob)
        end

        # Calculates a vulnerable host percent based on Critical and High findings
        # (unique_vuln_crit_high_count / host_count) * 100
        #
        # @return [FixNum] Percentage of vulnerable hosts
        def calculate_vulnerable_host_percent
          #patch to fix double counting
          #unique_hosts_with_critical_and_high = Host.unique_hosts_with_critical.count + Host.unique_hosts_with_high.count
          unique_hosts_with_critical_and_high = Host.unique_hosts_with_critical_and_high_count

          host_percent = (unique_hosts_with_critical_and_high.to_f / Host.count.to_f) * 100
        end

        # Vulnerable-host percentage as if the top (notable) critical
        # plugins had been patched: excludes those plugin_ids from the
        # critical set, then counts distinct hosts with remaining
        # critical or high findings.
        def calculate_vulnerable_host_percent_with_patches_applied
          exclude_list = []
          hosts = []

          # notable_order_by_cvss_raw yields [plugin_id, count] pairs.
          Item.notable_order_by_cvss_raw.each do |h, k|
            exclude_list << h
          end

          criticals = Item.critical_risks.where.not(:plugin_id => exclude_list)

          criticals.each do |item|
            hosts << item.host_id
          end

          Item.high_risks.each do |item|
            hosts << item.host_id
          end

          hosts.uniq!

          (hosts.count.to_f / Host.count.to_f) * 100
        end

        # Based on the risk_percent returns a adjective representative
        #
        # @param risk_percent Calculated percentage of risk based on {Item::calculate_vulnerable_host_percent}
        #
        # @return [String] Textual representation of the risk_percent
        def adjective_for_risk_text risk_percent
          adjective = case risk_percent
            when 0..5
              "excellent"
            when 6..10
              "great"
            when 11..15
              "good"
            when 16..20
              "fair"
            else
              "poor"
          end
        end

        # Builds a sentence based on the risk_percent to describe the risk
        #
        # @param risk_percent Calculated percentage of risk based on {Item::calculate_vulnerable_host_percent}
        #
        # @return [String] Sentence describing the implied significance of the risk_percent
        def risk_text risk_percent
          percent_text = case risk_percent
            when 0..5.99
              "This implies that only a handful of computers are missing patches, and the current patch management is working well."
            when 6..10.99
              "This implies that there is a minor patch management issue. If there is a patch management system, it should be checked for problems. " +
              "Each host should also be inspected to be certain it can receive patches."
            when 11..15.99
              "This implies that there is a substantial patch management issue. If there is a patch management system, it should be checked for problems. " +
              "Each host should also be inspected to be certain it can receive patches."
            when 16..20
              "This implies that there is a significant patch management issue. If there is a patch management system, it should be checked for problems. " +
              "Each host should also be inspected to be certain it can receive patches."
            else
              "This implies that there is a critical patch management problem on the network. Any patch management solutions should " +
              "be inspected for issues and they should be corrected as soon as possible. Each host should also be inspected to be certain it can receive patches."
          end
        end

        # @todo change Report.title to a real variable
        # @todo rewrite this
        def risks_by_severity_graph_text
          host_percent = calculate_vulnerable_host_percent()
          adjective = adjective_for_risk_text(host_percent)
          # NOTE(review): the local below shadows the risk_text method for
          # the rest of this method body; the parenthesized call on the
          # right-hand side still resolves to the method.
          risk_text = risk_text(host_percent)

          graph_text = "This bar graph is a representation of the findings by severity; the " +
          "graph shows that, overall, #{Report.title} has a #{adjective} handle on the patch " +
          "management of the network. "

          #graph_text = "This bar graph is a representation of the findings by severity; the " +
          #{}"graph shows that, Overall #{Report.title} needs to implement patch management and configuration management as a priority."

          #if adjective == "good" or adjective == "fair"
          # graph_text << "But improvements in patch management could be made to ensure an excellent rating."
          #end

          graph_text << "\n\n"
          graph_text << "The majority of the critical findings were found on #{host_percent.round}% of the total assessed computers. #{risk_text}\n\n"
          graph_text << "The systems with critical vulnerabilities represent the largest threat to the network, " +
          "so patching this group is paramount to the overall network security. It only takes one vulnerability " +
          "to create a security incident.\n\n"
          graph_text << "It should be noted that low findings and open ports represent the discovery "
          graph_text << "of network services and open ports. Typically, these are not an indication of "
          graph_text << "a serious problem and pose little to no threat. However, the correlation of "
          graph_text << "data between the different severity levels could be used to determine degree "
          graph_text << "of vulnerability for a given system.\n"

          return graph_text
        end

        # Vulnerable-host percentage, rounded, as display text (e.g. "12%").
        def risk_percent_rounded_text
          "#{calculate_vulnerable_host_percent().round}%"
        end

        # Patched-scenario vulnerable-host percentage, rounded, as display text.
        def risk_percent_patched_rounded_text
          "#{calculate_vulnerable_host_percent_with_patches_applied().round}%"
        end

        # Counts of critical findings grouped by plugin, ordered by CVSS
        # base score; returns a Hash of plugin_id => count.
        #
        # @todo comment
        #
        def notable_order_by_cvss_raw
          #MIGHT NOT BE CORRECT @TODO
          #return Item.joins(:plugin).where(:severity => 4).order("plugins.cvss_base_score").count(:all, :group => :plugin_id)

          return Item.joins(:plugin).where(:severity => 4).order("plugins.cvss_base_score").group(:plugin_id).distinct.count
        end

        # Scrubs a plugin_name to remove all pointless data
        #
        # @return [String] Scrubbed plugin name
        def scrub_plugin_name (name)
          return name.gsub("(remote check)", "").gsub("(uncredentialed check)", "").gsub(/(\(\d.*\))/, "")
        end

        # Returns an array of plugin_id and plugin_name for the top 10
        # findings unsorted
        # (NOTE(review): actually returns [plugin_id, count] rows sorted
        # by count descending — the name and doc look stale.)
        #
        # @return [Array] Unsorted top 10 findings
        def top_10_sorted_raw
          raw = notable_order_by_cvss_raw
          data = Array.new

          raw.each do |vuln|
            row = Array.new
            plugin_id = vuln[0]
            count = vuln[1]

            row.push(plugin_id)
            row.push(count)
            data.push(row)
          end

          data = data.sort do |a, b|
            b[1] <=> a[1]
          end

          return data
        end

        # Returns an array of plugin_id and plugin_name for the top 10
        # findings sorted by CVSS score
        #
        # @return [Array] Sorted top 10 findings
        def top_10_sorted
          raw = notable_order_by_cvss_raw
          data = Array.new

          raw.each do |vuln|
            row = Array.new
            plugin_id = vuln[0]
            count = vuln[1]
            # One Plugin lookup per row (N+1 style); plugin names are
            # cleaned for display.
            name = scrub_plugin_name(Plugin.find_by_id(plugin_id).plugin_name)

            row.push(name)
            row.push(count)
            data.push(row)
          end

          data = data.sort do |a, b|
            b[1] <=> a[1]
          end

          return data
        end

        # Returns a prawn pdf table for the top 10 notable findings
        #
        # @todo change this method to return a array/table and let the template render it
        # @todo rename to notable_table also
        #
        # @param output device to write the table to
        def top_10_table(output)
          headers = ["Description", "Count"]
          header_widths = {0 => (output.bounds.width - 50), 1 => 50}

          data = top_10_sorted

          output.table([headers] + data[0..9], :header => true, :column_widths => header_widths, :width => output.bounds.width) do
            row(0).style(:font_style => :bold, :background_color => 'cccccc')
            cells.borders = [:top, :bottom, :left, :right]
          end
        end

        # Queries for all unique risks and sorts them by count
        #
        # @return [ActiveRecord::Relation] with the query results
        def all_risks_unique_sorted
          select("items.*").select("count(*) as count_all").group(:plugin_id).order("count_all DESC")
        end

        # Returns the plugin that this [Item] belongs to
        #
        # NOTE(review): this is a class method that always uses
        # Item.first's plugin_id and returns a relation, not a record —
        # confirm this is intentional and not meant to be the belongs_to
        # association on an instance.
        #
        # @return [Plugin] the that this [Item] references
        def plugin
          Plugin.where(:id => Item.first.attributes["plugin_id"])
        end

        # Builds a array of findings with their exploitablity values
        # (NOTE(review): method name carries a typo — "exploitablity" —
        # kept because callers reference it by this name.)
        #
        # @param [ActiveRecord::Relation] findings to build matrix on
        #
        # @return [Array] with the rows of name, total, core, metasploit, canvas, exploithub, d2elliot
        def exploitablity_matrix findings
          results = Array.new

          findings.each do |item|
            plugin = Plugin.where(:id => item.plugin_id).first
            name = scrub_plugin_name(plugin.plugin_name)
            total = Item.where(:plugin_id => item.plugin_id).count
            # Exploit-framework columns are stored as the string "true" —
            # presumably imported verbatim from the scan file.
            core = if plugin.exploit_framework_core == "true" then "Yes" else nil end
            metasploit = if plugin.exploit_framework_metasploit == "true" then "Yes" else nil end
            canvas = if plugin.exploit_framework_canvas == "true" then "Yes" else nil end
            exploithub = if plugin.exploit_framework_exploithub == "true" then "Yes" else nil end
            d2elliot = if plugin.exploit_framework_d2_elliot == "true" then "Yes" else nil end

            results.push [name, total, core, metasploit, canvas, exploithub, d2elliot]
          end

          return results
        end
      end
    end
  end
end
Added non-rounded risk-percentage text methods.
# Copyright (c) 2010-2015 Arxopia LLC.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Arxopia LLC nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL ARXOPIA LLC BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
module Risu
module Models
# Item Model
#
class Item < ActiveRecord::Base
belongs_to :host
belongs_to :plugin
has_many :attachments
class << self
# Queries for all risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def risks
where(:severity => [0,1,2,3,4])
end
# Queries for all the high risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def critical_risks
where(:severity => 4)
end
# Queries for all the high risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def high_risks
where(:severity => 3)
end
# Queries for all the medium risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def medium_risks
where(:severity => 2)
end
# Queries for all the low risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def low_risks
where(:severity => 1)
end
# Queries for all the info risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def info_risks
where(:severity => 0)
end
# Queries for all the unique Critical risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def critical_risks_unique
where(:severity => 4).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
end
# Queries for all the unique high risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def high_risks_unique
where(:severity => 3).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
end
# Queries for all the unique Critical findings and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def critical_risks_unique_sorted
select("items.*").select("count(*) as count_all").where(:severity => 4).group(:plugin_id).order("count_all DESC")
end
# Queries for all the unique high findings and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def high_risks_unique_sorted
select("items.*").select("count(*) as count_all").where(:severity => 3).group(:plugin_id).order("count_all DESC")
end
# Queries for all the unique medium risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def medium_risks_unique
where(:severity => 2).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
end
# Queries for all the unique medium findings and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def medium_risks_unique_sorted
select("items.*").select("count(*) as count_all").where(:severity => 2).group(:plugin_id).order("count_all DESC")
end
# Queries for all the unique low risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def low_risks_unique
where(:severity => 1).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
end
# Queries for all the unique low findings and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def low_risks_unique_sorted
select("items.*").select("count(*) as count_all").where(:severity => 1).group(:plugin_id).order("count_all DESC")
end
# Queries for all the unique info risks in the database
#
# @return [ActiveRecord::Relation] with the query results
def info_risks_unique
#where(:severity => 0).joins(:plugin).order(:cvss_base_score).group(:plugin_id)
where(:severity => 0).joins(:plugin).order("plugins.cvss_base_score").group(:plugin_id)
end
# Queries for all the unique info findings and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def info_risks_unique_sorted
select("items.*").select("count(*) as count_all").where(:severity => 0).group(:plugin_id).order("count_all DESC")
end
# Queries for all the risks grouped by service type, used for the Vulnerabilities by Service graph
#
# @return [ActiveRecord::Relation] with the query results
def risks_by_service(limit=10)
select("items.*").select("count(*) as count_all").where("svc_name != 'unknown' and svc_name != 'general'").group(:svc_name).order("count_all DESC").limit(limit)
end
# Queries for all the Critical risks by plugin
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def risks_by_plugin(limit=10)
select("items.*").select("count(*) as count_all").joins(:plugin).where("plugin_id != 1").where(:severity => 4).group(:plugin_id).order("count_all DESC").limit(limit)
end
# Queries for all the Critical risks by host
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def risks_by_host(limit=10)
select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 4).group(:host_id).order("count_all DESC").limit(limit)
end
# Queries for all the Critical risks by host
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def critical_risks_by_host(limit=10)
select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 4).group(:host_id).order("count_all DESC").limit(limit)
end
# Queries for all the High risks by host
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def high_risks_by_host(limit=10)
select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 3).group(:host_id).order("count_all DESC").limit(limit)
end
# Queries for all the Medium risks by host
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def medium_risks_by_host(limit=10)
select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 2).group(:host_id).order("count_all DESC").limit(limit)
end
# Queries for all the Low risks by host
#
# @param limit Limits the result to a specific number, default 10
#
# @return [ActiveRecord::Relation] with the query results
def low_risks_by_host(limit=10)
select("items.*").select("count(*) as count_all").joins(:host).where("plugin_id != 1").where(:severity => 1).group(:host_id).order("count_all DESC").limit(limit)
end
# Queries for all the hosts with the Microsoft patch summary plugin (38153)
#
# @return [ActiveRecord::Relation] with the query results
def ms_patches
where(:plugin_id => 38153).joins(:host)
end
# Queries for all host with the Microsoft Update Summary plugin(12028)
#
# @return [ActiveRecord::Relation] with the query results
def ms_update
where(:plugin_id => 12028).joins(:host)
end
# Generates a Graph of all the risks by service
#
# @param limit Limits the result to a specific number, default 10
#
# @return [StringIO] Object containing the generated PNG image
def risks_by_service_graph(limit=10)
g = Gruff::Pie.new(GRAPH_WIDTH)
g.title = sprintf "Top %d Services By Vulnerability", Item.risks_by_service(limit).to_a.count
g.sort = false
g.marker_count = 1
g.theme = {
:colors => Risu::GRAPH_COLORS,
:background_colors => %w(white white)
}
Item.risks_by_service(limit).to_a.each do |service|
g.data(service.svc_name, Item.all.where(:svc_name => service.svc_name).count)
end
StringIO.new(g.to_blob)
end
# Generates the explanatory caption shown beneath the Risks by Service graph
#
# @return [String] Text based on the Risks by Service graph
def risks_by_service_graph_text
	text = "This graph is a representation of the findings found by service. "
	text << "This graph can help understand what services are running on the network "
	text << "and if they are vulnerable, where the risks are and how they should be protected.\n\n"
	text
end
# Generates a Graph of all the risks by severity
#
# @return [StringIO] Object containing the generated PNG image
def risks_by_severity_graph
	g = Gruff::Bar.new(GRAPH_WIDTH)
	g.title = "Risks By Severity"
	g.sort = false
	g.marker_count = 1
	g.theme = {
		:colors => Risu::GRAPH_COLORS,
		:background_colors => %w(white white)
	}
	# ActiveRecord's count always returns an Integer (never nil), so the
	# former `if x == nil then x = 0` guards were dead code and are removed.
	g.data("Critical", Item.critical_risks.count)
	g.data("High", Item.high_risks.count)
	g.data("Medium", Item.medium_risks.count)
	g.data("Low", Item.low_risks.count)
	StringIO.new(g.to_blob)
end
# Queries for all DISA Stig findings by category
#
# @param category The DISA Stig category I, II, III
#
# @return [ActiveRecord::Relation] with the query results
def stig_findings(category="I")
where('plugin_id IN (:plugins)', :plugins => Plugin.where(:stig_severity => category).select(:id)).order("severity DESC")
end
# Generates a Graph of all the DISA STIG findings by category
#
# @return [StringIO] Object containing the generated PNG image
def stigs_severity_graph
	g = Gruff::Bar.new(GRAPH_WIDTH)
	g.title = "Stigs By Severity"
	g.sort = false
	g.marker_count = 1
	g.theme = {
		:colors => Risu::GRAPH_COLORS,
		:background_colors => %w(white white)
	}
	# ActiveRecord's count always returns an Integer (never nil), so the
	# former `if x == nil then x = 0` guards were dead code and are removed.
	g.data("Cat I", Item.stig_findings("I").count)
	g.data("Cat II", Item.stig_findings("II").count)
	g.data("Cat III", Item.stig_findings("III").count)
	StringIO.new(g.to_blob)
end
# Calculates a vulnerable host percent based on Critical and High findings
# (unique_vuln_crit_high_count / host_count) * 100
#
# @return [Float] Percentage of vulnerable hosts
def calculate_vulnerable_host_percent
	# unique_hosts_with_critical_and_high_count avoids double counting hosts
	# that carry both a Critical and a High finding. The pointless local
	# assignment was removed; the expression itself is the return value.
	(Host.unique_hosts_with_critical_and_high_count.to_f / Host.count.to_f) * 100
end
# @todo w t f
def calculate_vulnerable_host_percent_with_patches_applied
exclude_list = []
hosts = []
Item.notable_order_by_cvss_raw.each do |h, k|
exclude_list << h
end
criticals = Item.critical_risks.where.not(:plugin_id => exclude_list)
criticals.each do |item|
hosts << item.host_id
end
Item.high_risks.each do |item|
hosts << item.host_id
end
hosts.uniq!
(hosts.count.to_f / Host.count.to_f) * 100
end
# Based on the risk_percent returns a adjective representative
#
# @param risk_percent Calculated percentage of risk based on {Item::calculate_vulnerable_host_percent}
#
# @return [String] Textual representation of the risk_percent
def adjective_for_risk_text risk_percent
	# risk_percent is a Float (see calculate_vulnerable_host_percent). The
	# previous integer ranges (0..5, 6..10, ...) left gaps — e.g. 5.5 fell
	# through to "poor". End-exclusive ranges cover fractional values.
	case risk_percent
	when 0...6
		"excellent"
	when 6...11
		"great"
	when 11...16
		"good"
	when 16..20
		"fair"
	else
		"poor"
	end
end
# Builds a sentence based on the risk_percent to describe the risk
#
# @param risk_percent Calculated percentage of risk based on {Item::calculate_vulnerable_host_percent}
#
# @return [String] Sentence describing the implied significance of the risk_percent
def risk_text risk_percent
	# The previous ranges (0..5.99, 6..10.99, ...) left gaps: e.g. 5.995
	# matched nothing and fell through to the "critical" text. End-exclusive
	# ranges cover every non-negative value up to the else branch.
	case risk_percent
	when 0...6
		"This implies that only a handful of computers are missing patches, and the current patch management is working well."
	when 6...11
		"This implies that there is a minor patch management issue. If there is a patch management system, it should be checked for problems. " +
		"Each host should also be inspected to be certain it can receive patches."
	when 11...16
		"This implies that there is a substantial patch management issue. If there is a patch management system, it should be checked for problems. " +
		"Each host should also be inspected to be certain it can receive patches."
	when 16..20
		"This implies that there is a significant patch management issue. If there is a patch management system, it should be checked for problems. " +
		"Each host should also be inspected to be certain it can receive patches."
	else
		"This implies that there is a critical patch management problem on the network. Any patch management solutions should " +
		"be inspected for issues and they should be corrected as soon as possible. Each host should also be inspected to be certain it can receive patches."
	end
end
# @todo change Report.title to a real variable
# @todo rewrite this
def risks_by_severity_graph_text
host_percent = calculate_vulnerable_host_percent()
adjective = adjective_for_risk_text(host_percent)
risk_text = risk_text(host_percent)
graph_text = "This bar graph is a representation of the findings by severity; the " +
"graph shows that, overall, #{Report.title} has a #{adjective} handle on the patch " +
"management of the network. "
#graph_text = "This bar graph is a representation of the findings by severity; the " +
#{}"graph shows that, Overall #{Report.title} needs to implement patch management and configuration management as a priority."
#if adjective == "good" or adjective == "fair"
# graph_text << "But improvements in patch management could be made to ensure an excellent rating."
#end
graph_text << "\n\n"
graph_text << "The majority of the critical findings were found on #{host_percent.round}% of the total assessed computers. #{risk_text}\n\n"
graph_text << "The systems with critical vulnerabilities represent the largest threat to the network, " +
"so patching this group is paramount to the overall network security. It only takes one vulnerability " +
"to create a security incident.\n\n"
graph_text << "It should be noted that low findings and open ports represent the discovery "
graph_text << "of network services and open ports. Typically, these are not an indication of "
graph_text << "a serious problem and pose little to no threat. However, the correlation of "
graph_text << "data between the different severity levels could be used to determine degree "
graph_text << "of vulnerability for a given system.\n"
return graph_text
end
def risk_percent_rounded_text
"#{calculate_vulnerable_host_percent().round}%"
end
def risk_percent_patched_rounded_text
"#{calculate_vulnerable_host_percent_with_patches_applied().round}%"
end
# Vulnerable host percentage formatted with two decimals, e.g. "12.34%"
#
# @return [String] formatted percentage
def risk_percent_text
	# A literal percent sign requires "%%": the previous bare trailing "%"
	# raises ArgumentError (incomplete format specifier) in Ruby's format.
	"%.2f%%" % calculate_vulnerable_host_percent()
end
# Patched-adjusted vulnerable host percentage with two decimals, e.g. "8.12%"
#
# @return [String] formatted percentage
def risk_percent_patched_text
	# A literal percent sign requires "%%": the previous bare trailing "%"
	# raises ArgumentError (incomplete format specifier) in Ruby's format.
	"%.2f%%" % calculate_vulnerable_host_percent_with_patches_applied()
end
#
# @todo comment
#
def notable_order_by_cvss_raw
#MIGHT NOT BE CORRECT @TODO
#return Item.joins(:plugin).where(:severity => 4).order("plugins.cvss_base_score").count(:all, :group => :plugin_id)
return Item.joins(:plugin).where(:severity => 4).order("plugins.cvss_base_score").group(:plugin_id).distinct.count
end
# Scrubs a plugin_name to remove all pointless data
#
# @return [String] Scrubbed plugin name
def scrub_plugin_name (name)
	# Strip the boilerplate check markers and any parenthesised version
	# span (greedy: from the first "(<digit>" to the last ")").
	[
		"(remote check)",
		"(uncredentialed check)",
		/(\(\d.*\))/
	].reduce(name) { |scrubbed, pattern| scrubbed.gsub(pattern, "") }
end
# Returns [plugin_id, count] rows for the notable findings, ordered by
# occurrence count, descending
#
# @return [Array] rows of [plugin_id, count]
def top_10_sorted_raw
	rows = notable_order_by_cvss_raw.map do |plugin_id, count|
		[plugin_id, count]
	end
	rows.sort { |left, right| right[1] <=> left[1] }
end
# Returns [plugin_name, count] rows for the notable findings, ordered by
# occurrence count, descending
#
# @return [Array] rows of [scrubbed plugin name, count]
def top_10_sorted
	rows = notable_order_by_cvss_raw.map do |plugin_id, count|
		[scrub_plugin_name(Plugin.find_by_id(plugin_id).plugin_name), count]
	end
	rows.sort { |left, right| right[1] <=> left[1] }
end
# Returns a prawn pdf table for the top 10 notable findings
#
# @todo change this method to return a array/table and let the template render it
# @todo rename to notable_table also
#
# @param output device to write the table to
def top_10_table(output)
headers = ["Description", "Count"]
header_widths = {0 => (output.bounds.width - 50), 1 => 50}
data = top_10_sorted
output.table([headers] + data[0..9], :header => true, :column_widths => header_widths, :width => output.bounds.width) do
row(0).style(:font_style => :bold, :background_color => 'cccccc')
cells.borders = [:top, :bottom, :left, :right]
end
end
# Queries for all unique risks and sorts them by count
#
# @return [ActiveRecord::Relation] with the query results
def all_risks_unique_sorted
select("items.*").select("count(*) as count_all").group(:plugin_id).order("count_all DESC")
end
# Returns the plugin that this [Item] belongs to
#
# NOTE(review): this class method queries the plugin_id of Item.first —
# not of any particular record — and returns a Relation, not a single
# Plugin. Presumably a bug (there is also a `belongs_to :plugin`
# association on instances); confirm against callers before changing.
#
# @return [Plugin] the that this [Item] references
def plugin
Plugin.where(:id => Item.first.attributes["plugin_id"])
end
# Builds a array of findings with their exploitablity values
#
# @param [ActiveRecord::Relation] findings to build matrix on
#
# @return [Array] with the rows of name, total, core, metasploit, canvas, exploithub, d2elliot
def exploitablity_matrix findings
	findings.map do |finding|
		plugin = Plugin.where(:id => finding.plugin_id).first
		# Each framework column shows "Yes" when the plugin flag is the
		# string "true", otherwise nil (rendered blank in the table).
		framework_flags = [
			plugin.exploit_framework_core,
			plugin.exploit_framework_metasploit,
			plugin.exploit_framework_canvas,
			plugin.exploit_framework_exploithub,
			plugin.exploit_framework_d2_elliot
		].map { |flag| flag == "true" ? "Yes" : nil }
		total = Item.where(:plugin_id => finding.plugin_id).count
		[scrub_plugin_name(plugin.plugin_name), total] + framework_flags
	end
end
end
end
end
end
|
module RockRMS
# Gem release version (frozen string constant).
VERSION = '5.6.0'.freeze
end
Bump to v5.7.0
module RockRMS
# Gem release version (frozen string constant).
VERSION = '5.7.0'.freeze
end
|
module RSpotify
# Gem release version; frozen so the shared constant cannot be mutated
# (matches the .freeze convention used by sibling version files).
VERSION = '1.16.0'.freeze
end
Bump to 1.16.1
module RSpotify
# Gem release version; frozen so the shared constant cannot be mutated
# (matches the .freeze convention used by sibling version files).
VERSION = '1.16.1'.freeze
end
|
class EnginesCore
require "/opt/engines/lib/ruby/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
class SystemApi
attr_reader :last_error
def initialize(api)
@engines_api = api
end
# def
# @docker_api.update_self_hosted_domain( params)
# end
def create_container(container)
clear_error
begin
cid = read_container_id(container)
container.container_id=(cid)
if save_container(container) == true
return register_dns_and_site(container)
else
return false #save_container false
end
rescue Exception=>e
container.last_error=("Failed To Create " + e.to_s)
log_exception(e)
return false
end
end
def register_dns_and_site(container)
if container.conf_register_dns == true
if container.register_dns() == true
if container.conf_register_site() == true
if container.register_site == true
return true
else
return false #failed to register
end
end # if reg site
else
return false #reg dns failed
end
end #if reg dns
return true
end
def reload_dns
dns_pid = File.read(SysConfig.NamedPIDFile)
p :kill_HUP_TO_DNS
p dns_pid.to_s
return @engines_api.signal_service_process(dns_pid.to_s,'HUP','dns')
rescue Exception=>e
log_exception(e)
return false
end
def restart_nginx_process
begin
clear_error
cmd= "docker exec nginx ps ax |grep \"nginx: master\" |grep -v grep |awk '{ print $1}'"
SystemUtils.debug_output(cmd)
nginxpid= %x<#{cmd}>
SystemUtils.debug_output(nginxpid)
#FIXME read from pid file this is just silly
docker_cmd = "docker exec nginx kill -HUP " + nginxpid.to_s
SystemUtils.debug_output(docker_cmd)
if nginxpid.to_s != "-"
return run_system(docker_cmd)
else
return false
end
rescue Exception=>e
log_exception(e)
return false
end
end
def clear_cid(container)
container.container_id=(-1)
end
# True when the container's state dir contains the startup_complete marker.
#
# @return [Boolean] false on any error
def is_startup_complete container
	clear_error
	begin
		# File.exists? was deprecated and removed in Ruby 3.2; File.exist?
		# is the supported name.
		run_dir = container_state_dir(container)
		return File.exist?(run_dir + "/startup_complete")
	rescue Exception=>e
		# NOTE(review): rescue Exception is the pervasive convention in this
		# file; consider narrowing to StandardError project-wide.
		log_exception(e)
		return false
	end
end
# Deletes the container's cid file if present.
#
# @return [Boolean] true when deleted or absent, false on error
def clear_cid_file container
	clear_error
	begin
		cidfile = container_cid_file(container)
		# File.exists? was removed in Ruby 3.2; File.exist? is the
		# supported name.
		File.delete(cidfile) if File.exist?(cidfile)
		return true
	rescue Exception=>e
		# The error text previously said "Failed To Create" — a copy-paste
		# from create_container; corrected to describe this operation.
		container.last_error=("Failed To Clear cid file " + e.to_s)
		log_exception(e)
		return false
	end
end
# Reads the container id from the container's cid file.
#
# @return [String, nil] file contents; nil when no cid file exists
#   (implicit fall-through, preserved for callers that test nil); "-1"
#   when reading raised
def read_container_id(container)
	clear_error
	begin
		cidfile = container_cid_file(container)
		# File.exists? was removed in Ruby 3.2; File.exist? is the
		# supported name.
		return File.read(cidfile) if File.exist?(cidfile)
	rescue Exception=>e
		log_exception(e)
		return "-1";
	end
end
# Resets the container id and removes the cid file if one exists.
#
# @return [Boolean] true on success (file may or may not exist), false on error
def destroy_container container
	clear_error
	begin
		container.container_id=(-1)
		# File.exists? was removed in Ruby 3.2; File.exist? is the
		# supported name.
		cidfile = container_cid_file(container)
		File.delete(cidfile) if File.exist?(cidfile)
		return true #File may or may not exist
	rescue Exception=>e
		container.last_error=( "Failed To Destroy " + e.to_s)
		log_exception(e)
		return false
	end
end
# Registers an A record (TTL 30) for top_level_hostname in the internal
# domain by writing an nsupdate command file and shelling out. The record
# is deleted first so re-registration replaces any stale address.
#
# @param top_level_hostname unqualified host name
# @param ip_addr_str IP address string to publish
#
# @return [Boolean] result of the nsupdate run; false on exception
def register_dns(top_level_hostname,ip_addr_str) # no Gem made this simple (need to set tiny TTL) and all used nsupdate anyhow
clear_error
begin
fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
#FIXME need unique name for temp file — concurrent calls would collide on this fixed path
dns_cmd_file_name="/tmp/.dns_cmd_file"
dns_cmd_file = File.new(dns_cmd_file_name,"w+")
dns_cmd_file.puts("server " + SysConfig.defaultDNS)
dns_cmd_file.puts("update delete " + fqdn_str)
dns_cmd_file.puts("send")
dns_cmd_file.puts("update add " + fqdn_str + " 30 A " + ip_addr_str)
dns_cmd_file.puts("send")
dns_cmd_file.close
cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
retval = run_system(cmd_str)
# NOTE(review): the temp command file is intentionally (?) left behind:
#File.delete(dns_cmd_file_name)
return retval
rescue Exception=>e
log_exception(e)
return false
end
end
def delete_container_configs(container)
clear_error
begin
stateDir = container_state_dir(container) + "/config.yaml"
File.delete(stateDir)
cidfile = SysConfig.CidDir + "/" + container.containerName + ".cid"
if File.exists?(cidfile)
File.delete(cidfile)
end
return true
rescue Exception=>e
container.last_error=( "Failed To Delete " )
log_exception(e)
return false
end
end
def deregister_dns(top_level_hostname)
clear_error
begin
fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
dns_cmd_file_name="/tmp/.top_level_hostname.dns_cmd_file"
dns_cmd_file = File.new(dns_cmd_file_name,"w")
dns_cmd_file.puts("server " + SysConfig.defaultDNS)
dns_cmd_file.puts("update delete " + fqdn_str)
dns_cmd_file.puts("send")
dns_cmd_file.close
cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
retval = run_system(cmd_str)
File.delete(dns_cmd_file_name)
return retval
rescue Exception=>e
log_exception(e)
return false
end
end
# Picks the certificate name for an fqdn: the host-specific cert when one
# exists in the nginx cert dir, otherwise the default cert.
#
# @return [String] cert name for nginx config templating
def get_cert_name(fqdn)
	# File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
	if File.exist?(SysConfig.NginxCertDir + "/" + fqdn + ".crt")
		return fqdn
	else
		return SysConfig.NginxDefaultCert
	end
end
def register_site(site_hash)
clear_error
begin
proto = site_hash[:proto]
if proto =="http https"
template_file=SysConfig.HttpHttpsNginxTemplate
elsif proto =="http"
template_file=SysConfig.HttpNginxTemplate
elsif proto == "https"
template_file=SysConfig.HttpsNginxTemplate
elsif proto == nil
p "Proto nil"
template_file=SysConfig.HttpHttpsNginxTemplate
else
p "Proto" + proto + " unknown"
template_file=SysConfig.HttpHttpsNginxTemplate
end
file_contents=File.read(template_file)
site_config_contents = file_contents.sub("FQDN",site_hash[:variables][:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:variables][:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:variables][:name]) #Not HostName
if proto =="https" || proto =="http https"
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:variables][:fqdn])) #Not HostName
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:variables][:fqdn])) #Not HostName
end
if proto =="http https"
#Repeat for second entry
site_config_contents = site_config_contents.sub("FQDN",site_hash[:variables][:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:variables][:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:variables][:name]) #Not HostName
end
site_filename = get_site_file_name(site_hash)
site_file = File.open(site_filename,'w')
site_file.write(site_config_contents)
site_file.close
result = restart_nginx_process()
return result
rescue Exception=>e
log_exception(e)
return false
end
end
def hash_to_site_str(site_hash)
clear_error
begin
return site_hash[:name].to_s + ":" + site_hash[:variables][:fqdn].to_s + ":" + site_hash[:variables][:port].to_s + ":" + site_hash[:variables][:proto].to_s
rescue Exception=>e
log_exception(e)
return false
end
end
def get_site_file_name(site_hash)
file_name = String.new
proto = site_hash[:proto]
p :proto
p proto
if proto == "http https"
proto ="http_https"
end
file_name=SysConfig.NginxSiteDir + "/" + proto + "_" + site_hash[:variables][:fqdn] + ".site"
return file_name
end
def deregister_site(site_hash)
clear_error
begin
# # ssh_cmd=SysConfig.rmSiteCmd + " \"" + hash_to_site_str(site_hash) + "\""
# #FIXME Should write site conf file via template (either standard or supplied with blueprint)
# ssh_cmd = "/opt/engines/scripts/nginx/rmsite.sh " + " \"" + hash_to_site_str(site_hash) + "\""
# SystemUtils.debug_output ssh_cmd
# result = run_system(ssh_cmd)
site_filename = get_site_file_name(site_hash)
if File.exists?(site_filename)
File.delete(site_filename)
end
result = restart_nginx_process()
return result
rescue Exception=>e
log_exception(e)
return false
end
end
def add_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.addSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
def rm_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.rmSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
def save_container(container)
clear_error
begin
#FIXME
api = container.core_api
container.core_api = nil
serialized_object = YAML::dump(container)
container.core_api = api
stateDir=SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
if File.directory?(stateDir) ==false
Dir.mkdir(stateDir)
Dir.mkdir(stateDir + "/run")
end
log_dir = container_log_dir(container)
if File.directory?(log_dir) ==false
Dir.mkdir(log_dir)
end
statefile=stateDir + "/config.yaml"
# BACKUP Current file with rename
if File.exists?(statefile)
statefile_bak = statefile + ".bak"
File.rename( statefile, statefile_bak)
end
f = File.new(statefile,File::CREAT|File::TRUNC|File::RDWR, 0644)
f.puts(serialized_object)
f.close
return true
rescue Exception=>e
container.last_error=( "load error")
log_exception(e)
return false
end
end
def save_blueprint(blueprint,container)
clear_error
begin
if blueprint != nil
puts blueprint.to_s
else
return false
end
stateDir=container_state_dir(container)
if File.directory?(stateDir) ==false
Dir.mkdir(stateDir)
end
statefile=stateDir + "/blueprint.json"
f = File.new(statefile,File::CREAT|File::TRUNC|File::RDWR, 0644)
f.write(blueprint.to_json)
f.close
rescue Exception=>e
log_exception(e)
return false
end
end
def load_blueprint(container)
clear_error
begin
stateDir=container_state_dir(container)
if File.directory?(stateDir) ==false
return false
end
statefile=stateDir + "/blueprint.json"
if File.exists?(statefile)
f = File.new(statefile,"r")
blueprint = JSON.parse( f.read())
f.close
else
return false
end
return blueprint
rescue Exception=>e
log_exception(e)
return false
end
end
def save_domains(domains)
clear_error
begin
domain_file = File.open(SysConfig.DomainsFile,"w")
domain_file.write(domains.to_yaml())
domain_file.close
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
# Loads the domains hash from the YAML domains file, creating an empty
# file on first use.
#
# @return [Hash] domains keyed by domain name; empty Hash on any failure
def load_domains
	clear_error
	begin
		# File.exists? was removed in Ruby 3.2; File.exist? is the
		# supported name.
		if File.exist?(SysConfig.DomainsFile) == false
			p :creating_new_domain_list
			# Create an empty file so subsequent loads succeed.
			self_hosted_domain_file = File.open(SysConfig.DomainsFile,"w")
			self_hosted_domain_file.close
			return Hash.new
		else
			self_hosted_domain_file = File.open(SysConfig.DomainsFile,"r")
		end
		domains = YAML::load( self_hosted_domain_file )
		self_hosted_domain_file.close
		# YAML::load returns false for an empty document; treat as no domains.
		if domains == false
			p :domains_error_in_load
			return Hash.new
		end
		return domains
	rescue Exception=>e
		domains = Hash.new
		p "failed_to_load_domains"
		SystemUtils.log_exception(e)
		return domains
	end
end
def list_domains
domains = load_domains
return domains
rescue Exception=>e
domains = Hash.new
p :error_listing_domains
SystemUtils.log_exception(e)
return domains
end
def add_domain(params)
clear_error
domain= params[:domain_name]
if params[:self_hosted]
add_self_hosted_domain params
end
p :add_domain
p params
domains = load_domains()
domains[params[:domain_name]] = params
if save_domains(domains)
return true
end
p :failed_add_hosted_domains
return false
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
def rm_domain(domain,system_api)
clear_error
domains = load_domains
if domains.has_key?(domain)
domains.delete(domain)
save_domains(domains)
system_api.reload_dns
end
end
def update_domain(old_domain_name, params,system_api)
clear_error
begin
domains = load_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_domains(domains)
if params[:self_hosted]
add_self_hosted_domain params
rm_self_hosted_domain(old_domain_name)
system_api.reload_dns
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def add_self_hosted_domain params
clear_error
begin
p :Lachlan_Sent_parrams
p params
return DNSHosting.add_hosted_domain(params,self)
# if ( DNSHosting.add_hosted_domain(params,self) == false)
# return false
# end
#
# domains = load_self_hosted_domains()
# domains[params[:domain_name]] = params
#
return save_self_hosted_domains(domains)
rescue Exception=>e
log_exception(e)
return false
end
end
def list_self_hosted_domains()
clear_error
begin
return DNSHosting.load_self_hosted_domains()
# domains = load_self_hosted_domains()
# p domains
# return domains
rescue Exception=>e
log_exception(e)
return false
end
end
def update_self_hosted_domain(old_domain_name, params)
clear_error
begin
domains = load_self_hosted_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_self_hosted_domains(domains)
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def remove_self_hosted_domain( domain_name)
clear_error
begin
return DNSHosting.rm_hosted_domain(domain_name,self)
rescue Exception=>e
log_exception(e)
return false
end
end
def save_system_preferences
clear_error
begin
SystemUtils.debug_output :pdsf
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def load_system_preferences
clear_error
begin
SystemUtils.debug_output :psdfsd
rescue Exception=>e
log_exception(e)
return false
end
end
def get_container_memory_stats(container)
clear_error
ret_val= Hash.new
begin
if container && container.container_id == nil || container.container_id == '-1'
container_id = read_container_id(container)
container.container_id=(container_id)
end
if container && container.container_id != nil && container.container_id != '-1'
path = "/sys/fs/cgroup/memory/docker/" + container.container_id + "/"
if Dir.exists?(path)
ret_val.store(:maximum , File.read(path + "/memory.max_usage_in_bytes"))
ret_val.store(:current , File.read(path + "/memory.usage_in_bytes"))
ret_val.store(:limit , File.read(path + "/memory.limit_in_bytes"))
else
p :no_cgroup_file
p path
ret_val.store(:maximum , "No Container")
ret_val.store(:current , "No Container")
ret_val.store(:limit , "No Container")
end
end
return ret_val
rescue Exception=>e
log_exception(e)
ret_val.store(:maximum , e.to_s)
ret_val.store(:current , "NA")
ret_val.store(:limit , "NA")
return ret_val
end
end
def set_engine_network_properties(engine, params)
clear_error
begin
engine_name = params[:engine_name]
protocol = params[:http_protocol]
if protocol.nil?
p params
return false
end
SystemUtils.debug_output("Changing protocol to _" + protocol + "_")
if protocol.include?("HTTPS only")
engine.enable_https_only
elsif protocol.include?("HTTP only")
engine.enable_http_only
elsif protocol.include?("HTTPS and HTTP")
engine.enable_http_and_https
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Change a container's host/domain name and re-register it with the nginx
# and dns services so proxying and records follow the rename.
# Returns true (even when nothing changed), false on exception.
def set_engine_hostname_details(container, params)
  clear_error
  begin
    hostname = params[:host_name]
    domain_name = params[:domain_name]
    SystemUtils.debug_output("Changing Domainame to " + domain_name)
    if container.hostName != hostname || container.domainName != domain_name
      # CLEANUP: removed unused saved_hostName/saved_domainName locals and a
      # duplicated debug_output line from the original.
      if container.set_hostname_details(hostname, domain_name) == true
        # detach from proxy/dns under the old name, re-attach under the new one
        nginx_service = EnginesOSapi.loadManagedService("nginx", self)
        nginx_service.remove_consumer(container)
        dns_service = EnginesOSapi.loadManagedService("dns", self)
        dns_service.remove_consumer(container)
        dns_service.add_consumer(container)
        nginx_service.add_consumer(container)
        save_container(container)
      end
    end
    return true
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Parse /proc/meminfo into a hash of memory figures (values are the raw
# kB-count strings from the file). On exception, :total carries the error
# text and all other keys are -1.
def get_system_memory_info
  clear_error
  ret_val = Hash.new
  begin
    key_map = {
      "MemTotal:"  => :total,      "MemFree:"   => :free,
      "Buffers:"   => :buffers,    "Cached:"    => :file_cache,
      "Active:"    => :active,     "Inactive:"  => :inactive,
      "SwapTotal:" => :swap_total, "SwapFree:"  => :swap_free
    }
    # BUGFIX: block form closes the file — the original leaked the handle.
    File.open("/proc/meminfo") do |proc_mem_info_file|
      proc_mem_info_file.each_line do |line|
        values = line.split(" ")
        key = key_map[values[0]]
        ret_val[key] = values[1] if key
      end
    end
    return ret_val
  rescue Exception => e
    log_exception(e)
    ret_val[:total] = e.to_s
    ret_val[:free] = -1
    ret_val[:active] = -1
    ret_val[:inactive] = -1
    ret_val[:file_cache] = -1
    ret_val[:buffers] = -1
    ret_val[:swap_total] = -1
    ret_val[:swap_free] = -1
    return ret_val
  end
end
# Parse /proc/loadavg into a hash: :one/:five/:fithteen load averages plus
# :running and :idle process counts. (Key :fithteen is misspelled but kept —
# callers depend on it.) Values are -1 on exception.
def get_system_load_info
  clear_error
  ret_val = Hash.new
  begin
    loadavg_info = File.read("/proc/loadavg")
    values = loadavg_info.split(" ")
    ret_val[:one] = values[0]
    ret_val[:five] = values[1]
    ret_val[:fithteen] = values[2]
    run_idle = values[3].split("/")
    ret_val[:running] = run_idle[0]
    ret_val[:idle] = run_idle[1]
    return ret_val # BUGFIX: previously fell off the method returning the last assignment's value, not the hash
  rescue Exception => e
    # BUGFIX: removed the duplicate, unreachable second rescue clause
    log_exception(e)
    ret_val[:one] = -1
    ret_val[:five] = -1
    ret_val[:fithteen] = -1
    ret_val[:running] = -1
    ret_val[:idle] = -1
    return ret_val
  end
end
# Load every ManagedEngine that has a config.yaml under the containers dir.
# Entries that fail to load are logged and skipped. Returns an Array, or
# false if the directory scan itself blows up.
def getManagedEngines()
  engines = Array.new
  Dir.entries(SysConfig.CidDir + "/containers/").each do |contdir|
    yfn = SysConfig.CidDir + "/containers/" + contdir + "/config.yaml"
    next unless File.exists?(yfn) == true
    managed_engine = loadManagedEngine(contdir)
    if managed_engine.is_a?(ManagedEngine)
      engines.push(managed_engine)
    else
      log_error("failed to load " + yfn)
    end
  end
  engines
rescue Exception => e
  log_exception(e)
  false
end
# Load one ManagedEngine from its per-container config.yaml.
# Returns the engine object, or false when the name is blank, the config is
# missing, or parsing fails.
def loadManagedEngine(engine_name)
  if engine_name == nil || engine_name.length == 0
    @last_error = "No Engine Name" # BUGFIX: original assigned a throwaway local `last_error`
    return false
  end
  begin
    yam_file_name = SysConfig.CidDir + "/containers/" + engine_name + "/config.yaml"
    if File.exists?(yam_file_name) == false
      log_error("no such file " + yam_file_name)
      return false
    end
    yaml_file = File.open(yam_file_name)
    managed_engine = ManagedEngine.from_yaml(yaml_file, @engines_api)
    if managed_engine == nil || managed_engine == false
      p :from_yaml_returned_nil
      return false
    end
    return managed_engine
  rescue Exception => e
    # record the failure on the engine too, when we got far enough to have one
    if managed_engine != nil
      managed_engine.last_error = ("Failed To get Managed Engine " + engine_name + " " + e.to_s)
      log_error(managed_engine.last_error)
    end
    log_exception(e)
    return false
  end
end
# Load one ManagedService from its per-service config.yaml.
# Returns the service object, or false when the name is blank, the config is
# missing, or parsing fails.
def loadManagedService(service_name)
  begin
    if service_name == nil || service_name.length == 0
      @last_error = "No Service Name" # BUGFIX: original assigned a throwaway local `last_error`
      return false
    end
    yam_file_name = SysConfig.CidDir + "/services/" + service_name + "/config.yaml"
    if File.exists?(yam_file_name) == false
      return false
    end
    yaml_file = File.open(yam_file_name)
    managed_service = ManagedService.from_yaml(yaml_file, @engines_api)
    if managed_service == nil
      return false
    end
    return managed_service
  rescue Exception => e
    if service_name != nil
      if managed_service != nil
        # BUGFIX: message said "Managed Engine" for a service
        managed_service.last_error = ("Failed To get Managed Service " + service_name + " " + e.to_s)
        log_error(managed_service.last_error)
      end
    else
      log_error("nil Service Name")
    end
    log_exception(e)
    return false
  end
end
# Load every ManagedService that has a config.yaml under the services dir.
# Returns an Array (unloadable entries are silently skipped), or false if the
# scan itself fails.
def getManagedServices()
  services = Array.new
  Dir.entries(SysConfig.CidDir + "/services/").each do |contdir|
    yfn = SysConfig.CidDir + "/services/" + contdir + "/config.yaml"
    next unless File.exists?(yfn) == true
    yf = File.open(yfn)
    managed_service = ManagedService.from_yaml(yf, @engines_api)
    services.push(managed_service) if managed_service
    yf.close
  end
  services
rescue Exception => e
  log_exception(e)
  false
end
# Names of all engines that have a config.yaml under the containers dir.
# Always returns an Array (possibly partial after an exception).
def list_managed_engines
  clear_error
  names = Array.new
  begin
    base = SysConfig.CidDir + "/containers/"
    Dir.entries(base).each do |entry|
      names.push(entry) if File.exists?(base + entry + "/config.yaml")
    end
  rescue Exception => e
    log_exception(e)
  end
  names
end
# Names of all services that have a config.yaml under the services dir.
# Always returns an Array (possibly partial after an exception).
def list_managed_services
  clear_error
  names = Array.new
  begin
    base = SysConfig.CidDir + "/services/"
    Dir.entries(base).each do |entry|
      names.push(entry) if File.exists?(base + entry + "/config.yaml")
    end
  rescue Exception => e
    log_exception(e)
  end
  names
end
# Reset the per-container run flags. Currently only removes the
# startup_complete marker; later a fuller reset of subscription and other
# flags is planned.
def clear_container_var_run(container)
  clear_error
  flag = container_state_dir(container) + "/startup_complete"
  File.unlink(flag) if File.exists?(flag)
  true
rescue Exception => e
  log_exception(e)
  false
end
protected
# Path of the docker cid file recording this container's id.
def container_cid_file(container)
  "#{SysConfig.CidDir}/#{container.containerName}.cid"
end
# Per-container state directory: <CidDir>/containers/<name> or
# <CidDir>/services/<name> depending on ctype.
def container_state_dir(container)
  "#{SysConfig.CidDir}/#{container.ctype}s/#{container.containerName}"
end
# Per-container log directory under the system log root.
def container_log_dir(container)
  "#{SysConfig.SystemLogRoot}/#{container.ctype}s/#{container.containerName}"
end
# Run a shell command, merging stderr into stdout. Returns true on apparent
# success; the raw output string when the output looks like a failure; false
# on exception.
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    # FIXME should be case insensitive; the hostname check is a pure kludge —
    # really need to get stderr and stdout separately.
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      return res
    end
  rescue Exception => e
    log_exception(e)
    return false # BUGFIX: previously returned undefined `ret_val`, raising NameError inside the rescue
  end
end
# Reset the last-error buffer before starting a new operation.
def clear_error
  @last_error = ''
end
# Record an error string as the last error and emit it at log level 10.
def log_error(message)
  @last_error = message
  SystemUtils.log_output(message, 10)
end
# Record an exception (message plus concatenated backtrace) as the last
# error and log it.
def log_exception(e)
  details = e.backtrace.inject(e.to_s) { |acc, frame| acc + frame }
  log_error(details)
end
end #END of SystemApi
class DockerApi
  # Thin wrapper around the `docker` command-line client. Each public method
  # builds a CLI argument string for a container object, shells out through
  # run_docker, and records the outcome on the container and in @last_error.
  # Every method returns false on failure.
  attr_reader :last_error

  # `docker run -d …`: create and start a detached container from its config.
  def create_container(container)
    clear_error
    begin
      commandargs = container_commandline_args(container)
      commandargs = " run -d " + commandargs
      SystemUtils.debug_output commandargs
      retval = run_docker(commandargs, container)
      return retval
    rescue Exception => e
      container.last_error = ("Failed To Create ")
      log_exception(e)
      return false
    end
  end

  # `docker start <name>`.
  def start_container(container)
    clear_error
    begin
      commandargs = " start " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker stop <name>`.
  def stop_container(container)
    clear_error
    begin
      commandargs = " stop " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker pause <name>`.
  def pause_container(container)
    clear_error
    begin
      commandargs = " pause " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # True when `docker images -q <image_name>` prints anything.
  def image_exists?(image_name)
    cmd = "docker images -q " + image_name
    res = SystemUtils.run_system(cmd)
    if res.length > 0
      return true
    else
      return false
    end
  end

  # `docker unpause <name>`.
  def unpause_container(container)
    clear_error
    begin
      commandargs = " unpause " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker top <name> axl` — process listing inside the container.
  def ps_container(container)
    clear_error
    begin
      commandargs = " top " + container.containerName + " axl"
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # Send `signal` to process `pid` inside the container via `docker exec kill`.
  def signal_container_process(pid, signal, container)
    clear_error
    commandargs = " exec " + container.containerName + " kill -" + signal + " " + pid.to_s
    return run_docker(commandargs, container)
  rescue Exception => e
    log_exception(e)
    return false
  end

  # `docker logs <name>`.
  def logs_container(container)
    clear_error
    begin
      commandargs = " logs " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker inspect <name>` — JSON lands in container.last_result.
  def inspect_container(container)
    clear_error
    begin
      commandargs = " inspect " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker rm <name>`.
  def destroy_container(container)
    clear_error
    begin
      commandargs = " rm " + container.containerName
      ret_val = run_docker(commandargs, container)
    rescue Exception => e
      container.last_error = ("Failed To Destroy " + e.to_s)
      log_exception(e)
      return false
    end
  end

  # `docker rmi <image>`.
  def delete_image(container)
    clear_error
    begin
      commandargs = " rmi " + container.image
      ret_val = run_docker(commandargs, container)
      return ret_val
    rescue Exception => e
      container.last_error = ("Failed To Delete " + e.to_s)
      log_exception(e)
      return false
    end
  end

  # Run an arbitrary command (with args) inside the container.
  def docker_exec(container, command, args)
    run_args = "exec " + container.containerName + " " + command + " " + args
    return run_docker(run_args, container)
  end

  # Run `docker <args>`, streaming stdout/stderr via popen3. Combined stdout
  # is stored in container.last_result, stderr text in container.last_error
  # and @last_error. Returns false when stderr contains "Error", true
  # otherwise.
  def run_docker(args, container)
    clear_error
    require 'open3'
    SystemUtils.debug_output(args)
    res = String.new
    error_mesg = String.new
    begin
      container.last_result = ("")
      Open3.popen3("docker " + args) do |stdin, stdout, stderr, th|
        oline = String.new
        stderr_is_open = true
        begin
          stdout.each { |line|
            line = line.gsub(/\\\"/, "") # strip escaped quotes from docker output
            oline = line
            res += line.chop
            if stderr_is_open
              error_mesg += stderr.read_nonblock(256)
            end
          }
        rescue Errno::EIO
          # stream closed mid-line: keep whatever was read
          res += oline.chop
          SystemUtils.debug_output(oline)
          error_mesg += stderr.read_nonblock(256)
        rescue IO::WaitReadable
          retry
        rescue EOFError
          if stdout.closed? == false
            stderr_is_open = false # stderr drained; keep reading stdout
            retry
          elsif stderr.closed? == false
            error_mesg += stderr.read_nonblock(1000)
            container.last_result = (res)
            container.last_error = (error_mesg) # BUGFIX: was undefined `error_mesgs` (NameError)
          else
            container.last_result = (res)
            container.last_error = (error_mesg) # BUGFIX: was undefined `error_mesgs` (NameError)
          end
        end
        @last_error = error_mesg
        if error_mesg.include?("Error")
          container.last_error = (error_mesg)
          return false
        else
          container.last_error = ("")
        end
        # NOTE(review): appears to re-balance a trailing ']' that the chop
        # loop can drop from JSON array output — confirm against `docker inspect`.
        if res != nil && res.end_with?(']') == false
          res += ']'
        end
        container.last_result = (res)
        return true
      end
    rescue Exception => e
      @last_error = error_mesg + e.to_s
      container.last_result = (res)
      container.last_error = (error_mesg + e.to_s)
      log_exception(e)
      return false
    end
    return true
  end

  # Build the repeated `-e name="value"` options from container.environments.
  # (Name keeps the historical "envionment" spelling — callers use it.)
  def get_envionment_options(container)
    e_option = String.new
    if (container.environments)
      container.environments.each do |environment|
        if environment != nil
          e_option = e_option + " -e " + environment.name + "=" + '"' + environment.value + '"'
        end
      end
    end
    return e_option
  rescue Exception => e
    log_exception(e)
    return e.to_s
  end

  # Build the repeated `-p [external:]port/proto` options from container.eports.
  def get_port_options(container)
    eportoption = String.new
    if (container.eports)
      container.eports.each do |eport|
        if eport != nil
          eportoption = eportoption + " -p "
          if eport.external != nil && eport.external > 0
            eportoption = eportoption + eport.external.to_s + ":"
          end
          eportoption = eportoption + eport.port.to_s
          if eport.proto_type == nil
            eport.proto_type = ('tcp') # default protocol when unset
          end
          eportoption = eportoption + "/" + eport.proto_type + " "
        end
      end
    end
    return eportoption
  rescue Exception => e
    log_exception(e)
    return e.to_s
  end

  # Assemble the full `docker run` argument string (hostname, env, memory,
  # volumes, ports, cidfile, name, image, startup command).
  def container_commandline_args(container)
    clear_error
    begin
      envionment_options = get_envionment_options(container)
      port_options = get_port_options(container)
      volume_option = get_volume_option(container)
      if container.conf_self_start == false
        start_cmd = " /bin/bash /home/init.sh" # engines-managed startup script
      else
        start_cmd = " "
      end
      commandargs = "-h " + container.hostName + \
                    envionment_options + \
                    " --memory=" + container.memory.to_s + "m " + \
                    volume_option + " " + \
                    port_options + \
                    " --cidfile " + SysConfig.CidDir + "/" + container.containerName + ".cid " + \
                    "--name " + container.containerName + \
                    " -t " + container.image + " " + \
                    start_cmd
      return commandargs
    rescue Exception => e
      log_exception(e)
      return e.to_s
    end
  end

  # Build the `-v` volume mappings: timezone file, per-container run dir,
  # log dirs, then any container-specific volumes.
  def get_volume_option(container)
    clear_error
    begin
      # System mappings
      volume_option = SysConfig.timeZone_fileMapping # latter this will be customised
      volume_option += " -v " + container_state_dir(container) + "/run:/engines/var/run:rw "
      incontainer_logdir = get_container_logdir(container)
      volume_option += " -v " + container_log_dir(container) + ":/" + incontainer_logdir + ":rw "
      # when the framework logs elsewhere, still capture /var/log separately
      if incontainer_logdir != "/var/log" && incontainer_logdir != "/var/log/"
        volume_option += " -v " + container_log_dir(container) + "/vlog:/var/log/:rw"
      end
      # Container-specific mappings
      if (container.volumes)
        container.volumes.each_value do |volume|
          if volume != nil
            if volume.localpath != nil
              volume_option = volume_option.to_s + " -v " + volume.localpath.to_s + ":/" + volume.remotepath.to_s + ":" + volume.mapping_permissions.to_s
            end
          end
        end
      end
      return volume_option
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # Resolve the in-container log directory: the framework's LOG_DIR file if
  # present, otherwise the global template's, otherwise "/var/log".
  def get_container_logdir(container)
    clear_error
    if container.framework == nil || container.framework.length == 0
      return "/var/log"
    end
    container_logdetails_file_name = false
    framework_logdetails_file_name = SysConfig.DeploymentTemplates + "/" + container.framework + "/home/LOG_DIR"
    SystemUtils.debug_output(framework_logdetails_file_name)
    if File.exists?(framework_logdetails_file_name)
      container_logdetails_file_name = framework_logdetails_file_name
    else
      container_logdetails_file_name = SysConfig.DeploymentTemplates + "/global/home/LOG_DIR"
    end
    SystemUtils.debug_output(container_logdetails_file_name)
    begin
      container_logdetails = File.read(container_logdetails_file_name)
    rescue
      container_logdetails = "/var/log" # unreadable LOG_DIR file: fall back
    end
    return container_logdetails
  rescue Exception => e
    log_exception(e)
    return false
  end

  protected

  # Per-container state directory under the cid dir.
  def container_state_dir(container)
    return SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
  end

  # Per-container log directory under the system log root.
  def container_log_dir(container)
    return SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
  end

  # Reset the last-error buffer.
  def clear_error
    @last_error = ""
  end

  # Record an exception plus up to ~11 backtrace frames as the last error.
  def log_exception(e)
    e_str = e.to_s()
    n = 0
    e.backtrace.each do |bt|
      e_str += bt
      break if n > 10
      n += 1 # BUGFIX: was `++n`, which is a no-op in Ruby, so the cap never triggered
    end
    @last_error = e_str
    SystemUtils.log_output(e_str, 10)
  end
end # END of DockerApi
# Wire up the docker CLI wrapper and the system-level API.
def initialize
  @docker_api = DockerApi.new
  @system_api = SystemApi.new(self) # will change to docker_api rather than self
  @last_error = String.new
end
attr_reader :last_error
# Look up a software service definition via the service manager.
def software_service_definition(params)
  loadServiceManager.software_service_definition(params)
end
# Delegate domain creation to the system API.
def add_domain(params)
  @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(containerName)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(containerName)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #acutally a rebuild (or resave) as hadh already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
# Delegate domain removal to the system API.
def remove_domain(params)
  @system_api.rm_domain(params[:domain_name], @system_api)
end
# Delegate domain update to the system API.
def update_domain(old_domain, params)
  @system_api.update_domain(old_domain, params, @system_api)
end
# Send signal `sig` to process `pid` inside the managed service `name`.
def signal_service_process(pid, sig, name)
  container = loadManagedService(name)
  @docker_api.signal_container_process(pid, sig, container)
end
# True only when the docker layer reports a clean start.
def start_container(container)
  @docker_api.start_container(container) == true
end
# Delegate `docker inspect` to the docker layer.
def inspect_container(container)
  @docker_api.inspect_container(container)
end
# True only when the docker layer reports a clean stop.
def stop_container(container)
  @docker_api.stop_container(container) == true
end
# Delegate `docker pause` to the docker layer.
def pause_container(container)
  @docker_api.pause_container(container)
end
# Delegate `docker unpause` to the docker layer.
def unpause_container(container)
  @docker_api.unpause_container(container)
end
# Delegate in-container process listing to the docker layer.
def ps_container(container)
  @docker_api.ps_container(container)
end
# Delegate `docker logs` to the docker layer.
def logs_container(container)
  @docker_api.logs_container(container)
end
# Delegate monitor registration to the system API.
def add_monitor(site_hash)
  @system_api.add_monitor(site_hash)
end
# Delegate monitor removal to the system API.
def rm_monitor(site_hash)
  @system_api.rm_monitor(site_hash)
end
# Persist a container's configuration via the system API.
def save_container(container)
  @system_api.save_container(container)
end
# Persist a container's blueprint via the system API.
def save_blueprint(blueprint, container)
  @system_api.save_blueprint(blueprint, container)
end
# Load a container's blueprint via the system API.
def load_blueprint(container)
  @system_api.load_blueprint(container)
end
# Delegate volume creation to the system API.
def add_volume(site_hash)
  @system_api.add_volume(site_hash)
end
# Delegate volume removal to the system API.
def rm_volume(site_hash)
  @system_api.rm_volume(site_hash)
end
# Delegate self-hosted domain removal to the system API.
def remove_self_hosted_domain(domain_name)
  @system_api.remove_self_hosted_domain(domain_name)
end
# Delegate self-hosted domain creation to the system API.
def add_self_hosted_domain(params)
  @system_api.add_self_hosted_domain(params)
end
# List self-hosted domains via the system API.
def list_self_hosted_domains()
  @system_api.list_self_hosted_domains()
end
# Delegate self-hosted domain update to the system API.
def update_self_hosted_domain(old_domain_name, params)
  @system_api.update_self_hosted_domain(old_domain_name, params)
end
# Load system preferences via the system API.
def load_system_preferences
  @system_api.load_system_preferences
end
# Save system preferences via the system API.
def save_system_preferences
  @system_api.save_system_preferences
end
# Delegate site registration to the system API.
def register_site(site_hash)
  @system_api.register_site(site_hash)
end
# Delegate site deregistration to the system API.
def deregister_site(site_hash)
  @system_api.deregister_site(site_hash)
end
# Convert a site hash into its string form via the system API.
def hash_to_site_str(site_hash)
  @system_api.hash_to_site_str(site_hash)
end
# Delegate DNS record removal to the system API.
def deregister_dns(top_level_hostname)
  @system_api.deregister_dns(top_level_hostname)
end
# Delegate DNS record registration to the system API.
def register_dns(top_level_hostname, ip_addr_str)
  @system_api.register_dns(top_level_hostname, ip_addr_str)
end
# Fetch cgroup memory statistics via the system API.
def get_container_memory_stats(container)
  @system_api.get_container_memory_stats(container)
end
# Delegate hostname/domain changes to the system API.
def set_engine_hostname_details(container, params)
  @system_api.set_engine_hostname_details(container, params)
end
# True when a deploy image exists for the named container.
def image_exists?(containerName)
  imageName = containerName + "/deploy"
  return @docker_api.image_exists?(imageName)
rescue Exception => e
  log_exception(e) # BUGFIX: was `log_execption`, raising NameError inside the rescue
  return false
end
# Ask the service manager which services are currently attached to
# (objectName, identifier).
# NOTE(review): on exception this returns log_exception's return value
# (effectively nil), not false — callers must tolerate a nil result.
def list_attached_services_for(objectName,identifier)
sm = loadServiceManager()
return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
log_exception e
# earlier in-line implementation kept for reference:
# object_name = object.class.name.split('::').last
#
# case object_name
# when "ManagedEngine"
# retval = Hash.new
#
# retval[:database] = object.databases
# retval[:volume] = object.volumes
# retval[:cron] = object.cron_job_list
#
# return retval
#
# #list services
# # which includes volumes databases cron
# end
# p "missed object name"
# p object_name
#
# service_manager = loadServiceManager()
#
# if service_manager !=nil
# return service_manager.attached_services(object)
#
# end
# return false
end
# Collect the services and component subservices available to `object`,
# keyed :services / :subservices.
def list_avail_services_for(object)
  objectname = object.class.name.split('::').last
  services = load_avail_services_for(objectname)
  subservices = load_avail_component_services_for(object)
  { :services => services, :subservices => subservices }
rescue Exception => e
  log_exception e
end
# Resolve the container backing a software service (recording its name in
# params[:service_container_name]) and load the managed service.
def load_software_service(params)
  sm = loadServiceManager()
  p :load_software_service
  p params
  service_container = sm.get_software_service_container_name(params)
  params[:service_container_name] = service_container
  p :service_container_name
  p service_container
  service = loadManagedService(service_container)
  return nil if service == nil
  service
rescue Exception => e
  log_exception e
end
# Push SMTP smarthost credentials into the smtp service container via the
# setup-params script.
def setup_email_params(params)
  arg = "smarthost_hostname=" + params[:smarthost_hostname] +
        ":smarthost_username=" + params[:smarthost_username] +
        ":smarthost_password=" + params[:smarthost_password] +
        ":mail_name=smtp." + params[:default_domain]
  container = loadManagedService("smtp")
  @docker_api.docker_exec(container, SysConfig.SetupParamsScript, arg)
rescue Exception => e
  log_exception(e)
end
# Set database passwords inside the named server container via the
# setup-params script. Returns true when no container name is given.
def set_database_password(container_name, params)
  arg = "mysql_password=" + params[:mysql_password] + ":" +
        "server=" + container_name + ":" +
        "psql_password=" + params[:psql_password] # Need two args
  if container_name
    server_container = loadManagedService(container_name)
    return @docker_api.docker_exec(server_container, SysConfig.SetupParamsScript, arg)
  end
  true
rescue Exception => e
  log_exception(e)
  false
end
# Attach the consumer described by `service_hash` to its service provider.
def attach_service(service_hash)
  if service_hash == nil
    p :attached_Service_passed_nil
    return false
  end
  service = load_software_service(service_hash)
  p :attaching_to_service
  p service_hash
  return service.add_consumer(service_hash) if service != nil && service != false
  @last_error = "Failed to attach Service: " + @last_error
  false
rescue Exception => e
  log_exception e
end
# Detach a consumer from its service provider. (Name keeps the historical
# "dettach" spelling — it is public interface.)
def dettach_service(params)
  service = load_software_service(params)
  return service.remove_consumer(params) if service != nil && service != false
  @last_error = "Failed to dettach Service: " + @last_error
  false
rescue Exception => e
  log_exception e
end
# List service providers currently in use, via the service manager.
def list_providers_in_use
  loadServiceManager().list_providers_in_use
end
# Lazily construct and memoise the ServiceManager instance.
def loadServiceManager()
  @service_manager ||= ServiceManager.new()
end
# Find the consumers of a service, via the service manager.
def find_service_consumers(params)
  loadServiceManager().find_service_consumers(params)
end
# Find the services consumed by an engine, via the service manager.
def find_engine_services(params)
  loadServiceManager().find_engine_services(params)
end
# Load a SoftwareServiceDefinition from a YAML file; nil on failure (logged).
def load_service_definition(filename)
  yaml_file = File.open(filename)
  p :open
  p filename
  return SoftwareServiceDefinition.from_yaml(yaml_file)
rescue Exception => e
  # BUGFIX: removed a stray empty bare `rescue` clause that silently
  # swallowed StandardError without logging, shadowing this handler.
  log_exception e
end
# Scan SysConfig.ServiceMapTemplateDir/<objectname> for *.yaml service
# definitions and return them as an array of hashes. Heavily instrumented
# with `p` debug output; a failure on one file is logged and skipped rather
# than aborting the scan.
def load_avail_services_for(objectname)
p :load_avail_services_for
p objectname
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + objectname
p :dir
p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
# skip ".", ".." and hidden files
if service_dir_entry.start_with?(".") == true
next
end
p :service_dir_entry
p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
p :service_as_serivce
p service
p :as_hash
p service.to_h
p :as_yaml
p service.to_yaml()
retval.push(service.to_h)
end
end
rescue Exception=>e
# one bad definition must not abort the whole scan
log_exception e
next
end
end
end
p objectname
p retval
return retval
rescue Exception=>e
log_exception e
end
# For a ManagedEngine, list the service definitions applicable to its
# volumes and databases, keyed :volume / :database. Nil for other objects.
def load_avail_component_services_for(object)
  return nil unless object.is_a?(ManagedEngine)
  retval = Hash.new
  if object.volumes.count > 0
    p :loading_vols
    retval[:volume] = load_avail_services_for("Volume") # Array of hashes
  end
  if object.databases.count > 0
    retval[:database] = load_avail_services_for("Database") # Array of hashes
  end
  retval
rescue Exception => e
  log_exception e
end
# Apply runtime (currently memory) changes to an inactive engine by
# updating its config and recreating its container.
# FIXME: also needs to handle environment variables.
def set_engine_runtime_properties(params)
  engine_name = params[:engine_name]
  engine = loadManagedEngine(engine_name)
  # BUGFIX throughout: every error path assigned a throwaway local
  # `last_error` instead of @last_error, so callers never saw the reason.
  if engine.is_a?(EnginesOSapiResult) == true
    @last_error = engine.result_mesg
    return false
  end
  if engine.is_active == true
    @last_error = "Container is active"
    return false
  end
  if params.has_key?(:memory)
    if params[:memory] == engine.memory
      @last_error = "No Change in Memory Value"
      return false
    end
    if engine.update_memory(params[:memory]) == false
      @last_error = engine.last_error
      return false
    end
  end
  # recreate the container so the new limits take effect
  if engine.has_container? == true
    if destroy_container(engine) == false
      @last_error = engine.last_error
      return false
    end
  end
  if create_container(engine) == false
    @last_error = engine.last_error
    return false
  end
  return true
end
# Delegate protocol switching to the system API.
def set_engine_network_properties(engine, params)
  @system_api.set_engine_network_properties(engine, params)
end
# Fetch /proc/loadavg figures via the system API.
def get_system_load_info
  @system_api.get_system_load_info
end
# Fetch /proc/meminfo figures via the system API.
def get_system_memory_info
  @system_api.get_system_memory_info
end
# Load all managed engines via the system API.
def getManagedEngines
  @system_api.getManagedEngines
end
# Load one managed engine via the system API.
def loadManagedEngine(engine_name)
  @system_api.loadManagedEngine(engine_name)
end
# Load one managed service via the system API.
def loadManagedService(service_name)
  @system_api.loadManagedService(service_name)
end
# Load all managed services via the system API.
def getManagedServices
  @system_api.getManagedServices
end
# List configured domains via the system API.
def list_domains
  @system_api.list_domains
end
# List managed engine names via the system API.
def list_managed_engines
  @system_api.list_managed_engines
end
# List managed service names via the system API.
def list_managed_services
  @system_api.list_managed_services
end
# Remove the container via docker, then deregister it and clean up its
# state files. Returns true on success.
def destroy_container(container)
  clear_error
  begin
    return false if @docker_api.destroy_container(container) == false
    container.deregister_registered
    @system_api.destroy_container(container) # removes cid file
    true
  rescue Exception => e
    container.last_error = ("Failed To Destroy " + e.to_s)
    log_exception(e)
    false
  end
end
# Delete the container's image via docker, then remove its config files.
def delete_image(container)
  clear_error
  if @docker_api.delete_image(container) == true
    @system_api.delete_container_configs(container)
  else
    false
  end
rescue Exception => e
  container.last_error = ("Failed To Delete " + e.to_s)
  log_exception(e)
  false
end
# Run a shell command, merging stderr into stdout. Returns true on apparent
# success; false (with @last_error holding the output) when the output looks
# like a failure; false on exception.
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    # FIXME should be case insensitive; the hostname check is a pure kludge —
    # really need to get stderr and stdout separately.
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output res
      return false
    end
  rescue Exception => e
    log_exception(e)
    return false # BUGFIX: previously returned undefined `ret_val`, raising NameError inside the rescue
  end
end
# Run the one-shot "volbuilder" helper container to initialise a container's
# volumes for `username`. Any stale volbuilder container/cid file from a
# previous run is removed first, and the helper is torn down again afterwards.
def run_volume_builder(container,username)
clear_error
begin
# clean up a volbuilder left behind by a previous run
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
command = "docker stop volbuilder"
run_system(command)
command = "docker rm volbuilder"
run_system(command)
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
mapped_vols = get_volbuild_volmaps container
command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder /bin/sh /home/setup_vols.sh "
SystemUtils.debug_output command
run_system(command)
# tear the helper container down again
command = "docker stop volbuilder; docker rm volbuilder"
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
res = run_system(command)
if res != true
log_error(res)
return false
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Create a container: clear any stale cid and run flags, create it via
# docker, then let the system API record id / dns / site registration.
# Returns true on success, false on any failure.
def create_container(container)
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # BUGFIX: previously fell through returning nil when the docker create
      # failed, so callers checking `== false` missed the failure.
      return false
    else
      return false
    end
  rescue Exception => e
    container.last_error = ("Failed To Create " + e.to_s)
    log_exception(e)
    return false
  end
end
# Rebuild a container's image from its stored settings via EngineBuilder.
def rebuild_image(container)
  clear_error
  params = {
    :engine_name => container.containerName,
    :domain_name => container.domainName,
    :host_name => container.hostName,
    :env_variables => container.environments,
    :http_protocol => container.protocol,
    :repository_url => container.repo,
    :software_environment_variables => container.environments
  }
  builder = EngineBuilder.new(params, self)
  builder.rebuild_managed_container(container)
rescue Exception => e
  log_exception(e)
  false
end
# @container_name = params[:engine_name]
# @domain_name = params[:domain_name]
# @hostname = params[:host_name]
# custom_env= params[:software_environment_variables]
# # custom_env=params
# @core_api = core_api
# @http_protocol = params[:http_protocol]
# p params
# @repoName= params[:repository_url]
# @cron_job_list = Array.new
# @build_name = File.basename(@repoName).sub(/\.git$/,"")
# @workerPorts=Array.new
# @webPort=8000
# @vols=Array.new
#FIXME Kludge
# Read RX/TX byte counters from the first interface inside a container.
# Returns {:in => rx, :out => tx} as strings, or "-1" markers when the
# counters cannot be read (-1 integers on exception).
def get_container_network_metrics(containerName)
  begin
    ret_val = Hash.new
    clear_error
    # BUGFIX: the awk field separator was `" "` inside a Ruby double-quoted
    # string — adjacent-literal concatenation swallowed the quotes, so awk
    # printed the two fields with no separator. Now escaped.
    cmd = "docker exec " + containerName + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
    res = %x<#{cmd}>
    vals = res.split("bytes:")
    # BUGFIX: condition was inverted (`< 2`), so real readings were always
    # discarded and -1 returned. split yields ["", rx…, tx…] on success.
    if vals.count > 2
      if vals[1] != nil && vals[2] != nil
        ret_val[:in] = vals[1].chop
        ret_val[:out] = vals[2].chop
      else
        ret_val[:in] = "-1"
        ret_val[:out] = "-1"
      end
    else
      ret_val[:in] = "-1"
      ret_val[:out] = "-1"
    end
    return ret_val
  rescue Exception => e
    log_exception(e)
    ret_val[:in] = -1
    ret_val[:out] = -1
    return ret_val
  end
end
# Query the system API for the container's startup-complete flag.
def is_startup_complete(container)
  clear_error
  @system_api.is_startup_complete(container)
rescue Exception => e
  log_exception(e)
  false
end
protected
# Build the -v volume mappings handed to the volbuilder helper container:
# state dir, log dir, each configured volume, plus --volumes-from the engine.
def get_volbuild_volmaps(container)
  clear_error
  state_dir = SysConfig.CidDir + "/containers/" + container.containerName + "/run/"
  log_dir = SysConfig.SystemLogRoot + "/containers/" + container.containerName
  opts = " -v " + state_dir + ":/client/state:rw "
  opts += " -v " + log_dir + ":/client/log:rw "
  if container.volumes != nil
    container.volumes.each_value do |vol|
      SystemUtils.debug_output vol
      opts += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
    end
  end
  opts + " --volumes-from " + container.containerName
rescue Exception => e
  log_exception(e)
  false
end
# Reset the last-error buffer before starting a new operation.
def clear_error
  @last_error = ''
end
# Record an exception (message plus concatenated backtrace) as the last
# error and log it at level 10.
def log_exception(e)
  details = e.backtrace.inject(e.to_s) { |acc, frame| acc + frame }
  @last_error = details
  SystemUtils.log_output(details, 10)
end
end
# FIXME(review): stray tokens `[:variables][` — appears to be paste/extraction residue and does not parse as Ruby; commented out pending confirmation
class EnginesCore
require "/opt/engines/lib/ruby/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
class SystemApi
attr_reader :last_error
# Keep a handle on the owning engines API for callbacks (e.g. signalling
# service processes).
def initialize(api)
  @engines_api = api
end
# def
# @docker_api.update_self_hosted_domain( params)
# end
# Post-create bookkeeping: record the container's docker id, persist its
# config, then register DNS and (optionally) the web site.
def create_container(container)
  clear_error
  cid = read_container_id(container)
  container.container_id = (cid)
  if save_container(container) == true
    register_dns_and_site(container)
  else
    false # save_container failed
  end
rescue Exception => e
  container.last_error = ("Failed To Create " + e.to_s)
  log_exception(e)
  false
end
# Register DNS (and, where configured, the web site) for a container,
# honouring its conf_register_dns / conf_register_site flags.
# Returns true when nothing needed doing or everything succeeded.
def register_dns_and_site(container)
  return true unless container.conf_register_dns == true
  return false unless container.register_dns() == true # dns registration failed
  if container.conf_register_site() == true
    return container.register_site == true
  end
  true
end
# HUP the named process inside the dns service so it re-reads zone files.
def reload_dns
  dns_pid = File.read(SysConfig.NamedPIDFile)
  p :kill_HUP_TO_DNS
  p dns_pid.to_s
  @engines_api.signal_service_process(dns_pid.to_s, 'HUP', 'dns')
rescue Exception => e
  log_exception(e)
  false
end
# HUP nginx's master process inside the nginx container so it reloads its
# configuration.
def restart_nginx_process
  clear_error
  cmd = "docker exec nginx ps ax |grep \"nginx: master\" |grep -v grep |awk '{ print $1}'"
  SystemUtils.debug_output(cmd)
  nginxpid = %x<#{cmd}>
  SystemUtils.debug_output(nginxpid)
  # FIXME read from pid file — this is just silly
  docker_cmd = "docker exec nginx kill -HUP " + nginxpid.to_s
  SystemUtils.debug_output(docker_cmd)
  return false if nginxpid.to_s == "-"
  run_system(docker_cmd)
rescue Exception => e
  log_exception(e)
  false
end
# Mark the container as having no known docker id.
def clear_cid(container)
  container.container_id = (-1)
end
# True once the container has dropped its startup_complete flag file in its
# state directory.
def is_startup_complete(container)
  clear_error
  File.exists?(container_state_dir(container) + "/startup_complete")
rescue Exception => e
  log_exception(e)
  false
end
# Removes the container's .cid file if present. Returns true on success
# (including when no file existed), false on error.
def clear_cid_file container
  clear_error
  begin
    cidfile = container_cid_file(container)
    File.delete(cidfile) if File.exist?(cidfile)
    return true
  rescue StandardError => e
    # BUG FIX: message previously said "Failed To Create" -- this deletes.
    container.last_error = ("Failed To Delete " + e.to_s)
    log_exception(e)
    return false
  end
end
# Reads the docker container id from the container's .cid file.
# Returns the id string, nil when no cid file exists (preserves the
# original fall-through), or "-1" on error.
def read_container_id(container)
  clear_error
  begin
    cidfile = container_cid_file(container)
    return File.read(cidfile) if File.exist?(cidfile)
    return nil
  rescue StandardError => e
    log_exception(e)
    return "-1" # stray trailing semicolon removed
  end
end
# Forgets the container's docker id and removes its .cid file.
# Returns true whether or not the file existed, false on error.
def destroy_container container
  clear_error
  begin
    container.container_id = (-1)
    cidfile = container_cid_file(container) # compute the path once
    File.delete(cidfile) if File.exist?(cidfile)
    return true # file may or may not exist
  rescue StandardError => e
    container.last_error = ("Failed To Destroy " + e.to_s)
    log_exception(e)
    return false
  end
end
# Adds (replacing any existing record) a short-TTL A record for the host
# via an nsupdate command script written to a temp file.
def register_dns(top_level_hostname,ip_addr_str) # no Gem made this simple (need to set tiny TTL) and and all used nsupdate anyhow
clear_error
begin
fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
#FIXME need unique name for temp file
dns_cmd_file_name="/tmp/.dns_cmd_file"
dns_cmd_file = File.new(dns_cmd_file_name,"w+")
# nsupdate script: drop any stale record first, then add a 30s-TTL A record.
dns_cmd_file.puts("server " + SysConfig.defaultDNS)
dns_cmd_file.puts("update delete " + fqdn_str)
dns_cmd_file.puts("send")
dns_cmd_file.puts("update add " + fqdn_str + " 30 A " + ip_addr_str)
dns_cmd_file.puts("send")
dns_cmd_file.close
cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
retval = run_system(cmd_str)
#File.delete(dns_cmd_file_name)
return retval
rescue Exception=>e
log_exception(e)
return false
end
end
# Deletes the container's saved config.yaml and its .cid file.
# Returns true on success, false on error.
def delete_container_configs(container)
  clear_error
  begin
    # was misnamed `stateDir` although it holds the config file path
    config_file = container_state_dir(container) + "/config.yaml"
    File.delete(config_file)
    # use the shared helper instead of re-building the cid path inline
    cidfile = container_cid_file(container)
    File.delete(cidfile) if File.exist?(cidfile)
    return true
  rescue StandardError => e
    container.last_error = ( "Failed To Delete " )
    log_exception(e)
    return false
  end
end
# Removes the host's A record via nsupdate, then deletes the temp command
# file. Returns run_system's result, false on error.
def deregister_dns(top_level_hostname)
  clear_error
  begin
    fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
    # BUG FIX: the filename contained the literal text "top_level_hostname"
    # rather than the hostname, so concurrent deregistrations shared (and
    # deleted) one temp file; interpolate the real hostname.
    dns_cmd_file_name = "/tmp/." + top_level_hostname + ".dns_cmd_file"
    # Block form guarantees the handle is closed even if a write raises.
    File.open(dns_cmd_file_name, "w") do |dns_cmd_file|
      dns_cmd_file.puts("server " + SysConfig.defaultDNS)
      dns_cmd_file.puts("update delete " + fqdn_str)
      dns_cmd_file.puts("send")
    end
    cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
    retval = run_system(cmd_str)
    File.delete(dns_cmd_file_name)
    return retval
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Returns fqdn when a matching certificate exists in the nginx cert dir,
# otherwise the system default certificate name.
def get_cert_name(fqdn)
  # File.exists? is deprecated -- File.exist?
  if File.exist?(SysConfig.NginxCertDir + "/" + fqdn + ".crt")
    return fqdn
  else
    return SysConfig.NginxDefaultCert
  end
end
# Writes an nginx site configuration for the given site and HUPs nginx.
# site_hash[:proto] selects the template: "http", "https" or "http https"
# (nil/unknown fall back to the combined template).
# Returns restart_nginx_process's result, false on error.
def register_site(site_hash)
clear_error
begin
proto = site_hash[:proto]
if proto =="http https"
template_file=SysConfig.HttpHttpsNginxTemplate
elsif proto =="http"
template_file=SysConfig.HttpNginxTemplate
elsif proto == "https"
template_file=SysConfig.HttpsNginxTemplate
elsif proto == nil
p "Proto nil"
template_file=SysConfig.HttpHttpsNginxTemplate
else
p "Proto" + proto + " unknown"
template_file=SysConfig.HttpHttpsNginxTemplate
end
file_contents=File.read(template_file)
# String#sub replaces only the FIRST occurrence, so each call below
# consumes one placeholder; the combined template carries a second set of
# placeholders (one server block per protocol) handled further down.
site_config_contents = file_contents.sub("FQDN",site_hash[:variables][:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:variables][:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:variables][:name]) #Not HostName
if proto =="https" || proto =="http https"
# Two CERTNAME substitutions: presumably the template names the cert
# twice (certificate and key paths) -- TODO confirm against template.
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:variables][:fqdn])) #Not HostName
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:variables][:fqdn])) #Not HostName
end
if proto =="http https"
#Repeat for second entry
site_config_contents = site_config_contents.sub("FQDN",site_hash[:variables][:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:variables][:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:variables][:name]) #Not HostName
end
site_filename = get_site_file_name(site_hash)
site_file = File.open(site_filename,'w')
site_file.write(site_config_contents)
site_file.close
result = restart_nginx_process()
return result
rescue Exception=>e
log_exception(e)
return false
end
end
# Serialises a site hash to "name:fqdn:port:proto" for the shell helpers.
# Returns false (after logging) when the hash is malformed.
def hash_to_site_str(site_hash)
  clear_error
  begin
    vars = site_hash[:variables]
    fields = [site_hash[:name], vars[:fqdn], vars[:port], vars[:proto]]
    return fields.map(&:to_s).join(":")
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Builds the nginx site file path: <NginxSiteDir>/<proto>_<fqdn>.site
# ("http https" collapses to "http_https").
def get_site_file_name(site_hash)
  proto = site_hash[:variables][:proto]
  p :proto
  p proto
  proto = "http_https" if proto == "http https"
  "#{SysConfig.NginxSiteDir}/#{proto}_#{site_hash[:variables][:fqdn]}.site"
end
# Removes the nginx site file for site_hash (if any) and reloads nginx.
# Returns restart_nginx_process's result, false on error.
def deregister_site(site_hash)
  clear_error
  begin
    site_filename = get_site_file_name(site_hash)
    File.delete(site_filename) if File.exist?(site_filename)
    return restart_nginx_process()
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Registers the site with the external monitoring script.
def add_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.addSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
# Removes the site from external monitoring.
def rm_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.rmSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
# Serialises the container to <CidDir>/<ctype>s/<name>/config.yaml, backing
# up any previous file to config.yaml.bak. The (non-serialisable) core_api
# reference is detached during the YAML dump and restored afterwards.
# Returns true on success, false on error.
def save_container(container)
  clear_error
  begin
    #FIXME
    api = container.core_api
    container.core_api = nil
    serialized_object = YAML::dump(container)
    container.core_api = api
    state_dir = SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
    unless File.directory?(state_dir)
      Dir.mkdir(state_dir)
      Dir.mkdir(state_dir + "/run")
    end
    log_dir = container_log_dir(container)
    Dir.mkdir(log_dir) unless File.directory?(log_dir)
    statefile = state_dir + "/config.yaml"
    # Backup the current file with a rename before writing the new one.
    File.rename(statefile, statefile + ".bak") if File.exist?(statefile)
    # Block form guarantees the handle is closed even if the write raises.
    File.open(statefile, File::CREAT | File::TRUNC | File::RDWR, 0644) do |f|
      f.puts(serialized_object)
    end
    return true
  rescue StandardError => e
    # BUG FIX: message previously said "load error" -- this is the save path.
    container.last_error = ("save error")
    log_exception(e)
    return false
  end
end
# Writes the blueprint JSON to <state_dir>/blueprint.json.
# Returns true on success (previously the success value was implicit/nil),
# false when blueprint is nil or on error.
def save_blueprint(blueprint, container)
  clear_error
  begin
    if blueprint != nil
      puts blueprint.to_s
    else
      return false
    end
    state_dir = container_state_dir(container)
    Dir.mkdir(state_dir) unless File.directory?(state_dir)
    statefile = state_dir + "/blueprint.json"
    # Block form closes the handle even if to_json/write raises.
    File.open(statefile, File::CREAT | File::TRUNC | File::RDWR, 0644) do |f|
      f.write(blueprint.to_json)
    end
    return true
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Loads and parses <state_dir>/blueprint.json.
# Returns the parsed structure, or false when missing or on error.
def load_blueprint(container)
  clear_error
  begin
    state_dir = container_state_dir(container)
    return false unless File.directory?(state_dir)
    statefile = state_dir + "/blueprint.json"
    return false unless File.exist?(statefile)
    # File.read avoids the unclosed-handle-on-raise leak of File.new
    return JSON.parse(File.read(statefile))
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Persists the domains hash as YAML to SysConfig.DomainsFile.
# Returns true on success, false on error.
def save_domains(domains)
  clear_error
  begin
    File.open(SysConfig.DomainsFile, "w") do |f|
      f.write(domains.to_yaml())
    end
    return true
  rescue Exception => e
    SystemUtils.log_exception(e)
    return false
  end
end
# Loads the hosted-domains hash from SysConfig.DomainsFile.
# Creates an empty file (and returns {}) on first use; returns {} when the
# file is empty (YAML.load of "" yields false) or cannot be loaded.
def load_domains
  clear_error
  begin
    unless File.exist?(SysConfig.DomainsFile)
      p :creating_new_domain_list
      File.open(SysConfig.DomainsFile, "w") { |f| } # touch the file
      return Hash.new
    end
    # Block form replaces explicit open/close pairs.
    domains = File.open(SysConfig.DomainsFile, "r") { |f| YAML::load(f) }
    if domains == false
      p :domains_error_in_load
      return Hash.new
    end
    return domains
  rescue StandardError => e
    p "failed_to_load_domains"
    SystemUtils.log_exception(e)
    return Hash.new
  end
end
# Returns the persisted domain hash ({} on any failure).
def list_domains
domains = load_domains
return domains
rescue Exception=>e
domains = Hash.new
p :error_listing_domains
SystemUtils.log_exception(e)
return domains
end
# Registers a domain (optionally as self-hosted) and persists the domain
# list. Returns true when saved, false otherwise.
def add_domain(params)
  clear_error
  # removed unused local (`domain = params[:domain_name]` was never read)
  if params[:self_hosted]
    add_self_hosted_domain params
  end
  p :add_domain
  p params
  domains = load_domains()
  domains[params[:domain_name]] = params
  return true if save_domains(domains)
  p :failed_add_hosted_domains
  return false
rescue StandardError => e
  SystemUtils.log_exception(e)
  return false
end
# Removes a domain from the persisted list; when it was present, saves the
# list and reloads DNS. No-op (nil) when the domain is unknown.
def rm_domain(domain, system_api)
  clear_error
  domains = load_domains
  return unless domains.key?(domain)
  domains.delete(domain)
  save_domains(domains)
  system_api.reload_dns
end
# Replaces old_domain_name's entry with params (keyed by the possibly new
# :domain_name), persists the list and refreshes self-hosted DNS when asked.
def update_domain(old_domain_name, params,system_api)
clear_error
begin
domains = load_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_domains(domains)
if params[:self_hosted]
add_self_hosted_domain params
# NOTE(review): rm_self_hosted_domain is not defined in this class (only
# remove_self_hosted_domain is) -- if it raises NoMethodError the rescue
# below turns the whole update into `false`. Confirm intended method.
rm_self_hosted_domain(old_domain_name)
system_api.reload_dns
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Delegates self-hosted domain registration to DNSHosting.
# Returns DNSHosting.add_hosted_domain's result, or false on error.
def add_self_hosted_domain params
  clear_error
  begin
    p :Lachlan_Sent_parrams
    p params
    return DNSHosting.add_hosted_domain(params, self)
    # Removed unreachable code that followed this return: it referenced an
    # undefined local (`domains`) left over from a commented-out rewrite.
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Lists self-hosted domains via DNSHosting (false on error).
def list_self_hosted_domains()
clear_error
begin
return DNSHosting.load_self_hosted_domains()
# domains = load_self_hosted_domains()
# p domains
# return domains
rescue Exception=>e
log_exception(e)
return false
end
end
# Renames/updates a self-hosted domain entry.
# NOTE(review): load_self_hosted_domains / save_self_hosted_domains are not
# defined in this class (only commented-out remnants remain), so this raises
# NoMethodError and returns false -- confirm the intended helpers.
def update_self_hosted_domain(old_domain_name, params)
clear_error
begin
domains = load_self_hosted_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_self_hosted_domains(domains)
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Delegates removal of a self-hosted domain to DNSHosting.
# Returns DNSHosting's result, false on error.
def remove_self_hosted_domain(domain_name)
  clear_error
  DNSHosting.rm_hosted_domain(domain_name, self)
rescue Exception => e
  log_exception(e)
  return false
end
# Stub: system preference persistence is not implemented yet.
def save_system_preferences
clear_error
begin
SystemUtils.debug_output :pdsf
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Stub: note the success path returns debug_output's result, not a
# meaningful value (unlike save_system_preferences which returns true).
def load_system_preferences
clear_error
begin
SystemUtils.debug_output :psdfsd
rescue Exception=>e
log_exception(e)
return false
end
end
# Reads the container's cgroup memory stats (:maximum/:current/:limit, in
# bytes as strings). Refreshes a missing/stale container_id from the cid
# file first. On failure the hash carries error placeholders instead.
def get_container_memory_stats(container)
  clear_error
  ret_val = Hash.new
  begin
    # BUG FIX: `a && b == nil || c` parsed as `(a && b == nil) || c`, so a
    # nil container crashed on `container.container_id` (swallowed by the
    # rescue). Parenthesised to match the clearly intended grouping.
    if container && (container.container_id == nil || container.container_id == '-1')
      container.container_id = (read_container_id(container))
    end
    if container && container.container_id != nil && container.container_id != '-1'
      path = "/sys/fs/cgroup/memory/docker/" + container.container_id
      if Dir.exist?(path) # Dir.exists? is deprecated
        ret_val.store(:maximum, File.read(path + "/memory.max_usage_in_bytes"))
        ret_val.store(:current, File.read(path + "/memory.usage_in_bytes"))
        ret_val.store(:limit, File.read(path + "/memory.limit_in_bytes"))
      else
        p :no_cgroup_file
        p path
        ret_val.store(:maximum, "No Container")
        ret_val.store(:current, "No Container")
        ret_val.store(:limit, "No Container")
      end
    end
    return ret_val
  rescue StandardError => e
    log_exception(e)
    ret_val.store(:maximum, e.to_s)
    ret_val.store(:current, "NA")
    ret_val.store(:limit, "NA")
    return ret_val
  end
end
# Switches the engine's web protocol according to params[:http_protocol]
# ("HTTPS only" / "HTTP only" / "HTTPS and HTTP"). Returns true/false.
def set_engine_network_properties(engine, params)
  clear_error
  begin
    # removed unused local engine_name (params[:engine_name] was never used)
    protocol = params[:http_protocol]
    if protocol.nil?
      p params
      return false
    end
    SystemUtils.debug_output("Changing protocol to _" + protocol + "_")
    if protocol.include?("HTTPS only")
      engine.enable_https_only
    elsif protocol.include?("HTTP only")
      engine.enable_http_only
    elsif protocol.include?("HTTPS and HTTP")
      engine.enable_http_and_https
    end
    return true
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Applies a new hostname/domain to the container; when something changed,
# re-registers it with the dns and nginx managed services and persists the
# container. Returns true (even when nothing changed), false on error.
def set_engine_hostname_details(container, params)
  clear_error
  begin
    # removed unused locals engine_name / saved_hostName / saved_domainName
    # and a duplicated debug line
    hostname = params[:host_name]
    domain_name = params[:domain_name]
    SystemUtils.debug_output("Changing Domainame to " + domain_name)
    if container.hostName != hostname || container.domainName != domain_name
      if container.set_hostname_details(hostname, domain_name) == true
        # drop the old DNS/nginx registrations, then re-add under new names
        nginx_service = EnginesOSapi.loadManagedService("nginx", self)
        nginx_service.remove_consumer(container)
        dns_service = EnginesOSapi.loadManagedService("dns", self)
        dns_service.remove_consumer(container)
        dns_service.add_consumer(container)
        nginx_service.add_consumer(container)
        save_container(container)
      end
    end
    return true
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
# Parses /proc/meminfo into a hash of kB values (:total, :free, :buffers,
# :file_cache, :active, :inactive, :swap_total, :swap_free).
# On error :total carries the exception text and the other keys are -1.
def get_system_memory_info
  clear_error
  ret_val = Hash.new
  begin
    # File.foreach closes the handle; the original leaked an open File.
    File.foreach("/proc/meminfo") do |line|
      values = line.split(" ")
      case values[0]
      when "MemTotal:" then ret_val[:total] = values[1]
      when "MemFree:" then ret_val[:free] = values[1]
      when "Buffers:" then ret_val[:buffers] = values[1]
      when "Cached:" then ret_val[:file_cache] = values[1]
      when "Active:" then ret_val[:active] = values[1]
      when "Inactive:" then ret_val[:inactive] = values[1]
      when "SwapTotal:" then ret_val[:swap_total] = values[1]
      when "SwapFree:" then ret_val[:swap_free] = values[1]
      end
    end
    return ret_val
  rescue StandardError => e
    log_exception(e)
    ret_val[:total] = e.to_s
    [:free, :active, :inactive, :file_cache, :buffers, :swap_total, :swap_free].each do |key|
      ret_val[key] = -1
    end
    return ret_val
  end
end
# Parses /proc/loadavg into {:one, :five, :fithteen, :running, :idle}.
# (The :fithteen key is misspelled but preserved -- callers may depend on
# it.) On error every key is -1.
def get_system_load_info
  clear_error
  ret_val = Hash.new
  begin
    values = File.read("/proc/loadavg").split(" ")
    ret_val[:one] = values[0]
    ret_val[:five] = values[1]
    ret_val[:fithteen] = values[2]
    run_idle = values[3].split("/")
    ret_val[:running] = run_idle[0]
    ret_val[:idle] = run_idle[1]
    # BUG FIX: the success path previously fell off the last assignment and
    # returned a String (run_idle[1]) instead of the hash; also removed an
    # unreachable duplicate `rescue Exception` clause.
    return ret_val
  rescue StandardError => e
    log_exception(e)
    [:one, :five, :fithteen, :running, :idle].each { |key| ret_val[key] = -1 }
    return ret_val
  end
end
# Loads every engine that has a config.yaml under <CidDir>/containers/.
# Returns an Array of ManagedEngine (failed loads are logged and skipped),
# or false on unexpected error.
def getManagedEngines()
  ret_val = Array.new
  Dir.entries(SysConfig.CidDir + "/containers/").each do |contdir|
    yfn = SysConfig.CidDir + "/containers/" + contdir + "/config.yaml"
    next unless File.exist?(yfn) # File.exists? is deprecated
    managed_engine = loadManagedEngine(contdir)
    if managed_engine.is_a?(ManagedEngine)
      ret_val.push(managed_engine)
    else
      log_error("failed to load " + yfn)
    end
  end
  return ret_val
rescue StandardError => e
  log_exception(e)
  return false
end
# Loads one engine's YAML config into a ManagedEngine.
# Returns the engine, or false (with @last_error / a logged reason).
def loadManagedEngine(engine_name)
  if engine_name == nil || engine_name.length == 0
    # BUG FIX: bare `last_error = ...` created a local variable and never
    # set the @last_error the attr_reader exposes.
    @last_error = "No Engine Name"
    return false
  end
  begin
    yam_file_name = SysConfig.CidDir + "/containers/" + engine_name + "/config.yaml"
    unless File.exist?(yam_file_name)
      log_error("no such file " + yam_file_name)
      return false
    end
    yaml_file = File.open(yam_file_name)
    managed_engine = ManagedEngine.from_yaml(yaml_file, @engines_api)
    if managed_engine == nil || managed_engine == false
      p :from_yaml_returned_nil
      return false
    end
    return managed_engine
  rescue StandardError => e
    # engine_name is known non-empty here (guarded above)
    if managed_engine != nil
      managed_engine.last_error = ("Failed To get Managed Engine " + engine_name + " " + e.to_s)
      log_error(managed_engine.last_error)
    end
    log_exception(e)
    return false
  end
end
# Loads one service's YAML config into a ManagedService.
# Returns the service, or false on any failure.
def loadManagedService(service_name)
  begin
    if service_name == nil || service_name.length == 0
      # BUG FIX: bare `last_error = ...` created a local, not @last_error.
      @last_error = "No Service Name"
      return false
    end
    yam_file_name = SysConfig.CidDir + "/services/" + service_name + "/config.yaml"
    return false unless File.exist?(yam_file_name)
    yaml_file = File.open(yam_file_name)
    managed_service = ManagedService.from_yaml(yaml_file, @engines_api)
    return false if managed_service == nil
    return managed_service
  rescue StandardError => e
    if managed_service != nil
      # BUG FIX: message said "Managed Engine" -- copy/paste from the
      # engine loader; this loads services.
      managed_service.last_error = ("Failed To get Managed Service " + service_name + " " + e.to_s)
      log_error(managed_service.last_error)
    end
    log_exception(e)
    return false
  end
end
# Loads every service with a config.yaml under <CidDir>/services/.
# Returns an Array of ManagedService, or false on unexpected error.
def getManagedServices()
  ret_val = Array.new
  Dir.entries(SysConfig.CidDir + "/services/").each do |contdir|
    yfn = SysConfig.CidDir + "/services/" + contdir + "/config.yaml"
    next unless File.exist?(yfn)
    # Block form closes the file even if from_yaml raises.
    File.open(yfn) do |yf|
      managed_service = ManagedService.from_yaml(yf, @engines_api)
      ret_val.push(managed_service) if managed_service
    end
  end
  return ret_val
rescue StandardError => e
  log_exception(e)
  return false
end
# Lists the names of containers that have a saved config.yaml.
# Returns whatever was collected so far even when listing fails part-way.
def list_managed_engines
  clear_error
  ret_val = Array.new
  begin
    Dir.entries(SysConfig.CidDir + "/containers/").each do |contdir|
      yfn = SysConfig.CidDir + "/containers/" + contdir + "/config.yaml"
      ret_val.push(contdir) if File.exist?(yfn) # File.exists? is deprecated
    end
  rescue StandardError => e
    log_exception(e)
    return ret_val
  end
  return ret_val
end
# Lists the names of services that have a saved config.yaml.
# Returns whatever was collected so far even when listing fails part-way.
def list_managed_services
  clear_error
  ret_val = Array.new
  begin
    Dir.entries(SysConfig.CidDir + "/services/").each do |contdir|
      yfn = SysConfig.CidDir + "/services/" + contdir + "/config.yaml"
      ret_val.push(contdir) if File.exist?(yfn) # File.exists? is deprecated
    end
  rescue StandardError => e
    log_exception(e)
    return ret_val
  end
  return ret_val
end
# Clears the container's startup_complete flag so startup state will be
# re-detected. (Later: reset subscriptions and other flags too.)
def clear_container_var_run(container)
  clear_error
  begin
    flag = container_state_dir(container) + "/startup_complete"
    File.unlink(flag) if File.exist?(flag) # File.exists? is deprecated
    return true
  rescue StandardError => e
    log_exception(e)
    return false
  end
end
protected
# Path of the container's docker cid file.
def container_cid_file(container)
return SysConfig.CidDir + "/" + container.containerName + ".cid"
end
# Per-container state directory (<CidDir>/<ctype>s/<name>).
def container_state_dir(container)
return SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
end
# Per-container log directory under the system log root.
def container_log_dir container
return SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
end
# Runs a shell command with stderr folded into stdout. Returns true when
# the exit status is 0 and the output contains none of the known failure
# words; otherwise returns the raw output (or false on exception).
def run_system (cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    failure_words = ["error", "fail", "could not resolve hostname", "unsuccessful"]
    if $? == 0 && failure_words.none? { |word| res.downcase.include?(word) }
      return true
    else
      return res
    end
  rescue StandardError => e
    log_exception(e)
    # BUG FIX: previously `return ret_val` -- an undefined local, so the
    # rescue itself raised NameError instead of reporting failure.
    return false
  end
end
# Resets @last_error before each public operation.
def clear_error
@last_error = ""
end
# Records e_str as the last error and writes it to the system log.
def log_error(e_str)
@last_error = e_str
SystemUtils.log_output(e_str,10)
end
# Flattens an exception (message plus full backtrace, unseparated) into
# the error log via log_error.
def log_exception(e)
e_str = e.to_s()
e.backtrace.each do |bt |
e_str += bt
end
log_error(e_str)
end
end #END of SystemApi
class DockerApi
attr_reader :last_error
# Builds the full `docker run -d ...` argument string for the container
# and executes it. Returns run_docker's result, false on error.
def create_container container
clear_error
begin
commandargs = container_commandline_args(container)
commandargs = " run -d " + commandargs
SystemUtils.debug_output commandargs
retval = run_docker(commandargs,container)
return retval
rescue Exception=>e
container.last_error=("Failed To Create ")
log_exception(e)
return false
end
end
# Simple docker verb wrappers: each runs one docker subcommand against the
# container and returns run_docker's result (false on exception).
def start_container container
clear_error
begin
commandargs =" start " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
def stop_container container
clear_error
begin
commandargs=" stop " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
def pause_container container
clear_error
begin
commandargs = " pause " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
# True when `docker images -q <image_name>` produces any output.
def image_exists? (image_name)
cmd= "docker images -q " + image_name
res = SystemUtils.run_system(cmd)
if res.length >0
return true
else
return false
end
end
def unpause_container container
clear_error
begin
commandargs=" unpause " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
# `docker top <name> axl` -- process listing inside the container.
def ps_container container
clear_error
begin
commandargs=" top " + container.containerName + " axl"
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
# Sends `kill -<signal> <pid>` inside the container (method-level rescue).
def signal_container_process(pid,signal,container)
clear_error
commandargs=" exec " + container.containerName + " kill -" + signal + " " + pid.to_s
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
# `docker logs <name>`.
def logs_container container
clear_error
begin
commandargs=" logs " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
# `docker inspect <name>` -- result lands in container.last_result.
def inspect_container container
clear_error
begin
commandargs=" inspect " + container.containerName
return run_docker(commandargs,container)
rescue Exception=>e
log_exception(e)
return false
end
end
# `docker rm <name>` -- returns run_docker's value (via the assignment,
# which is the method's last expression).
def destroy_container container
clear_error
begin
commandargs= " rm " + container.containerName
ret_val = run_docker(commandargs,container)
rescue Exception=>e
container.last_error=( "Failed To Destroy " + e.to_s)
log_exception(e)
return false
end
end
# `docker rmi <image>` -- removes the container's image.
def delete_image container
clear_error
begin
commandargs= " rmi " + container.image
ret_val = run_docker(commandargs,container)
return ret_val
rescue Exception=>e
container.last_error=( "Failed To Delete " + e.to_s)
log_exception(e)
return false
end
end
# Runs an arbitrary command (with args) inside the container via
# `docker exec`.
def docker_exec(container,command,args)
run_args = "exec " + container.containerName + " " + command + " " + args
return run_docker(run_args,container)
end
# Runs `docker <args>`, streaming stdout/stderr via Open3.
# Side effects: sets container.last_result to the joined stdout and
# container.last_error / @last_error to the collected stderr.
# Returns false when stderr contains "Error" or on exception, else true.
# NOTE(review): the trailing "]" patch-up compensates for chopping each
# stdout line -- presumably to keep `docker inspect` JSON parseable;
# confirm before changing.
def run_docker (args,container)
  clear_error
  require 'open3'
  SystemUtils.debug_output(args)
  res = String.new
  error_mesg = String.new
  begin
    container.last_result=( "")
    Open3.popen3("docker " + args ) do |stdin, stdout, stderr, th|
      oline = String.new
      stderr_is_open=true
      begin
        stdout.each { |line|
          line = line.gsub(/\\\"/,"")
          oline = line
          res += line.chop
          if stderr_is_open
            error_mesg += stderr.read_nonblock(256)
          end
        }
      rescue Errno::EIO
        res += oline.chop
        SystemUtils.debug_output(oline)
        error_mesg += stderr.read_nonblock(256)
      rescue IO::WaitReadable
        retry
      rescue EOFError
        if stdout.closed? == false
          stderr_is_open = false
          retry
        elsif stderr.closed? == false
          error_mesg += stderr.read_nonblock(1000)
          container.last_result=( res)
          # BUG FIX: both branches below previously assigned `error_mesgs`
          # (an undefined local), raising NameError that the outer rescue
          # silently converted into a false return.
          container.last_error=( error_mesg)
        else
          container.last_result=( res)
          container.last_error=( error_mesg)
        end
      end
      @last_error=error_mesg
      if error_mesg.include?("Error")
        container.last_error=(error_mesg)
        return false
      else
        container.last_error=("")
      end
      if res != nil && res.end_with?(']') == false
        res+=']'
      end
      container.last_result=(res)
      return true
    end
  rescue Exception=>e
    @last_error=error_mesg + e.to_s
    container.last_result=(res)
    container.last_error=(error_mesg + e.to_s)
    log_exception(e)
    return false
  end
  return true
end
# Builds the docker `-e NAME="value"` option string from
# container.environments, skipping nil entries.
# (Method name typo preserved -- callers reference it.)
def get_envionment_options(container)
  opts = String.new
  envs = container.environments
  if envs
    envs.each do |env|
      next if env == nil
      opts << " -e " << env.name << "=" << '"' << env.value << '"'
    end
  end
  return opts
rescue Exception => e
  log_exception(e)
  return e.to_s
end
# Builds the docker `-p [external:]port/proto ` options from
# container.eports. A nil proto_type is defaulted to "tcp" (mutating the
# eport object, as before).
def get_port_options(container)
  opts = String.new
  ports = container.eports
  if ports
    ports.each do |ep|
      next if ep == nil
      opts << " -p "
      opts << ep.external.to_s << ":" if ep.external != nil && ep.external > 0
      opts << ep.port.to_s
      ep.proto_type = ('tcp') if ep.proto_type == nil
      opts << "/" << ep.proto_type << " "
    end
  end
  return opts
rescue Exception => e
  log_exception(e)
  return e.to_s
end
# Assembles the `docker run` argument string for a container: hostname,
# environment, memory cap, volumes, ports, cidfile, name, image, and
# (unless the image self-starts) the standard init script.
# Returns the argument string, or the exception text on error.
def container_commandline_args(container)
clear_error
begin
envionment_options = get_envionment_options( container)
port_options = get_port_options( container)
volume_option = get_volume_option( container)
if container.conf_self_start == false
start_cmd=" /bin/bash /home/init.sh"
else
start_cmd=" "
end
commandargs = "-h " + container.hostName + \
envionment_options + \
" --memory=" + container.memory.to_s + "m " +\
volume_option + " " +\
port_options +\
" --cidfile " + SysConfig.CidDir + "/" + container.containerName + ".cid " +\
"--name " + container.containerName + \
" -t " + container.image + " " +\
start_cmd
return commandargs
rescue Exception=>e
log_exception(e)
return e.to_s
end
end
# Builds the docker `-v` volume mappings: the timezone file, the
# container's state "run" dir, its log dir (plus a /var/log shim when the
# framework logs elsewhere), then any blueprint-declared volumes.
def get_volume_option(container)
clear_error
begin
#System
volume_option = SysConfig.timeZone_fileMapping #latter this will be customised
volume_option += " -v " + container_state_dir(container) + "/run:/engines/var/run:rw "
# if container.ctype == "service"
# volume_option += " -v " + container_log_dir(container) + ":/var/log:rw "
incontainer_logdir = get_container_logdir(container)
volume_option += " -v " + container_log_dir(container) + ":/" + incontainer_logdir + ":rw "
if incontainer_logdir !="/var/log" && incontainer_logdir !="/var/log/"
volume_option += " -v " + container_log_dir(container) + "/vlog:/var/log/:rw"
end
#end
#container specific
if(container.volumes)
container.volumes.each_value do |volume|
if volume !=nil
if volume.localpath !=nil
volume_option = volume_option.to_s + " -v " + volume.localpath.to_s + ":/" + volume.remotepath.to_s + ":" + volume.mapping_permissions.to_s
end
end
end
end
return volume_option
rescue Exception=>e
log_exception(e)
return false
end
end
# Resolves the in-container log directory for the container's framework:
# prefers the framework's deployment-template LOG_DIR file, falls back to
# the global template, then to "/var/log" when neither can be read.
def get_container_logdir(container)
  clear_error
  if container.framework == nil || container.framework.length == 0
    return "/var/log"
  end
  framework_logdetails_file_name = SysConfig.DeploymentTemplates + "/" + container.framework + "/home/LOG_DIR"
  SystemUtils.debug_output(framework_logdetails_file_name)
  if File.exist?(framework_logdetails_file_name) # File.exists? is deprecated
    container_logdetails_file_name = framework_logdetails_file_name
  else
    container_logdetails_file_name = SysConfig.DeploymentTemplates + "/global/home/LOG_DIR"
  end
  SystemUtils.debug_output(container_logdetails_file_name)
  begin
    container_logdetails = File.read(container_logdetails_file_name)
  rescue
    container_logdetails = "/var/log" # unreadable template: safe default
  end
  return container_logdetails
rescue StandardError => e
  log_exception(e)
  return false
end
protected
# Per-container state directory (<CidDir>/<ctype>s/<name>).
def container_state_dir(container)
return SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
end
# Per-container log directory under the system log root.
def container_log_dir container
return SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
end
# Resets @last_error before each operation.
def clear_error
@last_error = ""
end
# Records an exception (message plus at most ~11 backtrace frames) into
# @last_error and the system log.
def log_exception(e)
  e_str = e.to_s()
  n = 0
  e.backtrace.each do |bt|
    e_str += bt
    break if n > 10
    # BUG FIX: was `++n`, which Ruby parses as a double unary plus (a
    # no-op), so the counter never advanced and the frame cap never fired.
    n += 1
  end
  @last_error = e_str
  SystemUtils.log_output(e_str, 10)
end
end#END of DockerApi
# Wires up the low-level APIs: DockerApi shells out to docker, SystemApi
# handles state files, DNS and nginx.
def initialize
@docker_api = DockerApi.new
@system_api = SystemApi.new(self) #will change to docker_api and not self
@last_error = String.new
end
attr_reader :last_error
# Thin delegators: EnginesCore's public surface forwards to @system_api /
# @docker_api so callers never touch those objects directly.
def software_service_definition(params)
sm = loadServiceManager
return sm.software_service_definition(params)
end
def add_domain(params)
return @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(containerName)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(containerName)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #acutally a rebuild (or resave) as hadh already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
def remove_domain(params)
return @system_api.rm_domain(params[:domain_name],@system_api)
end
def update_domain(old_domain,params)
return @system_api.update_domain(old_domain,params,@system_api)
end
# Signals a process inside the named managed service's container.
def signal_service_process(pid,sig,name)
container = loadManagedService(name)
return @docker_api.signal_container_process(pid,sig,container)
end
# Container lifecycle / inspection delegators (docker- and state-backed).
def start_container(container)
if @docker_api.start_container(container) == true
return true
end
return false
end
def inspect_container(container)
return @docker_api.inspect_container(container)
end
def stop_container(container)
if @docker_api.stop_container(container) == true
return true
end
return false
end
def pause_container(container)
return @docker_api.pause_container(container)
end
def unpause_container(container)
return @docker_api.unpause_container(container)
end
def ps_container(container)
return @docker_api.ps_container(container)
end
def logs_container(container)
return @docker_api.logs_container(container)
end
def add_monitor(site_hash)
return @system_api.add_monitor(site_hash)
end
def rm_monitor(site_hash)
return @system_api.rm_monitor(site_hash)
end
def save_container(container)
return @system_api.save_container(container)
end
def save_blueprint(blueprint,container)
return @system_api.save_blueprint(blueprint,container)
end
def load_blueprint(container)
return @system_api.load_blueprint(container)
end
# More delegators to @system_api.
# NOTE(review): add_volume / rm_volume are not defined in the visible
# SystemApi -- confirm they exist elsewhere before relying on these.
def add_volume(site_hash)
return @system_api.add_volume(site_hash)
end
def rm_volume(site_hash)
return @system_api.rm_volume(site_hash)
end
def remove_self_hosted_domain(domain_name)
return @system_api.remove_self_hosted_domain(domain_name)
end
def add_self_hosted_domain(params)
return @system_api.add_self_hosted_domain(params)
end
def list_self_hosted_domains()
return @system_api.list_self_hosted_domains()
end
def update_self_hosted_domain(old_domain_name, params)
@system_api.update_self_hosted_domain(old_domain_name, params)
end
def load_system_preferences
return @system_api.load_system_preferences
end
def save_system_preferences
return @system_api.save_system_preferences
end
def register_site(site_hash)
return @system_api.register_site(site_hash)
end
def deregister_site(site_hash)
return @system_api.deregister_site(site_hash)
end
def hash_to_site_str(site_hash)
return @system_api.hash_to_site_str(site_hash)
end
def deregister_dns(top_level_hostname)
return @system_api.deregister_dns(top_level_hostname)
end
def register_dns(top_level_hostname,ip_addr_str)
return @system_api.register_dns(top_level_hostname,ip_addr_str)
end
def get_container_memory_stats(container)
return @system_api.get_container_memory_stats(container)
end
def set_engine_hostname_details(container,params)
return @system_api.set_engine_hostname_details(container,params)
end
# True when a deploy image exists for containerName (i.e. "<name>/deploy").
def image_exists?(containerName)
  imageName = containerName + "/deploy"
  return @docker_api.image_exists?(imageName)
rescue Exception => e
  # BUG FIX: was `log_execption(e)` -- a NameError raised inside the rescue.
  log_exception(e)
  return false
end
# Lists the services attached to the named object via the service manager.
# Returns nil (after logging) on error.
def list_attached_services_for(objectName,identifier)
sm = loadServiceManager()
return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
log_exception e
# object_name = object.class.name.split('::').last
#
# case object_name
# when "ManagedEngine"
# retval = Hash.new
#
# retval[:database] = object.databases
# retval[:volume] = object.volumes
# retval[:cron] = object.cron_job_list
#
# return retval
#
# #list services
# # which includes volumes databases cron
# end
# p "missed object name"
# p object_name
#
# service_manager = loadServiceManager()
#
# if service_manager !=nil
# return service_manager.attached_services(object)
#
# end
# return false
end
# Returns {:services => ..., :subservices => ...} available to the object,
# keyed off the object's demodulised class name.
# Returns nil (after logging) on error.
def list_avail_services_for(object)
objectname = object.class.name.split('::').last
services = load_avail_services_for(objectname)
subservices = load_avail_component_services_for(object)
retval = Hash.new
retval[:services] = services
retval[:subservices] = subservices
return retval
rescue Exception=>e
log_exception e
end
# Resolve the container that provides the software service described by
# +params+ and load it as a ManagedService.
# Side effect: stores the resolved name in params[:service_container_name].
# Returns the service, nil/false when it cannot be loaded, or nil on error.
def load_software_service(params)
  sm = loadServiceManager()
  p :load_software_service
  p params
  container_name = sm.get_software_service_container_name(params)
  params[:service_container_name] = container_name
  p :service_container_name
  p container_name
  loadManagedService(container_name)
rescue Exception=>e
  log_exception e
end
def setup_email_params(params)
arg="smarthost_hostname=" + params[:smarthost_hostname] \
+ ":smarthost_username=" + params[:smarthost_username]\
+ ":smarthost_password=" + params[:smarthost_password]\
+ ":mail_name=smtp." + params[:default_domain]
container=loadManagedService("smtp")
return @docker_api.docker_exec(container,SysConfig.SetupParamsScript,arg)
rescue Exception=>e
log_exception(e)
end
def set_database_password(container_name,params)
arg = "mysql_password=" + params[:mysql_password] +":" \
+ "server=" + container_name + ":" \
+ "psql_password=" + params[:psql_password] #Need two args
if container_name
server_container = loadManagedService(container_name)
return @docker_api.docker_exec(server_container,SysConfig.SetupParamsScript,arg)
end
return true
rescue Exception=>e
log_exception(e)
return false
end
def attach_service(service_hash)
if service_hash == nil
p :attached_Service_passed_nil
return false
end
service = load_software_service(service_hash)
p :attaching_to_service
p service_hash
if service !=nil && service != false
return service.add_consumer(service_hash)
end
@last_error = "Failed to attach Service: " + @last_error
return false
rescue Exception=>e
log_exception e
end
def dettach_service(params)
service = load_software_service(params)
if service !=nil && service != false
return service.remove_consumer(params)
end
@last_error = "Failed to dettach Service: " + @last_error
return false
rescue Exception=>e
log_exception e
end
# Delegate to the ServiceManager's provider-usage listing.
def list_providers_in_use
  loadServiceManager().list_providers_in_use
end
# Return the memoised ServiceManager, creating it on first use.
def loadServiceManager()
  @service_manager = ServiceManager.new() if @service_manager == nil
  @service_manager
end
# Delegate consumer lookup to the ServiceManager.
def find_service_consumers(params)
  loadServiceManager().find_service_consumers(params)
end
# Delegate engine-service lookup to the ServiceManager.
def find_engine_services(params)
  loadServiceManager().find_engine_services(params)
end
# Parse a SoftwareServiceDefinition from the YAML file at +filename+.
# Returns the definition, or nil (after logging) on error.
def load_service_definition(filename)
  p :open
  p filename
  # BUG FIX: a stray bare `rescue` line preceded the logging rescue and
  # silently swallowed every StandardError. Also open with a block so the
  # file handle is always closed (it previously leaked).
  File.open(filename) do |yaml_file|
    return SoftwareServiceDefinition.from_yaml(yaml_file)
  end
rescue Exception=>e
  log_exception e
end
def load_avail_services_for(objectname)
p :load_avail_services_for
p objectname
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + objectname
p :dir
p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
if service_dir_entry.start_with?(".") == true
next
end
p :service_dir_entry
p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
p :service_as_serivce
p service
p :as_hash
p service.to_h
p :as_yaml
p service.to_yaml()
retval.push(service.to_h)
end
end
rescue Exception=>e
log_exception e
next
end
end
end
p objectname
p retval
return retval
rescue Exception=>e
log_exception e
end
def load_avail_component_services_for(object)
retval = Hash.new
if object.is_a?(ManagedEngine)
if object.volumes.count >0
p :loading_vols
volumes = load_avail_services_for("Volume") #Array of hashes
retval[:volume] = volumes
end
if object.databases.count >0
databases = load_avail_services_for("Database") #Array of hashes
retval[:database] = databases
end
return retval
else
return nil
end
rescue Exception=>e
log_exception e
end
# Apply runtime (container-level) property changes to an engine —
# currently the memory limit — by destroying and re-creating the
# engine's container. The engine must not be active.
# Returns true on success, false on failure (reason in @last_error).
#FIX ME also need to deal with Env Variables
def set_engine_runtime_properties(params)
  engine_name = params[:engine_name]
  engine = loadManagedEngine(engine_name)
  if engine.is_a?(EnginesOSapiResult) == true
    # BUG FIX: every assignment in this method was `last_error = ...`,
    # which created a throwaway local; the failure reason was silently
    # discarded. Store on the instance instead.
    @last_error = engine.result_mesg
    return false
  end
  if engine.is_active == true
    @last_error = "Container is active"
    return false
  end
  if params.has_key?(:memory)
    if params[:memory] == engine.memory
      @last_error = "No Change in Memory Value"
      return false
    end
    if engine.update_memory(params[:memory]) == false
      @last_error = engine.last_error
      return false
    end
  end
  # Re-create the container so the new limits take effect.
  if engine.has_container? == true
    if destroy_container(engine) == false
      @last_error = engine.last_error
      return false
    end
  end
  if create_container(engine) == false
    @last_error = engine.last_error
    return false
  end
  return true
end
# Thin delegator: change an engine's http/https exposure via the system API.
def set_engine_network_properties (engine, params)
  @system_api.set_engine_network_properties(engine,params)
end
# Thin delegator: read /proc/loadavg figures via the system API layer.
def get_system_load_info
  @system_api.get_system_load_info
end
# Thin delegator: read /proc/meminfo figures via the system API layer.
def get_system_memory_info
  @system_api.get_system_memory_info
end
# Thin delegator: load every ManagedEngine via the system API layer.
def getManagedEngines
  @system_api.getManagedEngines
end
# Thin delegator: load one ManagedEngine by name via the system API layer.
def loadManagedEngine(engine_name)
  @system_api.loadManagedEngine(engine_name)
end
# Thin delegator: load one ManagedService by name via the system API layer.
def loadManagedService(service_name)
  @system_api.loadManagedService(service_name)
end
# Thin delegator: load every ManagedService via the system API layer.
def getManagedServices
  @system_api.getManagedServices
end
# Thin delegator: list configured domains via the system API layer.
def list_domains
  @system_api.list_domains
end
# Thin delegator: list managed engine names via the system API layer.
def list_managed_engines
  @system_api.list_managed_engines
end
# Thin delegator: list managed service names via the system API layer.
def list_managed_services
  @system_api.list_managed_services
end
# Destroy the container: remove the docker container, deregister its
# registered DNS/site entries, and delete its cid file via the system API.
# Returns true on success, false on failure (error stored on the container).
def destroy_container(container)
clear_error
begin
if @docker_api.destroy_container(container) != false
container.deregister_registered
@system_api.destroy_container(container) #removes cid file
return true
else
return false
end
rescue Exception=>e
container.last_error=( "Failed To Destroy " + e.to_s)
log_exception(e)
return false
end
end
def delete_image(container)
begin
clear_error
if @docker_api.delete_image(container) == true
res = @system_api.delete_container_configs(container)
return res
else
return false
end
rescue Exception=>e
container.last_error=( "Failed To Delete " + e.to_s)
log_exception(e)
return false
end
end
# Run a shell command (stderr folded into stdout) and heuristically judge
# success: zero exit status AND no failure keywords in the output.
# Returns true on success, false on failure (output kept in @last_error).
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output res
      return false
    end
  rescue Exception=>e
    log_exception(e)
    # BUG FIX: previously `return ret_val`, an undefined local that raised
    # NameError inside the rescue clause.
    return false
  end
end
def run_volume_builder(container,username)
clear_error
begin
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
command = "docker stop volbuilder"
run_system(command)
command = "docker rm volbuilder"
run_system(command)
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
mapped_vols = get_volbuild_volmaps container
command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder /bin/sh /home/setup_vols.sh "
SystemUtils.debug_output command
run_system(command)
command = "docker stop volbuilder; docker rm volbuilder"
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
res = run_system(command)
if res != true
log_error(res)
return false
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Create the docker container for +container+: reset its cid, clear its
# var/run state flags, create via docker, then record cid/DNS/site state
# via the system API. Returns true on success, false on failure.
def create_container(container)
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # BUG FIX: previously fell through and returned nil here when the
      # docker create failed; callers testing `== false` treated that nil
      # as success.
      return false
    else
      return false
    end
  rescue Exception=>e
    container.last_error=("Failed To Create " + e.to_s)
    log_exception(e)
    return false
  end
end
def rebuild_image(container)
clear_error
begin
params=Hash.new
params[:engine_name] = container.containerName
params[:domain_name] = container.domainName
params[:host_name] = container.hostName
params[:env_variables] = container.environments
params[:http_protocol] = container.protocol
params[:repository_url] = container.repo
params[:software_environment_variables] = container.environments
# custom_env=params
# @http_protocol = params[:http_protocol] = container.
builder = EngineBuilder.new(params, self)
return builder.rebuild_managed_container(container)
rescue Exception=>e
log_exception(e)
return false
end
end
# @container_name = params[:engine_name]
# @domain_name = params[:domain_name]
# @hostname = params[:host_name]
# custom_env= params[:software_environment_variables]
# # custom_env=params
# @core_api = core_api
# @http_protocol = params[:http_protocol]
# p params
# @repoName= params[:repository_url]
# @cron_job_list = Array.new
# @build_name = File.basename(@repoName).sub(/\.git$/,"")
# @workerPorts=Array.new
# @webPort=8000
# @vols=Array.new
#FIXME Kludge
# Read RX/TX byte counters for a container's first interface via
# `netstat -e` inside the container.
# Returns { :in => ..., :out => ... }, with "-1" when unavailable.
def get_container_network_metrics(containerName)
  begin
    ret_val = Hash.new
    clear_error
    # BUG FIX: the awk separator was written as `$2 " " $6` inside a
    # double-quoted Ruby string; adjacent literals concatenate, so the
    # quoted space never reached awk. Escape the quotes instead.
    cmd = "docker exec " + containerName + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
    res = %x<#{cmd}>
    vals = res.split("bytes:")
    # BUG FIX: the guard was inverted (`vals.count < 2`), which made the
    # success branch unreachable and always reported -1.
    if vals.count >= 2
      if vals[1] != nil && vals[2] != nil
        ret_val[:in] = vals[1].chop
        ret_val[:out] = vals[2].chop
      else
        ret_val[:in] = "-1"
        ret_val[:out] = "-1"
      end
    else
      ret_val[:in] = "-1"
      ret_val[:out] = "-1"
    end
    return ret_val
  rescue Exception=>e
    log_exception(e)
    ret_val[:in] = -1
    ret_val[:out] = -1
    return ret_val
  end
end
# Ask the system API whether the container finished its startup phase.
# Returns false (and logs) on error.
def is_startup_complete container
  clear_error
  @system_api.is_startup_complete(container)
rescue Exception=>e
  log_exception(e)
  false
end
protected
def get_volbuild_volmaps container
begin
clear_error
state_dir = SysConfig.CidDir + "/containers/" + container.containerName + "/run/"
log_dir = SysConfig.SystemLogRoot + "/containers/" + container.containerName
volume_option = " -v " + state_dir + ":/client/state:rw "
volume_option += " -v " + log_dir + ":/client/log:rw "
if container.volumes != nil
container.volumes.each_value do |vol|
SystemUtils.debug_output vol
volume_option += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
end
end
volume_option += " --volumes-from " + container.containerName
return volume_option
rescue Exception=>e
log_exception(e)
return false
end
end
# Reset the last recorded error message.
def clear_error
  @last_error = ""
end
# Record an exception (message plus backtrace frames, concatenated) in
# @last_error and write it to the system log at level 10.
def log_exception(e)
  details = e.to_s
  e.backtrace.each { |frame| details += frame }
  @last_error = details
  SystemUtils.log_output(details, 10)
end
end
|
class EnginesCore
require "/opt/engines/lib/ruby/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
class SystemApi
attr_reader :last_error
def initialize(api)
@engines_api = api
end
# def
# @docker_api.update_self_hosted_domain( params)
# end
def create_container(container)
clear_error
begin
cid = read_container_id(container)
container.container_id=(cid)
if save_container(container) == true
return register_dns_and_site(container)
else
return false #save_container false
end
rescue Exception=>e
container.last_error=("Failed To Create " + e.to_s)
log_exception(e)
return false
end
end
# Register DNS for +container+ and, when configured, its web site entry.
# Does nothing (success) when DNS registration is not configured.
# Returns false when a configured registration step fails, true otherwise.
def register_dns_and_site(container)
  return true unless container.conf_register_dns == true
  return false unless container.register_dns() == true
  if container.conf_register_site() == true
    return container.register_site == true
  end
  true
end
def reload_dns
dns_pid = File.read(SysConfig.NamedPIDFile)
p :kill_HUP_TO_DNS
p dns_pid.to_s
return @engines_api.signal_service_process(dns_pid.to_s,'HUP','dns')
rescue Exception=>e
log_exception(e)
return false
end
def restart_nginx_process
begin
clear_error
cmd= "docker exec nginx ps ax |grep \"nginx: master\" |grep -v grep |awk '{ print $1}'"
SystemUtils.debug_output(cmd)
nginxpid= %x<#{cmd}>
SystemUtils.debug_output(nginxpid)
#FIXME read from pid file this is just silly
docker_cmd = "docker exec nginx kill -HUP " + nginxpid.to_s
SystemUtils.debug_output(docker_cmd)
if nginxpid.to_s != "-"
return run_system(docker_cmd)
else
return false
end
rescue Exception=>e
log_exception(e)
return false
end
end
# Mark the container as having no docker container id.
# Returns -1 (the assigned sentinel), which is truthy to callers that
# test the result with `!= false`.
def clear_cid(container)
  container.container_id=(-1)
end
def is_startup_complete container
clear_error
begin
runDir=container_state_dir(container)
if File.exists?(runDir + "/startup_complete")
return true
else
return false
end
rescue Exception=>e
log_exception(e)
return false
end
end
def clear_cid_file container
clear_error
begin
cidfile = container_cid_file(container)
if File.exists? cidfile
File.delete cidfile
end
return true
rescue Exception=>e
container.last_error=("Failed To Create " + e.to_s)
log_exception(e)
return false
end
end
# Read the saved docker container id for +container+ from its cid file.
# Returns "-1" when the cid file does not exist or cannot be read — the
# same "no container" sentinel used by clear_cid.
def read_container_id(container)
  clear_error
  begin
    cidfile = container_cid_file(container)
    if File.exists?(cidfile)
      cid = File.read(cidfile)
      return cid
    end
    # BUG FIX: previously fell through and returned nil when the file was
    # missing; callers expect the "-1" sentinel (as the rescue returns).
    return "-1"
  rescue Exception=>e
    log_exception(e)
    return "-1";
  end
end
def destroy_container container
clear_error
begin
container.container_id=(-1)
if File.exists?( container_cid_file(container)) ==true
File.delete( container_cid_file(container))
end
return true #File may or may not exist
rescue Exception=>e
container.last_error=( "Failed To Destroy " + e.to_s)
log_exception(e)
return false
end
end
def register_dns(top_level_hostname,ip_addr_str) # no Gem made this simple (need to set tiny TTL) and and all used nsupdate anyhow
clear_error
begin
fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
#FIXME need unique name for temp file
dns_cmd_file_name="/tmp/.dns_cmd_file"
dns_cmd_file = File.new(dns_cmd_file_name,"w+")
dns_cmd_file.puts("server " + SysConfig.defaultDNS)
dns_cmd_file.puts("update delete " + fqdn_str)
dns_cmd_file.puts("send")
dns_cmd_file.puts("update add " + fqdn_str + " 30 A " + ip_addr_str)
dns_cmd_file.puts("send")
dns_cmd_file.close
cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
retval = run_system(cmd_str)
#File.delete(dns_cmd_file_name)
return retval
rescue Exception=>e
log_exception(e)
return false
end
end
def delete_container_configs(container)
clear_error
begin
stateDir = container_state_dir(container) + "/config.yaml"
File.delete(stateDir)
cidfile = SysConfig.CidDir + "/" + container.containerName + ".cid"
if File.exists?(cidfile)
File.delete(cidfile)
end
return true
rescue Exception=>e
container.last_error=( "Failed To Delete " )
log_exception(e)
return false
end
end
# Remove the DNS record for +top_level_hostname+ (within the internal
# domain) by feeding an nsupdate command file to the configured server.
# Returns the run_system result, or false on error.
def deregister_dns(top_level_hostname)
  clear_error
  begin
    fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
    # BUG FIX: the temp-file name contained the literal text
    # "top_level_hostname" instead of the hostname itself, so concurrent
    # deregistrations clobbered each other's command file.
    dns_cmd_file_name = "/tmp/." + top_level_hostname + ".dns_cmd_file"
    dns_cmd_file = File.new(dns_cmd_file_name,"w")
    dns_cmd_file.puts("server " + SysConfig.defaultDNS)
    dns_cmd_file.puts("update delete " + fqdn_str)
    dns_cmd_file.puts("send")
    dns_cmd_file.close
    cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
    retval = run_system(cmd_str)
    File.delete(dns_cmd_file_name)
    return retval
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Pick the TLS cert name for +fqdn+: the host-specific cert when one is
# present in the nginx cert dir, otherwise the system default cert.
def get_cert_name(fqdn)
  host_cert = SysConfig.NginxCertDir + "/" + fqdn + ".crt"
  File.exists?(host_cert) ? fqdn : SysConfig.NginxDefaultCert
end
# Write an nginx site config for +site_hash+ ({:name,:fqdn,:port,:proto})
# from the protocol-appropriate template, then HUP nginx to pick it up.
# Template placeholders FQDN/PORT/SERVER (and CERTNAME for https) are
# substituted; "http https" templates carry each placeholder twice.
# NOTE(review): String#sub requires :port to already be a String here —
# confirm callers never pass an Integer.
# Returns the restart_nginx_process result, or false on error.
def register_site(site_hash)
clear_error
begin
proto = site_hash[:proto]
# Select a template by protocol; unknown or missing protocols fall back
# to the combined http+https template.
if proto =="http https"
template_file=SysConfig.HttpHttpsNginxTemplate
elsif proto =="http"
template_file=SysConfig.HttpNginxTemplate
elsif proto == "https"
template_file=SysConfig.HttpsNginxTemplate
elsif proto == nil
p "Proto nil"
template_file=SysConfig.HttpHttpsNginxTemplate
else
p "Proto" + proto + " unknown"
template_file=SysConfig.HttpHttpsNginxTemplate
end
file_contents=File.read(template_file)
# String#sub replaces only the first occurrence; the repeated subs below
# fill the second server block of dual-protocol templates.
site_config_contents = file_contents.sub("FQDN",site_hash[:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:name]) #Not HostName
if proto =="https" || proto =="http https"
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:fqdn])) #Not HostName
site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:fqdn])) #Not HostName
end
if proto =="http https"
#Repeat for second entry
site_config_contents = site_config_contents.sub("FQDN",site_hash[:fqdn])
site_config_contents = site_config_contents.sub("PORT",site_hash[:port])
site_config_contents = site_config_contents.sub("SERVER",site_hash[:name]) #Not HostName
end
site_filename = get_site_file_name(site_hash)
site_file = File.open(site_filename,'w')
site_file.write(site_config_contents)
site_file.close
result = restart_nginx_process()
return result
rescue Exception=>e
log_exception(e)
return false
end
end
# Serialise a site hash into the "name:fqdn:port:proto" wire format used
# by the nginx helper scripts. Returns false (and logs) on error.
def hash_to_site_str(site_hash)
  clear_error
  begin
    [:name, :fqdn, :port, :proto].map { |key| site_hash[key].to_s }.join(":")
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Build the nginx site-file path for +site_hash+; the dual protocol
# "http https" is flattened to "http_https" for the filename.
def get_site_file_name(site_hash)
  proto = site_hash[:proto]
  p :proto
  p proto
  proto = "http_https" if proto == "http https"
  SysConfig.NginxSiteDir + "/" + proto + "_" + site_hash[:fqdn] + ".site"
end
def deregister_site(site_hash)
clear_error
begin
# # ssh_cmd=SysConfig.rmSiteCmd + " \"" + hash_to_site_str(site_hash) + "\""
# #FIXME Should write site conf file via template (either standard or supplied with blueprint)
# ssh_cmd = "/opt/engines/scripts/nginx/rmsite.sh " + " \"" + hash_to_site_str(site_hash) + "\""
# SystemUtils.debug_output ssh_cmd
# result = run_system(ssh_cmd)
site_filename = get_site_file_name(site_hash)
if File.exists?(site_filename)
File.delete(site_filename)
end
result = restart_nginx_process()
return result
rescue Exception=>e
log_exception(e)
return false
end
end
def add_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.addSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
def rm_monitor(site_hash)
clear_error
begin
ssh_cmd=SysConfig.rmSiteMonitorCmd + " \"" + hash_to_site_str(site_hash) + " \""
return run_system(ssh_cmd)
rescue Exception=>e
log_exception(e)
return false
end
end
# Serialise +container+ to <CidDir>/<ctype>s/<name>/config.yaml, creating
# the state and log directories on first save and keeping the previous
# config as config.yaml.bak. Returns true on success, false on error.
def save_container(container)
clear_error
begin
#FIXME
# Detach the live API handle so YAML::dump does not serialise it, then
# restore it afterwards.
api = container.core_api
container.core_api = nil
serialized_object = YAML::dump(container)
container.core_api = api
stateDir=SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
if File.directory?(stateDir) ==false
Dir.mkdir(stateDir)
Dir.mkdir(stateDir + "/run")
end
log_dir = container_log_dir(container)
if File.directory?(log_dir) ==false
Dir.mkdir(log_dir)
end
statefile=stateDir + "/config.yaml"
# BACKUP Current file with rename
if File.exists?(statefile)
statefile_bak = statefile + ".bak"
File.rename( statefile, statefile_bak)
end
f = File.new(statefile,File::CREAT|File::TRUNC|File::RDWR, 0644)
f.puts(serialized_object)
f.close
return true
rescue Exception=>e
# NOTE(review): the stored message says "load error" for a *save*
# failure — looks like a copy-paste slip; confirm before changing it.
container.last_error=( "load error")
log_exception(e)
return false
end
end
# Persist +blueprint+ as JSON to the container's state directory
# (<state_dir>/blueprint.json). Returns true on success, false when the
# blueprint is nil or an error occurs.
def save_blueprint(blueprint,container)
  clear_error
  begin
    if blueprint != nil
      puts blueprint.to_s
    else
      return false
    end
    stateDir=container_state_dir(container)
    if File.directory?(stateDir) ==false
      Dir.mkdir(stateDir)
    end
    statefile=stateDir + "/blueprint.json"
    f = File.new(statefile,File::CREAT|File::TRUNC|File::RDWR, 0644)
    f.write(blueprint.to_json)
    f.close
    # BUG FIX: the method previously ended with f.close and therefore
    # returned nil (falsy) even on success; return true like save_domains.
    return true
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
def load_blueprint(container)
clear_error
begin
stateDir=container_state_dir(container)
if File.directory?(stateDir) ==false
return false
end
statefile=stateDir + "/blueprint.json"
if File.exists?(statefile)
f = File.new(statefile,"r")
blueprint = JSON.parse( f.read())
f.close
else
return false
end
return blueprint
rescue Exception=>e
log_exception(e)
return false
end
end
def save_domains(domains)
clear_error
begin
domain_file = File.open(SysConfig.DomainsFile,"w")
domain_file.write(domains.to_yaml())
domain_file.close
return true
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
end
def load_domains
clear_error
begin
if File.exists?(SysConfig.DomainsFile) == false
p :creating_new_domain_list
self_hosted_domain_file = File.open(SysConfig.DomainsFile,"w")
self_hosted_domain_file.close
return Hash.new
else
self_hosted_domain_file = File.open(SysConfig.DomainsFile,"r")
end
domains = YAML::load( self_hosted_domain_file )
self_hosted_domain_file.close
if domains == false
p :domains_error_in_load
return Hash.new
end
return domains
rescue Exception=>e
domains = Hash.new
p "failed_to_load_domains"
SystemUtils.log_exception(e)
return domains
end
end
def list_domains
domains = load_domains
return domains
rescue Exception=>e
domains = Hash.new
p :error_listing_domains
SystemUtils.log_exception(e)
return domains
end
def add_domain(params)
clear_error
domain= params[:domain_name]
if params[:self_hosted]
add_self_hosted_domain params
end
p :add_domain
p params
domains = load_domains()
domains[params[:domain_name]] = params
if save_domains(domains)
return true
end
p :failed_add_hosted_domains
return false
rescue Exception=>e
SystemUtils.log_exception(e)
return false
end
def rm_domain(domain,system_api)
clear_error
domains = load_domains
if domains.has_key?(domain)
domains.delete(domain)
save_domains(domains)
system_api.reload_dns
end
end
def update_domain(old_domain_name, params,system_api)
clear_error
begin
domains = load_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_domains(domains)
if params[:self_hosted]
add_self_hosted_domain params
rm_self_hosted_domain(old_domain_name)
system_api.reload_dns
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
# Register +params+ as a self-hosted domain via DNSHosting.
# Returns DNSHosting.add_hosted_domain's result, or false on error.
def add_self_hosted_domain params
  clear_error
  begin
    p :Lachlan_Sent_parrams
    p params
    # BUG FIX: an unreachable `return save_self_hosted_domains(domains)`
    # followed this return and referenced an undefined local `domains`;
    # the dead code (and its commented-out precursor) has been removed.
    return DNSHosting.add_hosted_domain(params,self)
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
def list_self_hosted_domains()
clear_error
begin
return DNSHosting.load_self_hosted_domains()
# domains = load_self_hosted_domains()
# p domains
# return domains
rescue Exception=>e
log_exception(e)
return false
end
end
def update_self_hosted_domain(old_domain_name, params)
clear_error
begin
domains = load_self_hosted_domains()
domains.delete(old_domain_name)
domains[params[:domain_name]] = params
save_self_hosted_domains(domains)
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def remove_self_hosted_domain( domain_name)
clear_error
begin
return DNSHosting.rm_hosted_domain(domain_name,self)
rescue Exception=>e
log_exception(e)
return false
end
end
def save_system_preferences
clear_error
begin
SystemUtils.debug_output :pdsf
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def load_system_preferences
clear_error
begin
SystemUtils.debug_output :psdfsd
rescue Exception=>e
log_exception(e)
return false
end
end
# Read maximum/current/limit memory figures for the container from its
# docker memory cgroup, resolving a missing container id from the cid
# file first. Returns a Hash with :maximum, :current and :limit (strings,
# or "No Container" / error text when unavailable).
def get_container_memory_stats(container)
  clear_error
  ret_val= Hash.new
  begin
    # BUG FIX: was `container && a == nil || b == '-1'` — `&&` binds
    # tighter than `||`, so a nil container still reached the second
    # clause and raised NoMethodError. Parenthesise the id checks.
    if container && (container.container_id == nil || container.container_id == '-1')
      container_id = read_container_id(container)
      container.container_id=(container_id)
    end
    if container && container.container_id != nil && container.container_id != '-1'
      path = "/sys/fs/cgroup/memory/docker/" + container.container_id + "/"
      if Dir.exists?(path)
        ret_val.store(:maximum , File.read(path + "/memory.max_usage_in_bytes"))
        ret_val.store(:current , File.read(path + "/memory.usage_in_bytes"))
        ret_val.store(:limit , File.read(path + "/memory.limit_in_bytes"))
      else
        p :no_cgroup_file
        p path
        ret_val.store(:maximum , "No Container")
        ret_val.store(:current , "No Container")
        ret_val.store(:limit , "No Container")
      end
    end
    return ret_val
  rescue Exception=>e
    log_exception(e)
    ret_val.store(:maximum , e.to_s)
    ret_val.store(:current , "NA")
    ret_val.store(:limit , "NA")
    return ret_val
  end
end
def set_engine_network_properties(engine, params)
clear_error
begin
engine_name = params[:engine_name]
protocol = params[:http_protocol]
if protocol.nil?
p params
return false
end
SystemUtils.debug_output("Changing protocol to _" + protocol + "_")
if protocol.include?("HTTPS only")
engine.enable_https_only
elsif protocol.include?("HTTP only")
engine.enable_http_only
elsif protocol.include?("HTTPS and HTTP")
engine.enable_http_and_https
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def set_engine_hostname_details(container,params)
clear_error
begin
engine_name = params[:engine_name]
hostname = params[:host_name]
domain_name = params[:domain_name]
SystemUtils.debug_output("Changing Domainame to " + domain_name)
if container.hostName != hostname || container.domainName != domain_name
saved_hostName = container.hostName
saved_domainName = container.domainName
SystemUtils.debug_output("Changing Domainame to " + domain_name)
if container.set_hostname_details(hostname,domain_name) == true
nginx_service = EnginesOSapi.loadManagedService("nginx",self)
nginx_service.remove_consumer(container)
dns_service = EnginesOSapi.loadManagedService("dns",self)
dns_service.remove_consumer(container)
dns_service.add_consumer(container)
nginx_service.add_consumer(container)
save_container(container)
end
return true
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def get_system_memory_info
clear_error
ret_val = Hash.new
begin
proc_mem_info_file = File.open("/proc/meminfo")
proc_mem_info_file.each_line do |line|
values=line.split(" ")
case values[0]
when "MemTotal:"
ret_val[:total] = values[1]
when "MemFree:"
ret_val[:free]= values[1]
when "Buffers:"
ret_val[:buffers]= values[1]
when "Cached:"
ret_val[:file_cache]= values[1]
when "Active:"
ret_val[:active]= values[1]
when "Inactive:"
ret_val[:inactive]= values[1]
when "SwapTotal:"
ret_val[:swap_total]= values[1]
when "SwapFree:"
ret_val[:swap_free] = values[1]
end
end
return ret_val
rescue Exception=>e
log_exception(e)
ret_val[:total] = e.to_s
ret_val[:free] = -1
ret_val[:active] = -1
ret_val[:inactive] = -1
ret_val[:file_cache] = -1
ret_val[:buffers] = -1
ret_val[:swap_total] = -1
ret_val[:swap_free] = -1
return ret_val
end
end
# Parse /proc/loadavg into a Hash with :one/:five/:fithteen load averages
# plus :running and :idle task counts. On error every value is -1.
# NOTE: the :fithteen key typo is kept — callers already depend on it.
def get_system_load_info
  clear_error
  ret_val = Hash.new
  begin
    loadavg_info = File.read("/proc/loadavg")
    values = loadavg_info.split(" ")
    ret_val[:one] = values[0]
    ret_val[:five] = values[1]
    ret_val[:fithteen] = values[2]
    run_idle = values[3].split("/")
    ret_val[:running] = run_idle[0]
    ret_val[:idle] = run_idle[1]
    # BUG FIX: the method previously ended on the assignment above and so
    # returned a String, not the populated Hash.
    return ret_val
  rescue Exception=>e
    # BUG FIX: a second, unreachable `rescue Exception` clause (returning
    # false) was removed.
    log_exception(e)
    ret_val[:one] = -1
    ret_val[:five] = -1
    ret_val[:fithteen] = -1
    ret_val[:running] = -1
    ret_val[:idle] = -1
    return ret_val
  end
end
def getManagedEngines()
begin
ret_val=Array.new
Dir.entries(SysConfig.CidDir + "/containers/").each do |contdir|
yfn = SysConfig.CidDir + "/containers/" + contdir + "/config.yaml"
if File.exists?(yfn) == true
managed_engine = loadManagedEngine(contdir)
if managed_engine.is_a?(ManagedEngine)
ret_val.push(managed_engine)
else
log_error("failed to load " + yfn)
end
end
end
return ret_val
rescue Exception=>e
log_exception(e)
return false
end
end
# Load a ManagedEngine from <CidDir>/containers/<name>/config.yaml.
# Returns the engine, or false when the name is blank, the config file
# is missing, or parsing fails (reason recorded via @last_error / log).
def loadManagedEngine(engine_name)
  if engine_name == nil || engine_name.length ==0
    # BUG FIX: was `last_error=...`, a local assignment that silently
    # discarded the message; store it on the instance instead.
    @last_error = "No Engine Name"
    return false
  end
  begin
    yam_file_name = SysConfig.CidDir + "/containers/" + engine_name + "/config.yaml"
    if File.exists?(yam_file_name) == false
      log_error("no such file " + yam_file_name )
      return false # return failed(yam_file_name,"No such configuration:","Load Engine")
    end
    yaml_file = File.open(yam_file_name)
    managed_engine = ManagedEngine.from_yaml( yaml_file,@engines_api)
    if(managed_engine == nil || managed_engine == false)
      p :from_yaml_returned_nil
      return false # failed(yam_file_name,"Failed to Load configuration:","Load Engine")
    end
    return managed_engine
  rescue Exception=>e
    # Attach the failure to the half-loaded engine when one exists so the
    # caller can surface it.
    if engine_name != nil
      if managed_engine !=nil
        managed_engine.last_error=( "Failed To get Managed Engine " + engine_name + " " + e.to_s)
        log_error(managed_engine.last_error)
      end
    else
      log_error("nil Engine Name")
    end
    log_exception(e)
    return false
  end
end
def loadManagedService(service_name)
begin
if service_name == nil || service_name.length ==0
last_error="No Service Name"
return false
end
yam_file_name = SysConfig.CidDir + "/services/" + service_name + "/config.yaml"
if File.exists?(yam_file_name) == false
return false # return failed(yam_file_name,"No such configuration:","Load Service")
end
yaml_file = File.open(yam_file_name)
# managed_service = YAML::load( yaml_file)
managed_service = ManagedService.from_yaml(yaml_file,@engines_api)
if managed_service == nil
return false # return EnginsOSapiResult.failed(yam_file_name,"Fail to Load configuration:","Load Service")
end
return managed_service
rescue Exception=>e
if service_name != nil
if managed_service !=nil
managed_service.last_error=( "Failed To get Managed Engine " + service_name + " " + e.to_s)
log_error(managed_service.last_error)
end
else
log_error("nil Service Name")
end
log_exception(e)
return false
end
end
def getManagedServices()
begin
ret_val=Array.new
Dir.entries(SysConfig.CidDir + "/services/").each do |contdir|
yfn = SysConfig.CidDir + "/services/" + contdir + "/config.yaml"
if File.exists?(yfn) == true
yf = File.open(yfn)
managed_service = ManagedService.from_yaml(yf,@engines_api)
if managed_service
ret_val.push(managed_service)
end
yf.close
end
end
return ret_val
rescue Exception=>e
log_exception(e)
return false
end
end
# List the names of all managed engines: directories under
# <CidDir>/containers that contain a config.yaml.
# Returns whatever was collected so far if scanning fails.
def list_managed_engines
  clear_error
  ret_val = Array.new
  begin
    base = SysConfig.CidDir + "/containers/"
    Dir.entries(base).each do |entry|
      ret_val.push(entry) if File.exists?(base + entry + "/config.yaml")
    end
  rescue Exception=>e
    log_exception(e)
    return ret_val
  end
  return ret_val
end
def list_managed_services
clear_error
ret_val=Array.new
begin
Dir.entries(SysConfig.CidDir + "/services/").each do |contdir|
yfn = SysConfig.CidDir + "/services/" + contdir + "/config.yaml"
if File.exists?(yfn) == true
ret_val.push(contdir)
end
end
rescue Exception=>e
log_exception(e)
return ret_val
end
return ret_val
end
def clear_container_var_run(container)
clear_error
begin
dir = container_state_dir(container)
#
#remove startup only
#latter have function to reset subs and other flags
if File.exists?(dir + "/startup_complete")
File.unlink(dir + "/startup_complete")
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
protected
# Path of the cid file recording this container's docker id.
def container_cid_file(container)
  SysConfig.CidDir + "/" + container.containerName + ".cid"
end
# State directory for a container: <CidDir>/<ctype>s/<name>.
def container_state_dir(container)
  SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
end
# Log directory for a container: <SystemLogRoot>/<ctype>s/<name>.
def container_log_dir container
  SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
end
# Run a shell command (stderr folded into stdout) and heuristically judge
# success: zero exit status AND no failure keywords in the output.
# Returns true on success, the raw output String on heuristic failure,
# and false when an exception occurs.
def run_system (cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      return res
    end
  rescue Exception=>e
    log_exception(e)
    # BUG FIX: previously `return ret_val`, an undefined local that raised
    # NameError inside the rescue clause.
    return false
  end
end
# Reset the last recorded error message.
def clear_error
  @last_error = ""
end
# Record +e_str+ as the last error and write it to the system log (level 10).
def log_error(e_str)
  @last_error = e_str
  SystemUtils.log_output(e_str, 10)
end
# Flatten an exception (message plus backtrace frames, concatenated) into
# a single string and hand it to log_error.
def log_exception(e)
  details = e.to_s
  e.backtrace.each { |frame| details += frame }
  log_error(details)
end
end #END of SystemApi
# Thin wrapper around the `docker` command-line client. Each public method
# shells out via run_docker and returns true/false (or raw output), recording
# failures in @last_error and on the container object itself.
class DockerApi
  attr_reader :last_error

  # `docker run -d <args>` for the container. Returns run_docker's result.
  def create_container container
    clear_error
    begin
      commandargs = container_commandline_args(container)
      commandargs = " run -d " + commandargs
      SystemUtils.debug_output commandargs
      retval = run_docker(commandargs, container)
      return retval
    rescue Exception => e
      container.last_error = "Failed To Create "
      log_exception(e)
      return false
    end
  end

  # `docker start <name>`.
  def start_container container
    clear_error
    begin
      commandargs = " start " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker stop <name>`.
  def stop_container container
    clear_error
    begin
      commandargs = " stop " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker pause <name>`.
  def pause_container container
    clear_error
    begin
      commandargs = " pause " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # True when `docker images -q <image_name>` prints anything.
  def image_exists? (image_name)
    cmd = "docker images -q " + image_name
    res = SystemUtils.run_system(cmd)
    if res.length > 0
      return true
    else
      return false
    end
  end

  # `docker unpause <name>`.
  def unpause_container container
    clear_error
    begin
      commandargs = " unpause " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker top <name> axl` — process list inside the container.
  def ps_container container
    clear_error
    begin
      commandargs = " top " + container.containerName + " axl"
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # Sends a signal to a pid inside the container via `docker exec ... kill`.
  # @param signal [String] signal name/number as accepted by kill(1)
  def signal_container_process(pid, signal, container)
    clear_error
    commandargs = " exec " + container.containerName + " kill -" + signal + " " + pid.to_s
    return run_docker(commandargs, container)
  rescue Exception => e
    log_exception(e)
    return false
  end

  # `docker logs <name>`.
  def logs_container container
    clear_error
    begin
      commandargs = " logs " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker inspect <name>`.
  def inspect_container container
    clear_error
    begin
      commandargs = " inspect " + container.containerName
      return run_docker(commandargs, container)
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # `docker rm <name>`. Returns run_docker's result (or false on raise).
  def destroy_container container
    clear_error
    begin
      commandargs = " rm " + container.containerName
      ret_val = run_docker(commandargs, container)
    rescue Exception => e
      container.last_error = "Failed To Destroy " + e.to_s
      log_exception(e)
      return false
    end
  end

  # `docker rmi <image>`.
  def delete_image container
    clear_error
    begin
      commandargs = " rmi " + container.image
      ret_val = run_docker(commandargs, container)
      return ret_val
    rescue Exception => e
      container.last_error = "Failed To Delete " + e.to_s
      log_exception(e)
      return false
    end
  end

  # Runs an arbitrary command inside the container via `docker exec`.
  def docker_exec(container, command, args)
    run_args = "exec " + container.containerName + " " + command + " " + args
    return run_docker(run_args, container)
  end

  # Core shell-out: runs `docker <args>` via Open3, draining stdout and
  # stderr incrementally. The collected stdout is stored on
  # container.last_result, stderr on container.last_error / @last_error.
  # Returns true unless stderr contained "Error" or an exception was raised.
  def run_docker (args, container)
    clear_error
    require 'open3'
    SystemUtils.debug_output(args)
    res = String.new
    error_mesg = String.new
    begin
      container.last_result = ""
      Open3.popen3("docker " + args) do |stdin, stdout, stderr, th|
        oline = String.new
        stderr_is_open = true
        begin
          stdout.each { |line|
            line = line.gsub(/\\\"/, "")  # strip escaped quotes from docker's JSON-ish output
            oline = line
            res += line.chop
            if stderr_is_open
              error_mesg += stderr.read_nonblock(256)
            end
          }
        rescue Errno::EIO
          res += oline.chop
          SystemUtils.debug_output(oline)
          error_mesg += stderr.read_nonblock(256)
        rescue IO::WaitReadable
          retry
        rescue EOFError
          if stdout.closed? == false
            stderr_is_open = false
            retry
          elsif stderr.closed? == false
            error_mesg += stderr.read_nonblock(1000)
            container.last_result = res
            # was `error_mesgs` (undefined local → NameError) in both branches
            container.last_error = error_mesg
          else
            container.last_result = res
            container.last_error = error_mesg
          end
        end
        @last_error = error_mesg
        if error_mesg.include?("Error")
          container.last_error = error_mesg
          return false
        else
          container.last_error = ""
        end
        # NOTE(review): looks like a kludge to close a truncated JSON array in
        # the captured output — confirm downstream parsers rely on this
        if res != nil && res.end_with?(']') == false
          res += ']'
        end
        container.last_result = res
        return true
      end
    rescue Exception => e
      @last_error = error_mesg + e.to_s
      container.last_result = res
      container.last_error = error_mesg + e.to_s
      log_exception(e)
      return false
    end
    return true
  end

  # Builds the repeated `-e name="value"` options from container.environments.
  def get_envionment_options(container)
    e_option = String.new
    if (container.environments)
      container.environments.each do |environment|
        if environment != nil
          e_option = e_option + " -e " + environment.name + "=" + '"' + environment.value + '"'
        end
      end
    end
    return e_option
  rescue Exception => e
    log_exception(e)
    return e.to_s
  end

  # Builds `-p [external:]port/proto` options from container.eports.
  # Missing proto_type defaults (and is written back) to "tcp".
  def get_port_options(container)
    eportoption = String.new
    if (container.eports)
      container.eports.each do |eport|
        if eport != nil
          eportoption = eportoption + " -p "
          if eport.external != nil && eport.external > 0
            eportoption = eportoption + eport.external.to_s + ":"
          end
          eportoption = eportoption + eport.port.to_s
          if eport.proto_type == nil
            eport.proto_type = 'tcp'
          end
          eportoption = eportoption + "/" + eport.proto_type + " "
        end
      end
    end
    return eportoption
  rescue Exception => e
    log_exception(e)
    return e.to_s
  end

  # Assembles the full `docker run` argument string (hostname, env vars,
  # memory cap, volumes, ports, cidfile, name, image and start command).
  def container_commandline_args(container)
    clear_error
    begin
      envionment_options = get_envionment_options(container)
      port_options = get_port_options(container)
      volume_option = get_volume_option(container)
      if container.conf_self_start == false
        start_cmd = " /bin/bash /home/init.sh"
      else
        start_cmd = " "
      end
      commandargs = "-h " + container.hostName + \
        envionment_options + \
        " --memory=" + container.memory.to_s + "m " + \
        volume_option + " " + \
        port_options + \
        " --cidfile " + SysConfig.CidDir + "/" + container.containerName + ".cid " + \
        "--name " + container.containerName + \
        " -t " + container.image + " " + \
        start_cmd
      return commandargs
    rescue Exception => e
      log_exception(e)
      return e.to_s
    end
  end

  # Builds the `-v host:container:perm` options: timezone mapping, state dir,
  # log dirs, then any container-specific volumes.
  def get_volume_option(container)
    clear_error
    begin
      # System mappings
      volume_option = SysConfig.timeZone_fileMapping # later this will be customised
      volume_option += " -v " + container_state_dir(container) + "/run:/engines/var/run:rw "
      incontainer_logdir = get_container_logdir(container)
      volume_option += " -v " + container_log_dir(container) + ":/" + incontainer_logdir + ":rw "
      if incontainer_logdir != "/var/log" && incontainer_logdir != "/var/log/"
        volume_option += " -v " + container_log_dir(container) + "/vlog:/var/log/:rw"
      end
      # container-specific mappings
      if (container.volumes)
        container.volumes.each_value do |volume|
          if volume != nil
            if volume.localpath != nil
              volume_option = volume_option.to_s + " -v " + volume.localpath.to_s + ":/" + volume.remotepath.to_s + ":" + volume.mapping_permissions.to_s
            end
          end
        end
      end
      return volume_option
    rescue Exception => e
      log_exception(e)
      return false
    end
  end

  # Resolves the in-container log directory: framework-specific LOG_DIR file
  # if present, else the global template's, else "/var/log".
  def get_container_logdir(container)
    clear_error
    if container.framework == nil || container.framework.length == 0
      return "/var/log"
    end
    container_logdetails_file_name = false
    framework_logdetails_file_name = SysConfig.DeploymentTemplates + "/" + container.framework + "/home/LOG_DIR"
    SystemUtils.debug_output(framework_logdetails_file_name)
    # File.exists? is deprecated (removed in Ruby 3.2)
    if File.exist?(framework_logdetails_file_name)
      container_logdetails_file_name = framework_logdetails_file_name
    else
      container_logdetails_file_name = SysConfig.DeploymentTemplates + "/global/home/LOG_DIR"
    end
    SystemUtils.debug_output(container_logdetails_file_name)
    begin
      container_logdetails = File.read(container_logdetails_file_name)
    rescue
      container_logdetails = "/var/log"
    end
    return container_logdetails
  rescue Exception => e
    log_exception(e)
    return false
  end

  protected

  # Per-container state directory: <CidDir>/<ctype>s/<name>.
  def container_state_dir(container)
    return SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
  end

  # Per-container log directory: <SystemLogRoot>/<ctype>s/<name>.
  def container_log_dir container
    return SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
  end

  # Resets the last-error buffer before a new operation.
  def clear_error
    @last_error = ""
  end

  # Records an exception: message plus up to ~12 backtrace frames.
  def log_exception(e)
    e_str = e.to_s
    n = 0
    e.backtrace.each do |bt|
      e_str += bt
      if n > 10
        break
      end
      n += 1  # was `++n` — a double unary plus, which never incremented n
    end
    @last_error = e_str
    SystemUtils.log_output(e_str, 10)
  end
end #END of DockerApi
# Wires up the two backend APIs this facade delegates to.
def initialize
  @docker_api = DockerApi.new
  @system_api = SystemApi.new(self) # will change to docker_api and not self
end
# Last error string recorded by this facade.
attr_reader :last_error
# Looks up a software service definition through the ServiceManager.
def software_service_definition(params)
  sm = loadServiceManager
  return sm.software_service_definition(params)
end
# Registers a domain with the system layer.
def add_domain(params)
  return @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(containerName)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(containerName)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #acutally a rebuild (or resave) as hadh already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
# Domain management — forwarded to the system layer.
def remove_domain(params)
  @system_api.rm_domain(params[:domain_name], @system_api)
end

def update_domain(old_domain, params)
  @system_api.update_domain(old_domain, params, @system_api)
end

# Signals a process inside a managed service's container.
def signal_service_process(pid, sig, name)
  container = loadManagedService(name)
  @docker_api.signal_container_process(pid, sig, container)
end

# Container lifecycle — forwarded to the docker layer.
# start/stop normalise the result to a strict boolean.
def start_container(container)
  @docker_api.start_container(container) == true
end

def inspect_container(container)
  @docker_api.inspect_container(container)
end

def stop_container(container)
  @docker_api.stop_container(container) == true
end

def pause_container(container)
  @docker_api.pause_container(container)
end

def unpause_container(container)
  @docker_api.unpause_container(container)
end

def ps_container(container)
  @docker_api.ps_container(container)
end

def logs_container(container)
  @docker_api.logs_container(container)
end
# Monitoring, persistence, site/DNS registration and stats — all forwarded
# to the system layer unchanged.
def add_monitor(site_hash)
  @system_api.add_monitor(site_hash)
end

def rm_monitor(site_hash)
  @system_api.rm_monitor(site_hash)
end

def save_container(container)
  @system_api.save_container(container)
end

def save_blueprint(blueprint, container)
  @system_api.save_blueprint(blueprint, container)
end

def load_blueprint(container)
  @system_api.load_blueprint(container)
end

def add_volume(site_hash)
  @system_api.add_volume(site_hash)
end

def rm_volume(site_hash)
  @system_api.rm_volume(site_hash)
end

def remove_self_hosted_domain(domain_name)
  @system_api.remove_self_hosted_domain(domain_name)
end

def add_self_hosted_domain(params)
  @system_api.add_self_hosted_domain(params)
end

def list_self_hosted_domains()
  @system_api.list_self_hosted_domains()
end

def update_self_hosted_domain(old_domain_name, params)
  @system_api.update_self_hosted_domain(old_domain_name, params)
end

def load_system_preferences
  @system_api.load_system_preferences
end

def save_system_preferences
  @system_api.save_system_preferences
end

def register_site(site_hash)
  @system_api.register_site(site_hash)
end

def deregister_site(site_hash)
  @system_api.deregister_site(site_hash)
end

def hash_to_site_str(site_hash)
  @system_api.hash_to_site_str(site_hash)
end

def deregister_dns(top_level_hostname)
  @system_api.deregister_dns(top_level_hostname)
end

def register_dns(top_level_hostname, ip_addr_str)
  @system_api.register_dns(top_level_hostname, ip_addr_str)
end

def get_container_memory_stats(container)
  @system_api.get_container_memory_stats(container)
end

def set_engine_hostname_details(container, params)
  @system_api.set_engine_hostname_details(container, params)
end
# True when a local docker image named "<containerName>/deploy" exists.
# @return [Boolean]
def image_exists?(containerName)
  imageName = containerName + "/deploy"
  return @docker_api.image_exists?(imageName)
rescue Exception => e
  # was `log_execption` (typo) — raised NameError inside the rescue clause
  log_exception(e)
  return false
end
# Lists the services attached to the named object via the ServiceManager.
# Returns nil (after logging) on error.
def list_attached_services_for(objectName,identifier)
  sm = loadServiceManager()
  return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
  log_exception e
  # Superseded implementation kept for reference:
  # object_name = object.class.name.split('::').last
  #
  # case object_name
  # when "ManagedEngine"
  # retval = Hash.new
  #
  # retval[:database] = object.databases
  # retval[:volume] = object.volumes
  # retval[:cron] = object.cron_job_list
  #
  # return retval
  #
  # #list services
  # # which includes volumes databases cron
  # end
  # p "missed object name"
  # p object_name
  #
  # service_manager = loadServiceManager()
  #
  # if service_manager !=nil
  # return service_manager.attached_services(object)
  #
  # end
  # return false
end
# Collects the services and component subservices available for an object.
# Returns {:services => ..., :subservices => ...}, or nil after logging.
def list_avail_services_for(object)
  objectname = object.class.name.split('::').last
  {
    services: load_avail_services_for(objectname),
    subservices: load_avail_component_services_for(object)
  }
rescue Exception => e
  log_exception e
end
# Resolves the container backing a software service and loads it as a
# managed service. Returns nil when the service cannot be loaded.
def load_software_service(params)
  sm = loadServiceManager()
  p :load_software_service
  p params
  service_container = sm.get_software_service_container_name(params)
  # record the resolved name back into params for downstream consumers
  params[:service_container_name] = service_container
  p :service_container_name
  p service_container
  service = loadManagedService(service_container)
  if service == nil
    return nil
  end
  return service
rescue Exception=>e
  log_exception e
end
# Pushes smarthost credentials into the smtp service container via the
# in-container setup-params script. Returns nil after logging on error.
def setup_email_params(params)
  # colon-separated key=value pairs consumed by SetupParamsScript
  arg="smarthost_hostname=" + params[:smarthost_hostname] \
  + ":smarthost_username=" + params[:smarthost_username]\
  + ":smarthost_password=" + params[:smarthost_password]\
  + ":mail_name=smtp." + params[:default_domain]
  container=loadManagedService("smtp")
  return @docker_api.docker_exec(container,SysConfig.SetupParamsScript,arg)
rescue Exception=>e
  log_exception(e)
end
# Sets database passwords inside a database server container via the
# setup-params script. Returns true when no container name is given.
def set_database_password(container_name,params)
  # NOTE(review): passes both mysql and psql passwords regardless of server
  # type — the in-container script presumably picks the relevant one; confirm.
  arg = "mysql_password=" + params[:mysql_password] +":" \
  + "server=" + container_name + ":" \
  + "psql_password=" + params[:psql_password] #Need two args
  if container_name
    server_container = loadManagedService(container_name)
    return @docker_api.docker_exec(server_container,SysConfig.SetupParamsScript,arg)
  end
  return true
rescue Exception=>e
  log_exception(e)
  return false
end
# Attaches a consumer to a software service.
# service_hash carries :parent_engine, :service_type, :service_provider,
# :name plus the fields from the service's Software Service Definition.
# @return [Boolean] result of add_consumer, or false
def attach_service(service_hash)
  if service_hash == nil
    p :attached_Service_passed_nil
    return false
  end
  service = load_software_service(service_hash)
  p :attaching_to_service
  p service
  if service != nil && service != false
    return service.add_consumer(service_hash)
  end
  # was `last_error = ... + last_error`, which created a local variable and
  # then read it back as nil (TypeError) — assign/read the ivar instead
  @last_error = "Failed to attach Service: " + @last_error.to_s
  return false
rescue Exception => e
  log_exception e
end
# Not implemented yet — always reports failure.
def detach_service(params)
  false
end
# Lazily builds and memoizes the ServiceManager instance.
def loadServiceManager()
  @service_manager ||= ServiceManager.new()
end
# Parses a SoftwareServiceDefinition from a YAML file.
# Returns nil (after logging) when opening or parsing fails.
def load_service_definition(filename)
  yaml_file = File.open(filename)
  p :open
  p filename
  # TODO(review): yaml_file is never closed — consider File.open's block form
  return SoftwareServiceDefinition.from_yaml(yaml_file)
rescue Exception => e
  # a stray bare `rescue` clause used to precede this handler and silently
  # swallowed StandardError before it could be logged
  log_exception e
end
# Loads every *.yaml service definition under the service-map template
# directory for the given object type. Returns an Array of service hashes
# (empty when the directory is missing), or nil after logging on error.
def load_avail_services_for(objectname)
  p :load_avail_services_for
  p objectname
  retval = Array.new
  dir = SysConfig.ServiceMapTemplateDir + "/" + objectname
  p :dir
  p dir
  if Dir.exists?(dir)
    Dir.foreach(dir) do |service_dir_entry|
      begin
        # skip ".", ".." and hidden files
        if service_dir_entry.start_with?(".") == true
          next
        end
        p :service_dir_entry
        p service_dir_entry
        if service_dir_entry.end_with?(".yaml")
          service = load_service_definition(dir + "/" + service_dir_entry)
          if service != nil
            p :service_as_serivce
            p service
            p :as_hash
            p service.to_h
            p :as_yaml
            p service.to_yaml()
            retval.push(service.to_h)
          end
        end
      rescue Exception=>e
        # a bad definition file should not abort the whole listing
        log_exception e
        next
      end
    end
  end
  p objectname
  p retval
  return retval
rescue Exception=>e
  log_exception e
end
# For a ManagedEngine, loads the Volume/Database service definitions that
# apply to its declared volumes and databases; nil for other object types.
def load_avail_component_services_for(object)
  retval = Hash.new
  if object.is_a?(ManagedEngine)
    if object.volumes.count >0
      p :loading_vols
      volumes = load_avail_services_for("Volume") #Array of hashes
      retval[:volume] = volumes
    end
    if object.databases.count >0
      databases = load_avail_services_for("Database") #Array of hashes
      retval[:database] = databases
    end
    return retval
  else
    return nil
  end
rescue Exception=>e
  log_exception e
end
# Applies runtime property changes (currently memory) to a stopped engine by
# destroying and recreating its container.
# @param params [Hash] :engine_name plus optional :memory
# @return [Boolean] true on success; on failure the reason is in @last_error
def set_engine_runtime_properties(params)
  # FIXME: also need to deal with Env Variables
  engine_name = params[:engine_name]
  engine = loadManagedEngine(engine_name)
  if engine.is_a?(EnginesOSapiResult) == true
    # this method used `last_error = ...` throughout, which only created a
    # local variable — callers read @last_error via attr_reader, so the
    # ivar must be assigned
    @last_error = engine.result_mesg
    return false
  end
  if engine.is_active == true
    @last_error = "Container is active"
    return false
  end
  if params.has_key?(:memory)
    if params[:memory] == engine.memory
      @last_error = "No Change in Memory Value"
      return false
    end
    if engine.update_memory(params[:memory]) == false
      @last_error = engine.last_error
      return false
    end
  end
  if engine.has_container? == true
    if destroy_container(engine) == false
      @last_error = engine.last_error
      return false
    end
  end
  if create_container(engine) == false
    @last_error = engine.last_error
    return false
  end
  return true
end
# System information and container/engine listings — forwarded unchanged
# to the system layer.
def set_engine_network_properties (engine, params)
  @system_api.set_engine_network_properties(engine, params)
end

def get_system_load_info
  @system_api.get_system_load_info
end

def get_system_memory_info
  @system_api.get_system_memory_info
end

def getManagedEngines
  @system_api.getManagedEngines
end

def loadManagedEngine(engine_name)
  @system_api.loadManagedEngine(engine_name)
end

def loadManagedService(service_name)
  @system_api.loadManagedService(service_name)
end

def getManagedServices
  @system_api.getManagedServices
end

def list_domains
  @system_api.list_domains
end

def list_managed_engines
  @system_api.list_managed_engines
end

def list_managed_services
  @system_api.list_managed_services
end
# Removes the docker container, then deregisters its site/DNS registrations
# and removes its cid file. Returns true/false.
def destroy_container(container)
  clear_error
  begin
    # docker rm may return raw output rather than true, hence != false
    if @docker_api.destroy_container(container) != false
      container.deregister_registered
      @system_api.destroy_container(container) #removes cid file
      return true
    else
      return false
    end
  rescue Exception=>e
    container.last_error=( "Failed To Destroy " + e.to_s)
    log_exception(e)
    return false
  end
end
# Deletes the container's docker image, then its on-disk configuration.
# Returns the config-deletion result, or false.
def delete_image(container)
  begin
    clear_error
    if @docker_api.delete_image(container) == true
      res = @system_api.delete_container_configs(container)
      return res
    else
      return false
    end
  rescue Exception=>e
    container.last_error=( "Failed To Delete " + e.to_s)
    log_exception(e)
    return false
  end
end
# Runs a shell command with stderr merged into stdout and heuristically
# decides success (exit 0 and no error-ish keywords in the output).
# On detected failure the output is stored in @last_error.
# @return [Boolean]
def run_system(cmd)
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res = %x<#{cmd}>
    SystemUtils.debug_output res
    # FIXME: keyword sniffing is a kludge — stderr and stdout should be
    # captured separately (downcase already makes the match case-insensitive)
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output res
      return false
    end
  rescue Exception => e
    log_exception(e)
    # was `return ret_val` — ret_val is undefined here and raised NameError
    return false
  end
end
# Runs the one-shot "volbuilder" helper container to initialise the target
# container's volumes, cleaning up any stale volbuilder first and after.
# Returns true/false.
def run_volume_builder(container,username)
  clear_error
  begin
    # remove a leftover volbuilder from a previous (failed) run
    if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
      command = "docker stop volbuilder"
      run_system(command)
      command = "docker rm volbuilder"
      run_system(command)
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    mapped_vols = get_volbuild_volmaps container
    command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder /bin/sh /home/setup_vols.sh "
    SystemUtils.debug_output command
    run_system(command)
    # tear the helper container down again
    command = "docker stop volbuilder; docker rm volbuilder"
    if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
      File.delete(SysConfig.CidDir + "/volbuilder.cid")
    end
    res = run_system(command)
    if res != true
      log_error(res)
      return false
    end
    return true
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Creates a docker container: clears the stale cid and var/run state first,
# then asks docker to create it and registers it with the system layer.
# @return [Boolean]
def create_container(container)
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # previously fell through and returned nil here when docker create
      # failed — callers that test `== false` mistook nil for success
      return false
    else
      return false
    end
  rescue Exception => e
    container.last_error = "Failed To Create " + e.to_s
    log_exception(e)
    return false
  end
end
# Rebuilds a container's image by feeding its current settings back through
# an EngineBuilder. Returns the builder's result, or false after logging.
def rebuild_image(container)
  clear_error
  begin
    params=Hash.new
    params[:engine_name] = container.containerName
    params[:domain_name] = container.domainName
    params[:host_name] = container.hostName
    params[:env_variables] = container.environments
    params[:http_protocol] = container.protocol
    params[:repository_url] = container.repo
    params[:software_environment_variables] = container.environments
    # custom_env=params
    # @http_protocol = params[:http_protocol] = container.
    builder = EngineBuilder.new(params, self)
    return builder.rebuild_managed_container(container)
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# @container_name = params[:engine_name]
# @domain_name = params[:domain_name]
# @hostname = params[:host_name]
# custom_env= params[:software_environment_variables]
# # custom_env=params
# @core_api = core_api
# @http_protocol = params[:http_protocol]
# p params
# @repoName= params[:repository_url]
# @cron_job_list = Array.new
# @build_name = File.basename(@repoName).sub(/\.git$/,"")
# @workerPorts=Array.new
# @webPort=8000
# @vols=Array.new
#FIXME Kludge
# Reads RX/TX byte counters for the first interface inside the container
# via `netstat --interfaces -e`.
# @return [Hash] {:in => String, :out => String}; "-1"/-1 on any failure
def get_container_network_metrics(containerName)
  begin
    ret_val = Hash.new
    clear_error
    # Escape the inner quotes so awk receives `print $2 " " $6`; previously
    # the Ruby literal concatenated adjacent string literals and the quotes
    # vanished, so awk printed the two fields with no separator.
    cmd = "docker exec " + containerName + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
    res = %x<#{cmd}>
    vals = res.split("bytes:")
    # split yields ["", "<rx> ", "<tx>\n"] on success — three elements
    # (the original condition `vals.count < 2` was inverted)
    if vals.count >= 3
      if vals[1] != nil && vals[2] != nil
        ret_val[:in] = vals[1].chop
        ret_val[:out] = vals[2].chop
      else
        ret_val[:in] = "-1"
        ret_val[:out] = "-1"
      end
    else
      ret_val[:in] = "-1"
      ret_val[:out] = "-1"
    end
    return ret_val
  rescue Exception => e
    log_exception(e)
    ret_val[:in] = -1
    ret_val[:out] = -1
    return ret_val
  end
end
# True once the container has written its startup_complete flag.
def is_startup_complete container
  clear_error
  @system_api.is_startup_complete(container)
rescue Exception => e
  log_exception(e)
  false
end
protected
# Builds the -v volume mappings the volbuilder helper container needs:
# the target container's state and log dirs, each declared volume's local
# path, plus --volumes-from the target container. Returns false on error.
def get_volbuild_volmaps container
  begin
    clear_error
    state_dir = SysConfig.CidDir + "/containers/" + container.containerName + "/run/"
    log_dir = SysConfig.SystemLogRoot + "/containers/" + container.containerName
    volume_option = " -v " + state_dir + ":/client/state:rw "
    volume_option += " -v " + log_dir + ":/client/log:rw "
    if container.volumes != nil
      container.volumes.each_value do |vol|
        SystemUtils.debug_output vol
        # NOTE(review): every volume maps to the same /dest/fs target —
        # confirm multiple volumes are actually supported here
        volume_option += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
      end
    end
    volume_option += " --volumes-from " + container.containerName
    return volume_option
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Resets the last-error buffer before a new operation.
def clear_error
  @last_error = ""
end
# Records an exception: message plus the backtrace frames concatenated.
def log_exception(e)
  e_str = e.to_s()
  e.backtrace.each do |bt |
    e_str += bt
  end
  @last_error = e_str
  SystemUtils.log_output(e_str,10)
end
end
# NOTE: use the @last_error instance variable here, not a local variable
class EnginesCore
require "/opt/engines/lib/ruby/SystemUtils.rb"
require "/opt/engines/lib/ruby/system/DNSHosting.rb"
class SystemApi
attr_reader :last_error
# Keeps a back-reference to the owning engines API facade.
def initialize(api)
  @engines_api = api
end
# def
# @docker_api.update_self_hosted_domain( params)
# end
# Post-creation bookkeeping: reads the cid docker wrote, persists the
# container config, then registers DNS/site entries. Returns true/false.
def create_container(container)
  clear_error
  begin
    cid = read_container_id(container)
    container.container_id=(cid)
    if save_container(container) == true
      return register_dns_and_site(container)
    else
      return false #save_container false
    end
  rescue Exception=>e
    container.last_error=("Failed To Create " + e.to_s)
    log_exception(e)
    return false
  end
end
# Registers DNS (and optionally the nginx site) for the container, honouring
# its conf_register_dns / conf_register_site flags. Returns false on the
# first failed registration; true when nothing needed registering.
def register_dns_and_site(container)
  if container.conf_register_dns == true
    if container.register_dns() == true
      if container.conf_register_site() == true
        if container.register_site == true
          return true
        else
          return false #failed to register
        end
      end # if reg site
    else
      return false #reg dns failed
    end
  end #if reg dns
  return true
end
# Sends SIGHUP to the named daemon in the dns service container so it
# reloads its zones. Returns the signal result, or false after logging.
def reload_dns
  dns_pid = File.read(SysConfig.NamedPIDFile)
  p :kill_HUP_TO_DNS
  p dns_pid.to_s
  return @engines_api.signal_service_process(dns_pid.to_s,'HUP','dns')
rescue Exception=>e
  log_exception(e)
  return false
end
# HUPs the nginx master process inside the nginx container so it reloads
# its site configuration. Returns run_system's result, or false.
def restart_nginx_process
  begin
    clear_error
    cmd= "docker exec nginx ps ax |grep \"nginx: master\" |grep -v grep |awk '{ print $1}'"
    SystemUtils.debug_output(cmd)
    nginxpid= %x<#{cmd}>
    SystemUtils.debug_output(nginxpid)
    #FIXME read from pid file this is just silly
    docker_cmd = "docker exec nginx kill -HUP " + nginxpid.to_s
    SystemUtils.debug_output(docker_cmd)
    if nginxpid.to_s != "-"
      return run_system(docker_cmd)
    else
      return false
    end
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Marks the container as having no known docker container id.
def clear_cid(container)
  container.container_id = -1
end
# True once the container's startup_complete flag file exists in its state
# directory; false otherwise or on error.
def is_startup_complete container
  clear_error
  begin
    runDir = container_state_dir(container)
    # File.exists? is deprecated (removed in Ruby 3.2); return the
    # predicate directly instead of the if/else boolean ladder
    return File.exist?(runDir + "/startup_complete")
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Deletes the container's cid file if present.
# @return [Boolean] true when absent or deleted; false on error
def clear_cid_file container
  clear_error
  begin
    cidfile = container_cid_file(container)
    # File.exists? is deprecated (removed in Ruby 3.2)
    if File.exist? cidfile
      File.delete cidfile
    end
    return true
  rescue Exception => e
    # message used to say "Failed To Create", which misdescribed this
    # cleanup path
    container.last_error = "Failed To Clear cid file " + e.to_s
    log_exception(e)
    return false
  end
end
# Reads the docker container id from the cid file.
# @return [String] the cid, or "-1" when missing or unreadable
def read_container_id(container)
  clear_error
  begin
    cidfile = container_cid_file(container)
    # File.exists? is deprecated (removed in Ruby 3.2)
    if File.exist?(cidfile)
      cid = File.read(cidfile)
      return cid
    end
    # previously fell through and returned nil here; use the same "-1"
    # sentinel as the rescue path so callers get a consistent type
    return "-1"
  rescue Exception => e
    log_exception(e)
    return "-1"
  end
end
# Clears the container id and removes its cid file. Returns true whether or
# not the file existed; false only on error.
def destroy_container container
  clear_error
  begin
    container.container_id=(-1)
    if File.exists?( container_cid_file(container)) ==true
      File.delete( container_cid_file(container))
    end
    return true #File may or may not exist
  rescue Exception=>e
    container.last_error=( "Failed To Destroy " + e.to_s)
    log_exception(e)
    return false
  end
end
# Registers an A record (TTL 30) for <host>.<internalDomain> by writing an
# nsupdate command file and running nsupdate with the ddns key.
# No gem made this simple (tiny TTL needed) and they all shell to nsupdate anyway.
def register_dns(top_level_hostname,ip_addr_str)
  clear_error
  begin
    fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
    #FIXME need unique name for temp file
    dns_cmd_file_name="/tmp/.dns_cmd_file"
    dns_cmd_file = File.new(dns_cmd_file_name,"w+")
    dns_cmd_file.puts("server " + SysConfig.defaultDNS)
    # delete any stale record first, then add the new one
    dns_cmd_file.puts("update delete " + fqdn_str)
    dns_cmd_file.puts("send")
    dns_cmd_file.puts("update add " + fqdn_str + " 30 A " + ip_addr_str)
    dns_cmd_file.puts("send")
    dns_cmd_file.close
    cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
    retval = run_system(cmd_str)
    #File.delete(dns_cmd_file_name)
    return retval
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Deletes the container's persisted config.yaml and cid file.
# Returns true/false.
def delete_container_configs(container)
  clear_error
  begin
    stateDir = container_state_dir(container) + "/config.yaml"
    File.delete(stateDir)
    cidfile = SysConfig.CidDir + "/" + container.containerName + ".cid"
    if File.exists?(cidfile)
      File.delete(cidfile)
    end
    return true
  rescue Exception=>e
    container.last_error=( "Failed To Delete " )
    log_exception(e)
    return false
  end
end
# Removes the A record for <host>.<internalDomain> via nsupdate.
# @return [Boolean] run_system's result, or false on error
def deregister_dns(top_level_hostname)
  clear_error
  begin
    fqdn_str = top_level_hostname + "." + SysConfig.internalDomain
    # the file name used to contain the literal text "top_level_hostname" —
    # a missed interpolation; build it from the actual hostname so two
    # concurrent deregistrations don't collide
    dns_cmd_file_name = "/tmp/." + top_level_hostname + ".dns_cmd_file"
    dns_cmd_file = File.new(dns_cmd_file_name, "w")
    dns_cmd_file.puts("server " + SysConfig.defaultDNS)
    dns_cmd_file.puts("update delete " + fqdn_str)
    dns_cmd_file.puts("send")
    dns_cmd_file.close
    cmd_str = "nsupdate -k " + SysConfig.ddnsKey + " " + dns_cmd_file_name
    retval = run_system(cmd_str)
    File.delete(dns_cmd_file_name)
    return retval
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Returns the fqdn when a matching certificate exists in the nginx cert
# directory, otherwise the default certificate name.
def get_cert_name(fqdn)
  # File.exists? is deprecated (removed in Ruby 3.2)
  if File.exist?(SysConfig.NginxCertDir + "/" + fqdn + ".crt")
    return fqdn
  else
    return SysConfig.NginxDefaultCert
  end
end
# Writes an nginx site file from the protocol-appropriate template
# (substituting FQDN/PORT/SERVER/CERTNAME placeholders) and HUPs nginx.
# site_hash keys: :proto, :fqdn, :port, :name. Returns restart result/false.
def register_site(site_hash)
  clear_error
  begin
    proto = site_hash[:proto]
    if proto =="http https"
      template_file=SysConfig.HttpHttpsNginxTemplate
    elsif proto =="http"
      template_file=SysConfig.HttpNginxTemplate
    elsif proto == "https"
      template_file=SysConfig.HttpsNginxTemplate
    elsif proto == nil
      p "Proto nil"
      template_file=SysConfig.HttpHttpsNginxTemplate
    else
      p "Proto" + proto + " unknown"
      template_file=SysConfig.HttpHttpsNginxTemplate
    end
    file_contents=File.read(template_file)
    # sub replaces only the first occurrence of each placeholder
    site_config_contents = file_contents.sub("FQDN",site_hash[:fqdn])
    site_config_contents = site_config_contents.sub("PORT",site_hash[:port])
    site_config_contents = site_config_contents.sub("SERVER",site_hash[:name]) #Not HostName
    if proto =="https" || proto =="http https"
      # the https template contains CERTNAME twice (cert + key)
      site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:fqdn])) #Not HostName
      site_config_contents = site_config_contents.sub("CERTNAME",get_cert_name(site_hash[:fqdn])) #Not HostName
    end
    if proto =="http https"
      #Repeat for second entry (the combined template has two server blocks)
      site_config_contents = site_config_contents.sub("FQDN",site_hash[:fqdn])
      site_config_contents = site_config_contents.sub("PORT",site_hash[:port])
      site_config_contents = site_config_contents.sub("SERVER",site_hash[:name]) #Not HostName
    end
    site_filename = get_site_file_name(site_hash)
    site_file = File.open(site_filename,'w')
    site_file.write(site_config_contents)
    site_file.close
    result = restart_nginx_process()
    return result
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Serialises a site hash to "<name>:<fqdn>:<port>:<proto>" — the textual
# site key consumed by the shell helper scripts. Returns false on error.
def hash_to_site_str(site_hash)
  clear_error
  begin
    parts = [site_hash[:name], site_hash[:fqdn], site_hash[:port], site_hash[:proto]]
    return parts.map(&:to_s).join(":")
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Builds the nginx site file path for a site hash:
# <NginxSiteDir>/<proto>_<fqdn>.site, with "http https" folded to
# "http_https".
def get_site_file_name(site_hash)
  file_name = String.new
  proto = site_hash[:proto]
  p :proto
  p proto
  # register_site treats a nil proto as "http https" (combined template);
  # use the same default here instead of crashing on nil concatenation
  proto = "http https" if proto == nil
  if proto == "http https"
    proto = "http_https"
  end
  file_name = SysConfig.NginxSiteDir + "/" + proto + "_" + site_hash[:fqdn] + ".site"
  return file_name
end
# Deletes the site's nginx config file (if present) and HUPs nginx.
# Returns the restart result, or false on error.
def deregister_site(site_hash)
  clear_error
  begin
    # # ssh_cmd=SysConfig.rmSiteCmd + " \"" + hash_to_site_str(site_hash) + "\""
    # #FIXME Should write site conf file via template (either standard or supplied with blueprint)
    # ssh_cmd = "/opt/engines/scripts/nginx/rmsite.sh " + " \"" + hash_to_site_str(site_hash) + "\""
    # SystemUtils.debug_output ssh_cmd
    # result = run_system(ssh_cmd)
    site_filename = get_site_file_name(site_hash)
    if File.exists?(site_filename)
      File.delete(site_filename)
    end
    result = restart_nginx_process()
    return result
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Registers a site with the external monitoring helper script.
def add_monitor(site_hash)
  clear_error
  run_system("#{SysConfig.addSiteMonitorCmd} \"#{hash_to_site_str(site_hash)} \"")
rescue Exception => e
  log_exception(e)
  false
end

# Removes a site from the external monitoring helper script.
def rm_monitor(site_hash)
  clear_error
  run_system("#{SysConfig.rmSiteMonitorCmd} \"#{hash_to_site_str(site_hash)} \"")
rescue Exception => e
  log_exception(e)
  false
end
# Serialises the container to YAML in its state directory (creating the
# state/run and log directories on first save, and renaming any existing
# config.yaml to config.yaml.bak). Returns true/false.
def save_container(container)
  clear_error
  begin
    #FIXME
    # the core_api back-reference must not be serialised; detach and restore
    api = container.core_api
    container.core_api = nil
    serialized_object = YAML::dump(container)
    container.core_api = api
    stateDir=SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
    if File.directory?(stateDir) ==false
      Dir.mkdir(stateDir)
      Dir.mkdir(stateDir + "/run")
    end
    log_dir = container_log_dir(container)
    if File.directory?(log_dir) ==false
      Dir.mkdir(log_dir)
    end
    statefile=stateDir + "/config.yaml"
    # BACKUP Current file with rename
    if File.exists?(statefile)
      statefile_bak = statefile + ".bak"
      File.rename( statefile, statefile_bak)
    end
    f = File.new(statefile,File::CREAT|File::TRUNC|File::RDWR, 0644)
    f.puts(serialized_object)
    f.close
    return true
  rescue Exception=>e
    container.last_error=( "load error")
    log_exception(e)
    return false
  end
end
# Writes the blueprint as JSON into the container's state directory.
# @return [Boolean] true on success; false for a nil blueprint or on error
def save_blueprint(blueprint, container)
  clear_error
  begin
    if blueprint != nil
      puts blueprint.to_s
    else
      return false
    end
    stateDir = container_state_dir(container)
    if File.directory?(stateDir) == false
      Dir.mkdir(stateDir)
    end
    statefile = stateDir + "/blueprint.json"
    f = File.new(statefile, File::CREAT | File::TRUNC | File::RDWR, 0644)
    f.write(blueprint.to_json)
    f.close
    # previously returned nil (f.close) on success, which callers treating
    # falsy as failure misread — make success explicit
    return true
  rescue Exception => e
    log_exception(e)
    return false
  end
end
# Loads and parses the container's blueprint.json.
# Returns the parsed Hash, or false when the file/directory is missing or
# on error.
def load_blueprint(container)
  clear_error
  begin
    stateDir=container_state_dir(container)
    if File.directory?(stateDir) ==false
      return false
    end
    statefile=stateDir + "/blueprint.json"
    if File.exists?(statefile)
      f = File.new(statefile,"r")
      blueprint = JSON.parse( f.read())
      f.close
    else
      return false
    end
    return blueprint
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
# Persists the domains hash to SysConfig.DomainsFile as YAML.
# Returns true/false.
def save_domains(domains)
  clear_error
  begin
    File.open(SysConfig.DomainsFile, "w") do |domain_file|
      domain_file.write(domains.to_yaml())
    end
    return true
  rescue Exception => e
    SystemUtils.log_exception(e)
    return false
  end
end
# Loads the domains hash from SysConfig.DomainsFile, creating an empty file
# (and returning an empty Hash) on first use. Any load failure also yields
# an empty Hash so callers never see nil.
def load_domains
  clear_error
  begin
    if File.exists?(SysConfig.DomainsFile) == false
      p :creating_new_domain_list
      self_hosted_domain_file = File.open(SysConfig.DomainsFile,"w")
      self_hosted_domain_file.close
      return Hash.new
    else
      self_hosted_domain_file = File.open(SysConfig.DomainsFile,"r")
    end
    domains = YAML::load( self_hosted_domain_file )
    self_hosted_domain_file.close
    # YAML::load returns false for an empty file
    if domains == false
      p :domains_error_in_load
      return Hash.new
    end
    return domains
  rescue Exception=>e
    domains = Hash.new
    p "failed_to_load_domains"
    SystemUtils.log_exception(e)
    return domains
  end
end
def list_domains
  # Alias over load_domains, kept for API symmetry with list_* siblings.
  load_domains
rescue Exception => e
  p :error_listing_domains
  SystemUtils.log_exception(e)
  Hash.new
end
def add_domain(params)
  # Register the domain described by params (keyed on :domain_name);
  # self-hosted domains are also pushed to DNS hosting first.
  # Returns true when the registry saved, false otherwise.
  clear_error
  add_self_hosted_domain params if params[:self_hosted]
  p :add_domain
  p params
  registry = load_domains()
  registry[params[:domain_name]] = params
  return true if save_domains(registry)
  p :failed_add_hosted_domains
  return false
rescue Exception => e
  SystemUtils.log_exception(e)
  return false
end
def rm_domain(domain,system_api)
  # Drop a domain from the registry (if present) and reload DNS so the
  # removal takes effect. No-op for unknown domains.
  clear_error
  registry = load_domains
  return unless registry.has_key?(domain)
  registry.delete(domain)
  save_domains(registry)
  system_api.reload_dns
end
def update_domain(old_domain_name, params,system_api)
  # Replace the registry entry for old_domain_name with params (which may
  # carry a new :domain_name). Self-hosted domains are re-registered and
  # DNS reloaded. Returns true on success, false after a logged error.
  clear_error
  registry = load_domains()
  registry.delete(old_domain_name)
  registry[params[:domain_name]] = params
  save_domains(registry)
  if params[:self_hosted]
    add_self_hosted_domain params
    rm_self_hosted_domain(old_domain_name)
    system_api.reload_dns
  end
  true
rescue Exception => e
  log_exception(e)
  false
end
def add_self_hosted_domain params
  # Hand a self-hosted domain definition off to DNSHosting.
  # Returns DNSHosting's result, or false after a logged exception.
  clear_error
  begin
    p :Lachlan_Sent_parrams
    p params
    # BUGFIX: everything after this return was unreachable dead code (it
    # also referenced an undefined local `domains`); removed.
    return DNSHosting.add_hosted_domain(params,self)
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
def list_self_hosted_domains()
  # Delegate to DNSHosting for the current self-hosted domain list.
  # Returns false when the lookup raises.
  clear_error
  DNSHosting.load_self_hosted_domains()
rescue Exception => e
  log_exception(e)
  false
end
def update_self_hosted_domain(old_domain_name, params)
  # Re-key the self-hosted entry from old_domain_name to the new
  # params[:domain_name], then persist. Returns true/false.
  clear_error
  hosted = load_self_hosted_domains()
  hosted.delete(old_domain_name)
  hosted[params[:domain_name]] = params
  save_self_hosted_domains(hosted)
  true
rescue Exception => e
  log_exception(e)
  false
end
def remove_self_hosted_domain( domain_name)
  # Ask DNSHosting to drop the hosted domain. Returns its result, or
  # false after a logged exception.
  clear_error
  DNSHosting.rm_hosted_domain(domain_name,self)
rescue Exception => e
  log_exception(e)
  false
end
def save_system_preferences
  # Placeholder: preferences persistence is not implemented yet — emits a
  # debug marker and reports success.
  clear_error
  SystemUtils.debug_output :pdsf
  true
rescue Exception => e
  log_exception(e)
  false
end
def load_system_preferences
  # Placeholder: nothing is loaded yet — emits a debug marker only.
  clear_error
  SystemUtils.debug_output :psdfsd
rescue Exception => e
  log_exception(e)
  false
end
def get_container_memory_stats(container)
  # Read docker cgroup memory counters for the container.
  # Returns a Hash with :maximum, :current and :limit (byte-count strings
  # from the cgroup files); "No Container" when no cgroup dir exists, and
  # the exception text / "NA" after an error.
  clear_error
  ret_val= Hash.new
  begin
    # BUGFIX: `container && x == nil || y` parsed as
    # `(container && x == nil) || y`, so a nil container crashed on
    # container.container_id; parenthesised to the intended grouping.
    if container && (container.container_id == nil || container.container_id == '-1')
      container_id = read_container_id(container)
      container.container_id=(container_id)
    end
    if container && container.container_id != nil && container.container_id != '-1'
      path = "/sys/fs/cgroup/memory/docker/" + container.container_id + "/"
      if Dir.exists?(path)
        ret_val.store(:maximum , File.read(path + "/memory.max_usage_in_bytes"))
        ret_val.store(:current , File.read(path + "/memory.usage_in_bytes"))
        ret_val.store(:limit , File.read(path + "/memory.limit_in_bytes"))
      else
        p :no_cgroup_file
        p path
        ret_val.store(:maximum , "No Container")
        ret_val.store(:current , "No Container")
        ret_val.store(:limit , "No Container")
      end
    end
    return ret_val
  rescue Exception=>e
    log_exception(e)
    ret_val.store(:maximum , e.to_s)
    ret_val.store(:current , "NA")
    ret_val.store(:limit , "NA")
    return ret_val
  end
end
def set_engine_network_properties(engine, params)
  # Switch the engine between HTTPS-only / HTTP-only / mixed serving
  # based on params[:http_protocol]. Unknown protocol strings are
  # ignored. Returns true, or false when the protocol is missing or an
  # exception was logged.
  clear_error
  protocol = params[:http_protocol]
  if protocol.nil?
    p params
    return false
  end
  SystemUtils.debug_output("Changing protocol to _" + protocol + "_")
  if protocol.include?("HTTPS only")
    engine.enable_https_only
  elsif protocol.include?("HTTP only")
    engine.enable_http_only
  elsif protocol.include?("HTTPS and HTTP")
    engine.enable_http_and_https
  end
  true
rescue Exception => e
  log_exception(e)
  false
end
def set_engine_hostname_details(container,params)
  # Apply a new hostname/domain to the container; when either changed,
  # re-register the container with the nginx and dns managed services
  # (remove then re-add so both pick up the new name) and persist the
  # container config. Returns true unless an exception is raised.
  clear_error
  begin
    hostname = params[:host_name]
    domain_name = params[:domain_name]
    SystemUtils.debug_output("Changing Domainame to " + domain_name)
    # Cleanup: removed unused locals (engine_name, saved_hostName,
    # saved_domainName) and collapsed the duplicated `return true` paths.
    if container.hostName != hostname || container.domainName != domain_name
      SystemUtils.debug_output("Changing Domainame to " + domain_name)
      if container.set_hostname_details(hostname,domain_name) == true
        nginx_service = EnginesOSapi.loadManagedService("nginx",self)
        nginx_service.remove_consumer(container)
        dns_service = EnginesOSapi.loadManagedService("dns",self)
        dns_service.remove_consumer(container)
        dns_service.add_consumer(container)
        nginx_service.add_consumer(container)
        save_container(container)
      end
    end
    return true
  rescue Exception=>e
    log_exception(e)
    return false
  end
end
def get_system_memory_info
  # Parse /proc/meminfo into a Hash of kB figures keyed by :total, :free,
  # :buffers, :file_cache, :active, :inactive, :swap_total, :swap_free.
  # On error every value is -1 except :total, which carries the
  # exception text.
  clear_error
  ret_val = Hash.new
  field_map = {
    "MemTotal:"  => :total,
    "MemFree:"   => :free,
    "Buffers:"   => :buffers,
    "Cached:"    => :file_cache,
    "Active:"    => :active,
    "Inactive:"  => :inactive,
    "SwapTotal:" => :swap_total,
    "SwapFree:"  => :swap_free
  }
  begin
    meminfo = File.open("/proc/meminfo")
    meminfo.each_line do |line|
      label, amount = line.split(" ")
      key = field_map[label]
      ret_val[key] = amount if key
    end
    return ret_val
  rescue Exception => e
    log_exception(e)
    ret_val[:total] = e.to_s
    [:free, :active, :inactive, :file_cache,
     :buffers, :swap_total, :swap_free].each { |k| ret_val[k] = -1 }
    return ret_val
  end
end
def get_system_load_info
  # Parse /proc/loadavg into a Hash: :one/:five/:fithteen load averages
  # (misspelled key kept for API compatibility) plus :running and :idle
  # task counts. All values are -1 after a logged error.
  clear_error
  ret_val = Hash.new
  begin
    loadavg_info = File.read("/proc/loadavg")
    values = loadavg_info.split(" ")
    ret_val[:one] = values[0]
    ret_val[:five] = values[1]
    ret_val[:fithteen] = values[2]
    run_idle = values[3].split("/")
    ret_val[:running] = run_idle[0]
    ret_val[:idle] = run_idle[1]
    # BUGFIX: the hash was never returned on success (the method fell out
    # returning the last assignment's value), and a second, unreachable
    # `rescue Exception` clause was removed.
    return ret_val
  rescue Exception=>e
    log_exception(e)
    ret_val[:one] = -1
    ret_val[:five] = -1
    ret_val[:fithteen] = -1
    ret_val[:running] = -1
    ret_val[:idle] = -1
    return ret_val
  end
end
def getManagedEngines()
  # Load every container dir that has a config.yaml as a ManagedEngine.
  # Load failures are logged and skipped. Returns the Array of engines,
  # or false when the directory scan itself raises.
  engines = Array.new
  containers_root = SysConfig.CidDir + "/containers/"
  Dir.entries(containers_root).each do |entry|
    config_path = containers_root + entry + "/config.yaml"
    next unless File.exists?(config_path)
    engine = loadManagedEngine(entry)
    if engine.is_a?(ManagedEngine)
      engines.push(engine)
    else
      log_error("failed to load " + config_path)
    end
  end
  return engines
rescue Exception => e
  log_exception(e)
  return false
end
def loadManagedEngine(engine_name)
  # Deserialise a ManagedEngine from <cid dir>/containers/<name>/config.yaml.
  # Returns the engine, or false when the name is blank, the config file
  # is missing, or deserialisation fails.
  if engine_name == nil || engine_name.length ==0
    # BUGFIX: previously assigned a throwaway local `last_error`; record
    # the failure on the instance so callers can read #last_error.
    @last_error = "No Engine Name"
    return false
  end
  begin
    yam_file_name = SysConfig.CidDir + "/containers/" + engine_name + "/config.yaml"
    if File.exists?(yam_file_name) == false
      log_error("no such file " + yam_file_name )
      return false
    end
    yaml_file = File.open(yam_file_name)
    managed_engine = ManagedEngine.from_yaml( yaml_file,@engines_api)
    if(managed_engine == nil || managed_engine == false)
      p :from_yaml_returned_nil
      return false
    end
    return managed_engine
  rescue Exception=>e
    if engine_name != nil
      if managed_engine !=nil
        managed_engine.last_error=( "Failed To get Managed Engine " + engine_name + " " + e.to_s)
        log_error(managed_engine.last_error)
      end
    else
      log_error("nil Engine Name")
    end
    log_exception(e)
    return false
  end
end
def loadManagedService(service_name)
  # Deserialise a ManagedService from <cid dir>/services/<name>/config.yaml.
  # Returns the service, or false when the name is blank, the config is
  # missing, or deserialisation fails.
  begin
    if service_name == nil || service_name.length ==0
      # BUGFIX: was a useless local assignment; set the instance ivar.
      @last_error = "No Service Name"
      return false
    end
    yam_file_name = SysConfig.CidDir + "/services/" + service_name + "/config.yaml"
    if File.exists?(yam_file_name) == false
      return false
    end
    yaml_file = File.open(yam_file_name)
    managed_service = ManagedService.from_yaml(yaml_file,@engines_api)
    if managed_service == nil
      return false
    end
    return managed_service
  rescue Exception=>e
    if service_name != nil
      if managed_service !=nil
        # BUGFIX: message previously said "Managed Engine" for a service
        # load failure.
        managed_service.last_error=( "Failed To get Managed Service " + service_name + " " + e.to_s)
        log_error(managed_service.last_error)
      end
    else
      log_error("nil Service Name")
    end
    log_exception(e)
    return false
  end
end
def getManagedServices()
  # Deserialise every service config under <cid dir>/services/ into
  # ManagedService objects (failed loads are skipped silently).
  # Returns the Array, or false when the scan raises.
  services = Array.new
  services_root = SysConfig.CidDir + "/services/"
  Dir.entries(services_root).each do |entry|
    config_path = services_root + entry + "/config.yaml"
    next unless File.exists?(config_path)
    config_file = File.open(config_path)
    service = ManagedService.from_yaml(config_file,@engines_api)
    services.push(service) if service
    config_file.close
  end
  return services
rescue Exception => e
  log_exception(e)
  return false
end
def list_managed_engines
  # Names of all container dirs that contain a config.yaml. Returns
  # whatever was gathered before any error (logged).
  clear_error
  names = Array.new
  begin
    root = SysConfig.CidDir + "/containers/"
    Dir.entries(root).each do |entry|
      names.push(entry) if File.exists?(root + entry + "/config.yaml")
    end
  rescue Exception => e
    log_exception(e)
  end
  return names
end
def list_managed_services
  # Names of all service dirs that contain a config.yaml. Returns
  # whatever was gathered before any error (logged).
  clear_error
  names = Array.new
  begin
    root = SysConfig.CidDir + "/services/"
    Dir.entries(root).each do |entry|
      names.push(entry) if File.exists?(root + entry + "/config.yaml")
    end
  rescue Exception => e
    log_exception(e)
  end
  return names
end
def clear_container_var_run(container)
  # Reset the container's run-state flags. Currently only removes the
  # startup_complete marker; later this may reset subscriptions and other
  # flags too. Returns true/false.
  clear_error
  marker = container_state_dir(container) + "/startup_complete"
  File.unlink(marker) if File.exists?(marker)
  true
rescue Exception => e
  log_exception(e)
  false
end
protected
def container_cid_file(container)
  # Path of the docker cid file recorded for this container.
  "#{SysConfig.CidDir}/#{container.containerName}.cid"
end
def container_state_dir(container)
  # Per-container state dir, e.g. <cid dir>/containers/<name>.
  "#{SysConfig.CidDir}/#{container.ctype}s/#{container.containerName}"
end
def container_log_dir container
  # Per-container log dir under the system log root.
  "#{SysConfig.SystemLogRoot}/#{container.ctype}s/#{container.containerName}"
end
def run_system (cmd)
  # Run a shell command with stderr merged into stdout. Returns true when
  # the exit status is 0 and the output contains none of the known
  # failure phrases; otherwise returns the raw output. Returns false when
  # execution itself raises.
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res= %x<#{cmd}>
    SystemUtils.debug_output res
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      return res
    end
  rescue Exception=>e
    log_exception(e)
    # BUGFIX: previously returned the undefined local `ret_val`, which
    # raised NameError from inside the rescue clause.
    return false
  end
end
def clear_error
  # Reset the per-call error message.
  @last_error = ''
end
def log_error(e_str)
  # Record the message for #last_error readers and emit it at level 10.
  @last_error = e_str
  SystemUtils.log_output(e_str, 10)
end
def log_exception(e)
  # Flatten an exception and its full backtrace into one string and log
  # it via log_error.
  details = e.to_s()
  e.backtrace.each { |frame| details += frame }
  log_error(details)
end
end #END of SystemApi
class DockerApi
  # Thin wrapper around the `docker` command-line tool. Each call shells
  # out via run_docker; stdout/stderr are recorded on the container object
  # passed in, and the most recent failure is mirrored in @last_error.
  attr_reader :last_error

  # `docker run -d` with the full argument string derived from the
  # container's configuration. Returns run_docker's boolean result.
  def create_container container
    clear_error
    begin
      commandargs = container_commandline_args(container)
      commandargs = " run -d " + commandargs
      SystemUtils.debug_output commandargs
      retval = run_docker(commandargs,container)
      return retval
    rescue Exception=>e
      container.last_error=("Failed To Create ")
      log_exception(e)
      return false
    end
  end

  # `docker start <name>`.
  def start_container container
    clear_error
    begin
      commandargs =" start " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker stop <name>`.
  def stop_container container
    clear_error
    begin
      commandargs=" stop " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker pause <name>`.
  def pause_container container
    clear_error
    begin
      commandargs = " pause " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # True when `docker images -q <image_name>` prints anything.
  def image_exists? (image_name)
    cmd= "docker images -q " + image_name
    res = SystemUtils.run_system(cmd)
    if res.length >0
      return true
    else
      return false
    end
  end

  # `docker unpause <name>`.
  def unpause_container container
    clear_error
    begin
      commandargs=" unpause " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker top <name> axl` — process listing inside the container.
  def ps_container container
    clear_error
    begin
      commandargs=" top " + container.containerName + " axl"
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker exec <name> kill -<signal> <pid>`.
  def signal_container_process(pid,signal,container)
    clear_error
    commandargs=" exec " + container.containerName + " kill -" + signal + " " + pid.to_s
    return run_docker(commandargs,container)
  rescue Exception=>e
    log_exception(e)
    return false
  end

  # `docker logs <name>`.
  def logs_container container
    clear_error
    begin
      commandargs=" logs " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker inspect <name>`.
  def inspect_container container
    clear_error
    begin
      commandargs=" inspect " + container.containerName
      return run_docker(commandargs,container)
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # `docker rm <name>`.
  def destroy_container container
    clear_error
    begin
      commandargs= " rm " + container.containerName
      ret_val = run_docker(commandargs,container)
    rescue Exception=>e
      container.last_error=( "Failed To Destroy " + e.to_s)
      log_exception(e)
      return false
    end
  end

  # `docker rmi <image>`.
  def delete_image container
    clear_error
    begin
      commandargs= " rmi " + container.image
      ret_val = run_docker(commandargs,container)
      return ret_val
    rescue Exception=>e
      container.last_error=( "Failed To Delete " + e.to_s)
      log_exception(e)
      return false
    end
  end

  # `docker exec <name> <command> <args>`.
  def docker_exec(container,command,args)
    run_args = "exec " + container.containerName + " " + command + " " + args
    return run_docker(run_args,container)
  end

  # Run `docker <args>` via Open3, streaming stdout into
  # container.last_result and stderr into container.last_error /
  # @last_error. Returns false when stderr contains "Error", true
  # otherwise.
  def run_docker (args,container)
    clear_error
    require 'open3'
    SystemUtils.debug_output(args)
    res = String.new
    error_mesg = String.new
    begin
      container.last_result=( "")
      Open3.popen3("docker " + args ) do |stdin, stdout, stderr, th|
        oline = String.new
        stderr_is_open=true
        begin
          stdout.each { |line|
            line = line.gsub(/\\\"/,"")
            oline = line
            res += line.chop
            if stderr_is_open
              error_mesg += stderr.read_nonblock(256)
            end
          }
        rescue Errno::EIO
          res += oline.chop
          SystemUtils.debug_output(oline)
          error_mesg += stderr.read_nonblock(256)
        rescue IO::WaitReadable
          retry
        rescue EOFError
          if stdout.closed? == false
            stderr_is_open = false
            retry
          elsif stderr.closed? == false
            error_mesg += stderr.read_nonblock(1000)
            container.last_result=( res)
            # BUGFIX: referenced the undefined local `error_mesgs` (note
            # the trailing s) here and in the branch below, raising
            # NameError that was then misreported by the outer rescue.
            container.last_error=( error_mesg)
          else
            container.last_result=( res)
            container.last_error=( error_mesg)
          end
        end
        @last_error=error_mesg
        if error_mesg.include?("Error")
          container.last_error=(error_mesg)
          return false
        else
          container.last_error=("")
        end
        # Line-by-line chopping can drop the closing bracket of JSON
        # array output (e.g. docker inspect); restore it.
        if res != nil && res.end_with?(']') == false
          res+=']'
        end
        container.last_result=(res)
        return true
      end
    rescue Exception=>e
      @last_error=error_mesg + e.to_s
      container.last_result=(res)
      container.last_error=(error_mesg + e.to_s)
      log_exception(e)
      return false
    end
    return true
  end

  # ` -e NAME="value"` pairs for every configured environment variable.
  # Returns the exception text on error.
  def get_envionment_options(container)
    e_option =String.new
    if(container.environments)
      container.environments.each do |environment|
        if environment != nil
          e_option = e_option + " -e " + environment.name + "=" + '"' + environment.value + '"'
        end
      end
    end
    return e_option
  rescue Exception=>e
    log_exception(e)
    return e.to_s
  end

  # ` -p [external:]port/proto` options for every exposed port;
  # proto_type defaults to tcp (written back onto the port object).
  def get_port_options(container)
    eportoption = String.new
    if(container.eports )
      container.eports.each do |eport|
        if eport != nil
          eportoption = eportoption + " -p "
          if eport.external != nil && eport.external >0
            eportoption = eportoption + eport.external.to_s + ":"
          end
          eportoption = eportoption + eport.port.to_s
          if eport.proto_type == nil
            eport.proto_type=('tcp')
          end
          eportoption = eportoption + "/"+ eport.proto_type + " "
        end
      end
    end
    return eportoption
  rescue Exception=>e
    log_exception(e)
    return e.to_s
  end

  # Full `docker run` argument string: hostname, environment, memory cap,
  # volume maps, port maps, cid file, name, image, and the init script
  # for containers that do not self-start.
  def container_commandline_args(container)
    clear_error
    begin
      envionment_options = get_envionment_options( container)
      port_options = get_port_options( container)
      volume_option = get_volume_option( container)
      if container.conf_self_start == false
        start_cmd=" /bin/bash /home/init.sh"
      else
        start_cmd=" "
      end
      commandargs = "-h " + container.hostName + \
        envionment_options + \
        " --memory=" + container.memory.to_s + "m " +\
        volume_option + " " +\
        port_options +\
        " --cidfile " + SysConfig.CidDir + "/" + container.containerName + ".cid " +\
        "--name " + container.containerName + \
        " -t " + container.image + " " +\
        start_cmd
      return commandargs
    rescue Exception=>e
      log_exception(e)
      return e.to_s
    end
  end

  # -v mappings: timezone file, the run-state dir, the container's log
  # dir (plus a /var/log shadow when the container logs elsewhere), and
  # any configured per-container volumes. Returns false on error.
  def get_volume_option(container)
    clear_error
    begin
      #System
      volume_option = SysConfig.timeZone_fileMapping #latter this will be customised
      volume_option += " -v " + container_state_dir(container) + "/run:/engines/var/run:rw "
      incontainer_logdir = get_container_logdir(container)
      volume_option += " -v " + container_log_dir(container) + ":/" + incontainer_logdir + ":rw "
      if incontainer_logdir !="/var/log" && incontainer_logdir !="/var/log/"
        volume_option += " -v " + container_log_dir(container) + "/vlog:/var/log/:rw"
      end
      #container specific
      if(container.volumes)
        container.volumes.each_value do |volume|
          if volume !=nil
            if volume.localpath !=nil
              volume_option = volume_option.to_s + " -v " + volume.localpath.to_s + ":/" + volume.remotepath.to_s + ":" + volume.mapping_permissions.to_s
            end
          end
        end
      end
      return volume_option
    rescue Exception=>e
      log_exception(e)
      return false
    end
  end

  # In-container log directory: the framework-specific LOG_DIR template
  # when present, the global template otherwise, and /var/log as a last
  # resort when neither file can be read.
  def get_container_logdir(container)
    clear_error
    if container.framework == nil || container.framework.length ==0
      return "/var/log"
    end
    container_logdetails_file_name = false
    framework_logdetails_file_name = SysConfig.DeploymentTemplates + "/" + container.framework + "/home/LOG_DIR"
    SystemUtils.debug_output(framework_logdetails_file_name)
    if File.exists?(framework_logdetails_file_name )
      container_logdetails_file_name = framework_logdetails_file_name
    else
      container_logdetails_file_name = SysConfig.DeploymentTemplates + "/global/home/LOG_DIR"
    end
    SystemUtils.debug_output(container_logdetails_file_name)
    begin
      container_logdetails = File.read(container_logdetails_file_name)
    rescue
      container_logdetails = "/var/log"
    end
    return container_logdetails
  rescue Exception=>e
    log_exception(e)
    return false
  end

  protected

  # Per-container state dir, e.g. <cid dir>/containers/<name>.
  def container_state_dir(container)
    return SysConfig.CidDir + "/" + container.ctype + "s/" + container.containerName
  end

  # Per-container log dir under the system log root.
  def container_log_dir container
    return SysConfig.SystemLogRoot + "/" + container.ctype + "s/" + container.containerName
  end

  # Reset the per-call error message.
  def clear_error
    @last_error = ""
  end

  # Record the exception plus up to ~10 backtrace frames and log it.
  def log_exception(e)
    e_str = e.to_s()
    n=0
    e.backtrace.each do |bt |
      e_str += bt
      if n >10
        break
      end
      # BUGFIX: `++n` is a double unary plus in Ruby (a no-op), so n never
      # advanced and the frame cap never triggered.
      n += 1
    end
    @last_error = e_str
    SystemUtils.log_output(e_str,10)
  end
end#END of DockerApi
# Wire up the two backends: DockerApi drives the docker CLI; SystemApi
# manages on-disk config/state (given self for callbacks).
def initialize
@docker_api = DockerApi.new
@system_api = SystemApi.new(self) #will change to to docker_api and not self
end
attr_reader :last_error
# Look up the software service definition matching params via the
# (lazily created) service manager.
def software_service_definition(params)
  loadServiceManager.software_service_definition(params)
end

# Register a new domain (see SystemApi#add_domain).
def add_domain(params)
  @system_api.add_domain(params)
end
#
# def remove_containers_cron_list(containerName)
# p :remove_containers_cron
# if @system_api.remove_containers_cron_list(containerName)
# cron_service = loadManagedService("cron")
# return @system_api.rebuild_crontab(cron_service)
# else
# return false
# end
# end
#
# def rebuild_crontab(cron_service)
# #acutally a rebuild (or resave) as hadh already removed from consumer list
# p :rebuild_crontab
# return @system_api.rebuild_crontab(cron_service)
# end
# Delete a domain from the registry and reload DNS.
def remove_domain(params)
  @system_api.rm_domain(params[:domain_name], @system_api)
end

# Re-key / update a domain entry, refreshing DNS when self-hosted.
def update_domain(old_domain, params)
  @system_api.update_domain(old_domain, params, @system_api)
end

# Send a signal to a process inside the named managed service's
# container.
def signal_service_process(pid, sig, name)
  container = loadManagedService(name)
  @docker_api.signal_container_process(pid, sig, container)
end
# Start the container; DockerApi's result collapsed to a strict boolean.
def start_container(container)
  @docker_api.start_container(container) == true
end

# docker inspect passthrough.
def inspect_container(container)
  @docker_api.inspect_container(container)
end

# Stop the container; strict boolean result.
def stop_container(container)
  @docker_api.stop_container(container) == true
end

# docker pause passthrough.
def pause_container(container)
  @docker_api.pause_container(container)
end

# docker unpause passthrough.
def unpause_container(container)
  @docker_api.unpause_container(container)
end

# In-container process listing (docker top) passthrough.
def ps_container(container)
  @docker_api.ps_container(container)
end

# docker logs passthrough.
def logs_container(container)
  @docker_api.logs_container(container)
end
# --- Thin delegations to SystemApi: monitoring, container/blueprint
# --- persistence, volumes, self-hosted DNS, preferences, site and DNS
# --- registration, container stats and hostname changes. ---

def add_monitor(site_hash)
  @system_api.add_monitor(site_hash)
end

def rm_monitor(site_hash)
  @system_api.rm_monitor(site_hash)
end

def save_container(container)
  @system_api.save_container(container)
end

def save_blueprint(blueprint, container)
  @system_api.save_blueprint(blueprint, container)
end

def load_blueprint(container)
  @system_api.load_blueprint(container)
end

def add_volume(site_hash)
  @system_api.add_volume(site_hash)
end

def rm_volume(site_hash)
  @system_api.rm_volume(site_hash)
end

def remove_self_hosted_domain(domain_name)
  @system_api.remove_self_hosted_domain(domain_name)
end

def add_self_hosted_domain(params)
  @system_api.add_self_hosted_domain(params)
end

def list_self_hosted_domains()
  @system_api.list_self_hosted_domains()
end

def update_self_hosted_domain(old_domain_name, params)
  @system_api.update_self_hosted_domain(old_domain_name, params)
end

def load_system_preferences
  @system_api.load_system_preferences
end

def save_system_preferences
  @system_api.save_system_preferences
end

def register_site(site_hash)
  @system_api.register_site(site_hash)
end

def deregister_site(site_hash)
  @system_api.deregister_site(site_hash)
end

def hash_to_site_str(site_hash)
  @system_api.hash_to_site_str(site_hash)
end

def deregister_dns(top_level_hostname)
  @system_api.deregister_dns(top_level_hostname)
end

def register_dns(top_level_hostname, ip_addr_str)
  @system_api.register_dns(top_level_hostname, ip_addr_str)
end

def get_container_memory_stats(container)
  @system_api.get_container_memory_stats(container)
end

def set_engine_hostname_details(container, params)
  @system_api.set_engine_hostname_details(container, params)
end
def image_exists?(containerName)
  # True when a docker image tagged <containerName>/deploy is present.
  imageName = containerName +"/deploy"
  return @docker_api.image_exists?(imageName)
rescue Exception=>e
  # BUGFIX: called the misspelled `log_execption`, so any failure raised
  # NoMethodError from inside the rescue instead of being logged.
  log_exception(e)
  return false
end
def list_attached_services_for(objectName,identifier)
  # Ask the service manager which services are attached to the named
  # object/identifier pair. Returns nil after a logged exception.
  # Cleanup: removed ~25 lines of dead commented-out code from an earlier
  # implementation.
  sm = loadServiceManager()
  return sm.list_attached_services_for(objectName,identifier)
rescue Exception=>e
  log_exception e
end
def list_avail_services_for(object)
  # Collect both top-level services (keyed by the object's class name)
  # and component services (volumes/databases) available to the object.
  # Returns {:services => [...], :subservices => {...}}.
  class_name = object.class.name.split('::').last
  {
    :services => load_avail_services_for(class_name),
    :subservices => load_avail_component_services_for(object)
  }
rescue Exception => e
  log_exception e
end
def load_software_service(params)
  # Resolve the container providing the software service described by
  # params, store it back into params[:service_container_name], and load
  # it as a managed service. Returns the service (or the loader's
  # nil/false result); nil after a logged exception.
  sm = loadServiceManager()
  p :load_software_service
  p params
  service_container = sm.get_software_service_container_name(params)
  params[:service_container_name] = service_container
  p :service_container_name
  p service_container
  loadManagedService(service_container)
rescue Exception => e
  log_exception e
end
def setup_email_params(params)
  # Push SMTP smarthost credentials and the mail name into the smtp
  # service container via the setup-params script. Returns the docker
  # exec result; nil after a logged exception.
  arg = "smarthost_hostname=" + params[:smarthost_hostname] \
    + ":smarthost_username=" + params[:smarthost_username] \
    + ":smarthost_password=" + params[:smarthost_password] \
    + ":mail_name=smtp." + params[:default_domain]
  smtp_container = loadManagedService("smtp")
  @docker_api.docker_exec(smtp_container, SysConfig.SetupParamsScript, arg)
rescue Exception => e
  log_exception(e)
end
def set_database_password(container_name,params)
  # Set the mysql/psql passwords inside the named database server
  # container via the setup-params script. Returns true when no container
  # name was given (nothing to do), false after a logged exception.
  arg = "mysql_password=" + params[:mysql_password] +":" \
    + "server=" + container_name + ":" \
    + "psql_password=" + params[:psql_password] #Need two args
  if container_name
    server_container = loadManagedService(container_name)
    return @docker_api.docker_exec(server_container, SysConfig.SetupParamsScript, arg)
  end
  true
rescue Exception => e
  log_exception(e)
  false
end
def attach_service(service_hash)
  # Attach a consumer to its providing service. service_hash is built
  # from the fields of the Software Service Definition for that service:
  # :parent_engine, :service_type, :service_provider, :name plus
  # service-specific entries. Returns the provider's add_consumer result,
  # false when the hash is nil or the provider can't be loaded.
  if service_hash == nil
    p :attached_Service_passed_nil
    return false
  end
  provider = load_software_service(service_hash)
  p :attaching_to_service
  p service_hash
  if provider != nil && provider != false
    return provider.add_consumer(service_hash)
  end
  @last_error = "Failed to attach Service: " + @last_error
  false
rescue Exception => e
  log_exception e
end
def detach_service(params)
  # Not implemented yet; always reports failure.
  false
end
def loadServiceManager()
  # Lazily construct and memoise the ServiceManager singleton.
  @service_manager ||= ServiceManager.new()
end
def load_service_definition(filename)
  # Parse a SoftwareServiceDefinition from the named YAML file.
  # Returns nil after logging when the file can't be opened or parsed.
  yaml_file = File.open(filename)
  p :open
  p filename
  return SoftwareServiceDefinition.from_yaml(yaml_file)
  # BUGFIX: a stray bare `rescue` clause with an empty body preceded the
  # real handler, silently swallowing every StandardError; removed so
  # failures are logged.
rescue Exception=>e
  log_exception e
end
# Scan the service-map template dir for the given class name (e.g.
# "ManagedEngine") and load every *.yaml service definition found.
# Returns an Array of definition hashes (nil after a logged exception at
# the top level; individual bad files are logged and skipped).
# NOTE(review): the many `p` calls are debug output left in deliberately.
def load_avail_services_for(objectname)
p :load_avail_services_for
p objectname
retval = Array.new
dir = SysConfig.ServiceMapTemplateDir + "/" + objectname
p :dir
p dir
if Dir.exists?(dir)
Dir.foreach(dir) do |service_dir_entry|
begin
# skip "." / ".." and other hidden entries
if service_dir_entry.start_with?(".") == true
next
end
p :service_dir_entry
p service_dir_entry
if service_dir_entry.end_with?(".yaml")
service = load_service_definition(dir + "/" + service_dir_entry)
if service != nil
p :service_as_serivce
p service
p :as_hash
p service.to_h
p :as_yaml
p service.to_yaml()
# definitions are returned as plain hashes
retval.push(service.to_h)
end
end
rescue Exception=>e
# a single bad definition must not abort the scan
log_exception e
next
end
end
end
p objectname
p retval
return retval
rescue Exception=>e
log_exception e
end
def load_avail_component_services_for(object)
  # For a ManagedEngine, list the service definitions applicable to its
  # declared volumes and databases. Returns a Hash (possibly empty) for
  # engines, nil for anything else.
  return nil unless object.is_a?(ManagedEngine)
  components = Hash.new
  if object.volumes.count > 0
    p :loading_vols
    components[:volume] = load_avail_services_for("Volume") # Array of hashes
  end
  if object.databases.count > 0
    components[:database] = load_avail_services_for("Database") # Array of hashes
  end
  components
rescue Exception => e
  log_exception e
end
def set_engine_runtime_properties(params)
  #FIX ME also need to deal with Env Variables
  # Apply runtime (container-level) property changes — currently the
  # memory cap — to the named engine, then destroy and recreate its
  # container so they take effect. Returns false (with @last_error set)
  # when the engine can't be loaded, is running, the value is unchanged,
  # or any step fails.
  engine_name = params[:engine_name]
  engine = loadManagedEngine(engine_name)
  if engine.is_a?(EnginesOSapiResult) == true
    # BUGFIX: these four error paths assigned a throwaway local
    # `last_error`; they now record the message on the instance so
    # #last_error readers see it.
    @last_error = engine.result_mesg
    return false
  end
  if engine.is_active == true
    @last_error = "Container is active"
    return false
  end
  if params.has_key?(:memory)
    if params[:memory] == engine.memory
      @last_error = "No Change in Memory Value"
      return false
    end
    if engine.update_memory(params[:memory]) == false
      @last_error = engine.last_error
      return false
    end
  end
  if engine.has_container? == true
    if destroy_container(engine) == false
      @last_error = engine.last_error
      return false
    end
  end
  if create_container(engine) == false
    @last_error = engine.last_error
    return false
  end
  return true
end
# --- Thin delegations to SystemApi: engine network config, system
# --- stats, engine/service loading and listings. ---

def set_engine_network_properties (engine, params)
  @system_api.set_engine_network_properties(engine, params)
end

def get_system_load_info
  @system_api.get_system_load_info
end

def get_system_memory_info
  @system_api.get_system_memory_info
end

def getManagedEngines
  @system_api.getManagedEngines
end

def loadManagedEngine(engine_name)
  @system_api.loadManagedEngine(engine_name)
end

def loadManagedService(service_name)
  @system_api.loadManagedService(service_name)
end

def getManagedServices
  @system_api.getManagedServices
end

def list_domains
  @system_api.list_domains
end

def list_managed_engines
  @system_api.list_managed_engines
end

def list_managed_services
  @system_api.list_managed_services
end
def destroy_container(container)
  # Remove the docker container, deregister it from the services it
  # consumed, and delete its cid file. Returns true/false.
  clear_error
  if @docker_api.destroy_container(container) != false
    container.deregister_registered
    @system_api.destroy_container(container) # removes cid file
    true
  else
    false
  end
rescue Exception => e
  container.last_error = ("Failed To Destroy " + e.to_s)
  log_exception(e)
  false
end
def delete_image(container)
  # Remove the container's image and, when that succeeds, its on-disk
  # configuration. Returns the config-removal result or false.
  clear_error
  if @docker_api.delete_image(container) == true
    @system_api.delete_container_configs(container)
  else
    false
  end
rescue Exception => e
  container.last_error = ("Failed To Delete " + e.to_s)
  log_exception(e)
  false
end
def run_system(cmd)
  # Run a shell command with stderr merged into stdout. Returns true on
  # apparent success (exit 0 and no known failure phrase in the output);
  # otherwise records the output in @last_error and returns false.
  clear_error
  begin
    cmd = cmd + " 2>&1"
    res= %x<#{cmd}>
    SystemUtils.debug_output res
    #FIXME should be case insensitive The last one is a pure kludge
    #really need to get stderr and stdout separately
    if $? == 0 && res.downcase.include?("error") == false && res.downcase.include?("fail") == false && res.downcase.include?("could not resolve hostname") == false && res.downcase.include?("unsuccessful") == false
      return true
    else
      @last_error = res
      SystemUtils.debug_output res
      return false
    end
  rescue Exception=>e
    log_exception(e)
    # BUGFIX: previously returned the undefined local `ret_val`, which
    # raised NameError from inside the rescue clause.
    return false
  end
end
# Run the one-shot engines/volbuilder container to initialise the given
# container's volumes (ownership set for `username`), cleaning up any
# stale volbuilder container/cid file before and after.
# Returns true on success, false after a logged error.
def run_volume_builder(container,username)
clear_error
begin
# a leftover cid file means a previous volbuilder run didn't clean up
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
command = "docker stop volbuilder"
run_system(command)
command = "docker rm volbuilder"
run_system(command)
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
mapped_vols = get_volbuild_volmaps container
command = "docker run --name volbuilder --memory=20m -e fw_user=" + username + " --cidfile /opt/engines/run/volbuilder.cid " + mapped_vols + " -t engines/volbuilder /bin/sh /home/setup_vols.sh "
SystemUtils.debug_output command
run_system(command)
# tear the builder down again; the cid file is removed first so a
# failed stop/rm doesn't leave a stale marker
command = "docker stop volbuilder; docker rm volbuilder"
if File.exists?(SysConfig.CidDir + "/volbuilder.cid") == true
File.delete(SysConfig.CidDir + "/volbuilder.cid")
end
res = run_system(command)
if res != true
# run_system returns the command output (not true) on failure
log_error(res)
return false
end
return true
rescue Exception=>e
log_exception(e)
return false
end
end
def create_container(container)
  # Create (docker run) the container after clearing any stale cid file
  # and run-state flags. Returns SystemApi#create_container's result on
  # success, false otherwise.
  clear_error
  begin
    if @system_api.clear_cid(container) != false
      @system_api.clear_container_var_run(container)
      if @docker_api.create_container(container) == true
        return @system_api.create_container(container)
      end
      # BUGFIX: previously fell through returning nil when docker
      # creation failed; callers test `== false` (e.g.
      # set_engine_runtime_properties), so nil read as success.
      return false
    else
      return false
    end
  rescue Exception=>e
    container.last_error=("Failed To Create " + e.to_s)
    log_exception(e)
    return false
  end
end
def rebuild_image(container)
  # Rebuild the container's image from its recorded settings by driving
  # an EngineBuilder. Returns the builder's result, false after a logged
  # exception.
  clear_error
  build_params = Hash.new
  build_params[:engine_name] = container.containerName
  build_params[:domain_name] = container.domainName
  build_params[:host_name] = container.hostName
  build_params[:env_variables] = container.environments
  build_params[:http_protocol] = container.protocol
  build_params[:repository_url] = container.repo
  build_params[:software_environment_variables] = container.environments
  builder = EngineBuilder.new(build_params, self)
  builder.rebuild_managed_container(container)
rescue Exception => e
  log_exception(e)
  false
end
# @container_name = params[:engine_name]
# @domain_name = params[:domain_name]
# @hostname = params[:host_name]
# custom_env= params[:software_environment_variables]
# # custom_env=params
# @core_api = core_api
# @http_protocol = params[:http_protocol]
# p params
# @repoName= params[:repository_url]
# @cron_job_list = Array.new
# @build_name = File.basename(@repoName).sub(/\.git$/,"")
# @workerPorts=Array.new
# @webPort=8000
# @vols=Array.new
#FIXME Kludge
# Reads in/out byte counters for a container's first network interface
# by shelling out to netstat inside the container.
#
# containerName - String name of the container to query.
#
# Returns a Hash with :in and :out byte counts as Strings, or "-1" for
# both when the counters cannot be parsed (-1 Integers on exception,
# preserved for caller compatibility).
def get_container_network_metrics(containerName)
  begin
    ret_val = Hash.new
    clear_error
    # The escaped quotes keep the literal space inside the awk program;
    # the original relied on adjacent Ruby string literals ("..." " " "..."),
    # which concatenated the pieces and dropped the quoted space.
    cmd = "docker exec " + containerName + " netstat --interfaces -e | grep bytes |head -1 | awk '{ print $2 \" \" $6}' 2>&1"
    res = %x<#{cmd}>
    vals = res.split("bytes:")
    # Splitting "bytes:<rx> bytes:<tx>" yields at least three fields; the
    # original tested vals.count < 2, which made the parse branch
    # unreachable and always reported "-1".
    if vals.count > 2
      if vals[1] != nil && vals[2] != nil
        # chop drops the trailing separator (space / newline).
        ret_val[:in] = vals[1].chop
        ret_val[:out] = vals[2].chop
      else
        ret_val[:in] = "-1"
        ret_val[:out] = "-1"
      end
    else
      ret_val[:in] = "-1"
      ret_val[:out] = "-1"
    end
    return ret_val
  rescue Exception => e
    log_exception(e)
    ret_val[:in] = -1
    ret_val[:out] = -1
    return ret_val
  end
end
# Asks the system API whether the given container has finished its
# startup sequence.
#
# Returns the system API's answer, or false if the check raises.
def is_startup_complete container
  clear_error
  @system_api.is_startup_complete(container)
rescue Exception => e
  log_exception(e)
  false
end
protected
# Assembles the docker -v options used when running the volbuilder
# against the given container: state dir, log dir, each data volume,
# plus --volumes-from the container itself.
#
# Returns the option String, or false if an exception occurs.
def get_volbuild_volmaps container
  clear_error
  state_dir = SysConfig.CidDir + "/containers/" + container.containerName + "/run/"
  log_dir = SysConfig.SystemLogRoot + "/containers/" + container.containerName
  opts = " -v " + state_dir + ":/client/state:rw "
  opts += " -v " + log_dir + ":/client/log:rw "
  unless container.volumes.nil?
    container.volumes.each_value do |vol|
      SystemUtils.debug_output vol
      # NOTE(review): every volume maps to the same /dest/fs target, so
      # only the last mount takes effect — confirm this is intended.
      opts += " -v " + vol.localpath.to_s + ":/dest/fs:rw"
    end
  end
  opts + " --volumes-from " + container.containerName
rescue Exception => e
  log_exception(e)
  false
end
# Resets the stored error message so a fresh operation starts clean.
def clear_error
  @last_error = ''
end
# Records an exception into @last_error and the system log.
#
# e - the Exception to record.
#
# The message is e.to_s followed by the backtrace, one frame per line.
# The original concatenated frames with no separator (an unreadable
# single line) and raised NoMethodError on a nil backtrace (an exception
# instance that was never raised).
def log_exception(e)
  e_str = e.to_s
  (e.backtrace || []).each do |bt|
    e_str += "\n" + bt
  end
  @last_error = e_str
  SystemUtils.log_output(e_str, 10)
end
end
|
module Ruprint #:nodoc:
module Builders #:nodoc:
class GridBuilder
  include ActionView::Helpers::TagHelper
  # String / symbol - the css id applied to the main
  # container div of the grid.
  attr_accessor :css_id
  # Array of string / symbol - any additional classes to
  # be added to the main container div of the grid.
  attr_accessor :css_classes
  # Boolean (default false) - apply the container class to the grid, for use on the main outer grid of a page
  attr_accessor :container
  # Boolean (default false) - turn on showing of the grid.
  attr_accessor :showgrid
  # Internal use
  attr_accessor :rows, :html, :template #:nodoc:

  # On creation of a new GridBuilder object, pass in the
  # template object (i.e. self when called within a helper).
  def initialize(template)
    @html = ""
    @rows = []
    @container = false
    @showgrid = false
    @css_classes = []
    @template = template
  end

  # Add a row to this grid. Use like so:
  #   grid.add_row do |row|
  #     row.add_column do |column|
  #       column.html do
  #         <p>Some content</p>
  #       end
  #     end
  #   end
  # FIX: dropped the &block parameter that was captured but never used —
  # the body already drives the block with yield.
  def add_row
    row = RowBuilder.new(@template)
    yield row
    @rows << row
  end

  # Generate the html for the grid.
  # NOTE(review): not idempotent — a second call appends the row html and
  # the container/showgrid classes again; builders are assumed single-use.
  def render
    # Add extra classes based on boolean accessors
    @css_classes << :container if @container
    @css_classes << :showgrid if @showgrid
    options = {}
    options[:id] = @css_id if @css_id.present?
    options[:class] = @css_classes.join(" ") if @css_classes.present?
    @rows.each do |row|
      @html << row.render
    end
    content_tag(:div, @html, options)
  end
end
class RowBuilder
  include ActionView::Helpers::TagHelper
  # Internal use.
  attr_accessor :columns, :html, :template #:nodoc:

  # On creation of a new RowBuilder object, pass in the
  # template object (i.e. grid.template).
  def initialize(template)
    @html = ""
    @css_classes = []   # NOTE(review): written but never read or rendered
    @columns = []
    @template = template
  end

  # Add a column to this row. Use like so:
  #   row.add_column do |column|
  #     column.width = 5
  #     column.html do
  #       <p>Some content</p>
  #     end
  #   end
  # FIX: dropped the &block parameter that was captured but never used —
  # the body already drives the block with yield.
  def add_column
    column = ColumnBuilder.new(@template)
    yield column
    @columns << column
  end

  # Returns the width of the row, by interrogating the columns.
  # NOTE(review): raises if any column has a nil width — confirm callers
  # always assign one before calling this.
  def counted_width
    @columns.inject(0) do |total, column|
      total + column.width
    end
  end

  # Generate the html for all the columns in this row; the final column
  # is tagged "last" per Blueprint CSS convention.
  def render
    @columns.each do |column|
      column.css_classes << "last" if column == @columns.last
      @html << column.render
    end
    @html
  end
end
class ColumnBuilder
  include ActionView::Helpers::TagHelper
  # String / symbol - the css id applied to this column div.
  attr_accessor :css_id
  # Array of string / symbol - any additional classes applied to this column div.
  attr_accessor :css_classes
  # Fixnum - the width of this column (columns).
  attr_accessor :width
  # Fixnum - a push or pull value (no sanatization to prevent you adding both).
  attr_accessor :push, :pull
  # Fixnum - append and prepend values (no sanatization to prevent you adding both).
  attr_accessor :append, :prepend
  # Boolean - (default false) - adds a padded box inside the column.
  attr_accessor :box
  # Boolean - (default false) - draws a border on the right of the column.
  attr_accessor :border
  # Boolean - (default false) - draws a border with extra whitespace (spans one column).
  attr_accessor :colborder
  # Internal use.
  attr_accessor :template #:nodoc:

  # Pass in the template object (i.e. row.template).
  def initialize(template)
    @html = ""
    @css_classes = []
    @template = template
    @box = false
    @border = false
    @colborder = false
  end

  # Assign the inner html for this column's div with = (single-line content).
  def html=(html)
    @html = html
  end

  # Assign the inner html for this column's div from a block (multi-line content).
  def html(&block)
    @html = @template.capture(&block)
  end

  # Build the Blueprint class list from the configured attributes, then
  # wrap the inner html in a div carrying the id/class options.
  def render
    { "span" => @width, "push" => @push, "pull" => @pull,
      "append" => @append, "prepend" => @prepend }.each do |prefix, amount|
      @css_classes << "#{prefix}-#{amount}" if amount.present?
    end
    { "box" => @box, "border" => @border, "colborder" => @colborder }.each do |name, flag|
      @css_classes << name if flag.present?
    end
    options = {}
    options[:id] = @css_id if @css_id.present?
    options[:class] = @css_classes.join(" ") if @css_classes.present?
    content_tag(:div, @html, options)
  end
end
end
end
Updated the main ruprint grid generator not to use an outer wrapper if there aren't any options specified on the main grid.
module Ruprint #:nodoc:
module Builders #:nodoc:
class GridBuilder
  include ActionView::Helpers::TagHelper
  # String / symbol - the css id applied to the main
  # container div of the grid.
  attr_accessor :css_id
  # Array of string / symbol - any additional classes to
  # be added to the main container div of the grid.
  attr_accessor :css_classes
  # Boolean (default false) - apply the container class to the grid, for use on the main outer grid of a page
  attr_accessor :container
  # Boolean (default false) - turn on showing of the grid.
  attr_accessor :showgrid
  # Internal use
  attr_accessor :rows, :html, :template #:nodoc:

  # On creation of a new GridBuilder object, pass in the
  # template object (i.e. self when called within a helper).
  def initialize(template)
    @html = ""
    @rows = []
    @container = false
    @showgrid = false
    @css_classes = []
    @template = template
  end

  # Add a row to this grid. Use like so:
  #   grid.add_row do |row|
  #     row.add_column do |column|
  #       column.html do
  #         <p>Some content</p>
  #       end
  #     end
  #   end
  # FIX: dropped the &block parameter that was captured but never used —
  # the body already drives the block with yield.
  def add_row
    row = RowBuilder.new(@template)
    yield row
    @rows << row
  end

  # Generate the html for the grid.
  # NOTE(review): not idempotent — a second call appends the row html and
  # the container/showgrid classes again; builders are assumed single-use.
  def render
    # Add extra classes based on boolean accessors
    @css_classes << :container if @container
    @css_classes << :showgrid if @showgrid
    options = {}
    options[:id] = @css_id if @css_id.present?
    options[:class] = @css_classes.join(" ") if @css_classes.present?
    @rows.each do |row|
      @html << row.render
    end
    # Only apply the wrapper div if we have properties given.
    # Should refactor so a grid declaration isn't required for subgrids.
    options.keys.present? ? content_tag(:div, @html, options) : @html
  end
end
class RowBuilder
  include ActionView::Helpers::TagHelper
  # Internal use.
  attr_accessor :columns, :html, :template #:nodoc:

  # On creation of a new RowBuilder object, pass in the
  # template object (i.e. grid.template).
  def initialize(template)
    @html = ""
    @css_classes = []   # NOTE(review): written but never read or rendered
    @columns = []
    @template = template
  end

  # Add a column to this row. Use like so:
  #   row.add_column do |column|
  #     column.width = 5
  #     column.html do
  #       <p>Some content</p>
  #     end
  #   end
  # FIX: dropped the &block parameter that was captured but never used —
  # the body already drives the block with yield.
  def add_column
    column = ColumnBuilder.new(@template)
    yield column
    @columns << column
  end

  # Returns the width of the row, by interrogating the columns.
  # NOTE(review): raises if any column has a nil width — confirm callers
  # always assign one before calling this.
  def counted_width
    @columns.inject(0) do |total, column|
      total + column.width
    end
  end

  # Generate the html for all the columns in this row; the final column
  # is tagged "last" per Blueprint CSS convention.
  def render
    @columns.each do |column|
      column.css_classes << "last" if column == @columns.last
      @html << column.render
    end
    @html
  end
end
class ColumnBuilder
  include ActionView::Helpers::TagHelper
  # String / symbol - the css id applied to this column div.
  attr_accessor :css_id
  # Array of string / symbol - any additional classes applied to this column div.
  attr_accessor :css_classes
  # Fixnum - the width of this column (columns).
  attr_accessor :width
  # Fixnum - a push or pull value (no sanatization to prevent you adding both).
  attr_accessor :push, :pull
  # Fixnum - append and prepend values (no sanatization to prevent you adding both).
  attr_accessor :append, :prepend
  # Boolean - (default false) - adds a padded box inside the column.
  attr_accessor :box
  # Boolean - (default false) - draws a border on the right of the column.
  attr_accessor :border
  # Boolean - (default false) - draws a border with extra whitespace (spans one column).
  attr_accessor :colborder
  # Internal use.
  attr_accessor :template #:nodoc:

  # Pass in the template object (i.e. row.template).
  def initialize(template)
    @html = ""
    @css_classes = []
    @template = template
    @box = false
    @border = false
    @colborder = false
  end

  # Assign the inner html for this column's div with = (single-line content).
  def html=(html)
    @html = html
  end

  # Assign the inner html for this column's div from a block (multi-line content).
  def html(&block)
    @html = @template.capture(&block)
  end

  # Build the Blueprint class list from the configured attributes, then
  # wrap the inner html in a div carrying the id/class options.
  def render
    { "span" => @width, "push" => @push, "pull" => @pull,
      "append" => @append, "prepend" => @prepend }.each do |prefix, amount|
      @css_classes << "#{prefix}-#{amount}" if amount.present?
    end
    { "box" => @box, "border" => @border, "colborder" => @colborder }.each do |name, flag|
      @css_classes << name if flag.present?
    end
    options = {}
    options[:id] = @css_id if @css_id.present?
    options[:class] = @css_classes.join(" ") if @css_classes.present?
    content_tag(:div, @html, options)
  end
end
end
end
|
module Ruta
  # Class-level registry of named routes, stored in a nested hash keyed
  # by context path.
  class Routes
    class << self
      # TODO allow sub-contextes to be mapable
      attr_reader :collection

      # Registers a route under its context section.
      # BUG FIX: `pos` takes the context pointer as an argument; the
      # original called `pos[ref]`, which raised ArgumentError (wrong
      # number of arguments) on every add.
      def add ref, route, context, flags
        @collection ||= {}
        create_section_for context
        pos(context)[ref] = Route.new(route, flags)
      end

      # NOTE(review): remove/get still address the collection root rather
      # than a context section; kept for interface compatibility.
      def remove ref
        @collection.delete(ref)
      end

      def get ref
        @collection[ref]
      end

      # Returns a copy of the named route with ":segment" placeholders in
      # its :path replaced by the given params, in order.
      def get_and_paramaterize ref, *params
        route = get(ref).dup
        # NOTE(review): backtick x-string — Opal-style inline JavaScript,
        # not a shell call, assuming this targets the Opal runtime; verify.
        segments = `#{route[:path]}.split('/')`
        path = '/' + segments.map { |item| item[0] == ':' ? params.shift : item }.join('/') + '/'
        route[:path] = path
        route
      end

      private

      # Walks the nested hash to the section addressed by pointer; an
      # empty collection is returned as-is.
      def pos pointer
        if @collection.empty?
          @collection
        else
          pointer.inject(@collection) do |tree, pos|
            tree[pos]
          end
        end
      end

      # Creates (if missing) and returns the nested section for pointer.
      def create_section_for pointer
        pointer.inject(@collection) do |tree, pos|
          tree[pos] = {} unless tree[pos]
          tree[pos]
        end
      end
    end
  end
end
Scope route storage by context: add, remove and get now resolve refs through pos(context) instead of the collection root.
module Ruta
  # Class-level registry of named routes, stored in a nested hash keyed
  # by context path.
  class Routes
    class << self
      # TODO allow sub-contextes to be mapable
      attr_reader :collection

      # Stores a Route under the given context's section of the tree.
      def add ref, route, context, flags
        @collection ||= {}
        create_section_for context
        pos(context)[ref] = Route.new(route, flags)
      end

      # Drops ref from its context's section.
      def remove context, ref
        pos(context).delete(ref)
      end

      # Fetches the Route stored for ref within context.
      def get context, ref
        pos(context)[ref]
      end

      private

      # Walks the nested hash to the section addressed by pointer; an
      # empty collection is returned as-is.
      def pos pointer
        return @collection if @collection.empty?
        pointer.inject(@collection) { |tree, key| tree[key] }
      end

      # Creates (if missing) and returns the nested section for pointer.
      def create_section_for pointer
        pointer.inject(@collection) do |tree, key|
          tree[key] ||= {}
        end
      end
    end
  end
end
|
# Gem version constant.
module Rutabaga
  VERSION = "2.0.2"
end
Bump to 2.1.0
# Gem version constant.
module Rutabaga
  VERSION = "2.1.0"
end
|
# Gem version constant.
module Rutabaga
  VERSION = "3.0.1"
end
Version 3.0.2
# Gem version constant.
module Rutabaga
  VERSION = "3.0.2"
end
|
# Gnuplot is optional: record whether it loaded instead of printing at
# load time. The original printed "gnuplot not installed" on every
# startup and still let `plot` crash later with a NameError.
begin
  require 'gnuplot'
  RVC::HAVE_GNUPLOT = true
rescue LoadError
  RVC::HAVE_GNUPLOT = false
end
# strftime pattern used to pass sample timestamps to gnuplot.
TIMEFMT = '%Y-%m-%d.%H:%M:%S'
# Sampling interval in seconds for each supported time scale.
INTERVALS = {
'realtime' => 20,
'day' => 300,
'week' => 1800,
'month' => 7200,
}
# x-axis tick label format per time scale.
DISPLAY_TIMEFMT = {
'realtime' => '%H:%M',
'day' => '%H:%M',
'week' => '%m/%d',
'month' => '%Y/%m/%d'
}
opts :plot do
  summary "Plot a graph of the given performance counters"
  arg :obj, "", :lookup => VIM::ManagedEntity
  arg :counter, "Counter name"
  opt :terminal, "Display plot on terminal"
  opt :scale, INTERVALS.keys*'/', :default => 'realtime'
end

# Plots one perf counter ("group.name.rollup") for obj over the chosen
# time scale, rendering via gnuplot (X11, or dumb terminal with -t).
def plot obj, counter_name, opts
  # Fail with a friendly message instead of a NameError when the
  # optional gnuplot gem could not be loaded.
  err "gnuplot and/or the gnuplot gem are not installed" unless defined? Gnuplot
  pm = obj._connection.serviceContent.perfManager
  group_key, counter_key, rollup_type = counter_name.split('.', 3)
  all_counters = Hash[pm.perfCounter.map { |x| [x.key, x] }]
  interval_id = INTERVALS[opts[:scale]]
  # Fetch the last 10 sampling intervals' worth of data.
  start_time = (Time.now-interval_id*10).to_datetime
  metrics = pm.QueryAvailablePerfMetric(
    :entity => obj,
    :intervalId => interval_id,
    :startTime => start_time)
  # Locate the metric whose counter matches all three name components.
  metric = metrics.find do |metric|
    counter = all_counters[metric.counterId]
    counter.groupInfo.key == group_key &&
      counter.nameInfo.key == counter_key &&
      counter.rollupType == rollup_type
  end or err "no such metric"
  counter = all_counters[metric.counterId]
  spec = {
    :entity => obj,
    :metricId => [metric],
    :intervalId => interval_id,
    :startTime => start_time
  }
  result = pm.QueryPerf(querySpec: [spec])[0]
  times = result.sampleInfo.map(&:timestamp).map { |x| x.strftime TIMEFMT }
  data = result.value[0].value
  # Normalize percent counters (values arrive multiplied by 100).
  if counter.unitInfo.key == 'percent'
    data.map! { |x| x/100 }
  end
  Gnuplot.open do |gp|
    Gnuplot::Plot.new( gp ) do |plot|
      plot.title "#{counter_name} on #{obj.name}"
      plot.ylabel counter.unitInfo.label
      plot.xlabel "Date"
      plot.terminal 'dumb' if opts[:terminal]
      plot.set 'xdata', 'time'
      plot.set 'format', "x '#{DISPLAY_TIMEFMT[opts[:scale]]}'"
      plot.set 'timefmt', TIMEFMT.inspect
      plot.data << Gnuplot::DataSet.new([times, data]) do |ds|
        ds.with = "lines"
        ds.using = '1:2'
        ds.notitle
      end
    end
  end
end
# TODO fix flickering
opts :watch do
  summary "Watch a graph of the given performance counters"
  arg :obj, "", :lookup => VIM::ManagedEntity
  arg :counter, "Counter name"
end

# Redraws the realtime terminal plot every 5 seconds until interrupted,
# rewinding the cursor with ANSI escapes so the graph repaints in place.
def watch obj, counter_name
  loop do
    plot obj, counter_name, :terminal => true, :scale => 'realtime'
    sleep 5
    height = 25
    $stdout.write "\e[#{height}A"
    height.times { $stdout.write "\e[K\n" }
    $stdout.write "\e[#{height}A"
  end
rescue Interrupt
end
opts :metrics do
summary "Display available metrics on an object"
arg :obj, nil, :lookup => VIM::ManagedEntity
end
# Lists every perf counter available on obj as a table of
# pretty name / description / unit.
def metrics obj
perfmgr = obj._connection.serviceContent.perfManager
interval = perfmgr.provider_summary(obj).refreshRate
if interval == -1
# Object does not support real time stats
interval = nil
end
res = perfmgr.QueryAvailablePerfMetric(
:entity => obj,
:intervalId => interval)
# Translate metric ids to counter metadata; uniq! dedupes in place
# (its nil-when-unchanged return is deliberately unused).
res.map! { |x| perfmgr.perfcounter_idhash[x.counterId] }.uniq!
table = Terminal::Table.new
table.add_row ['Perf metric', 'Description', 'Unit']
table.add_separator
res.sort { |a, b| a.pretty_name <=> b.pretty_name }.each do |counter|
table.add_row([counter.pretty_name, counter.nameInfo.label, counter.unitInfo.label])
end
puts table
end
opts :metric do
  summary "Retrieve detailed information about a perf metric"
  arg :obj, nil, :lookup => VIM::ManagedEntity
  arg :metric, nil, :type => :string
end

# Prints metadata (labels, summaries, rollup/stats type, realtime
# interval, instance list) for one perf metric available on obj.
def metric obj, metric
  perfmgr = obj._connection.serviceContent.perfManager
  interval = perfmgr.provider_summary(obj).refreshRate
  if interval == -1
    # Object does not support real time stats
    interval = nil
  end
  res = perfmgr.QueryAvailablePerfMetric(
    :entity => obj,
    :intervalId => interval)
  res.select! { |x| perfmgr.perfcounter_idhash[x.counterId].pretty_name == metric }
  metricInfo = perfmgr.perfcounter_hash[metric]
  puts "Metric label: #{metricInfo.nameInfo.label}"
  puts "Metric summary: #{metricInfo.nameInfo.summary}"
  puts "Unit label: #{metricInfo.unitInfo.label}"
  # BUG FIX: printed unitInfo.label twice; the summary line should show
  # unitInfo.summary (mirroring the nameInfo label/summary pair above).
  puts "Unit summary: #{metricInfo.unitInfo.summary}"
  puts "Rollup type: #{metricInfo.rollupType}"
  puts "Stats type: #{metricInfo.statsType}"
  puts "Real time interval: #{interval || 'N/A'}"
  instances = res.map(&:instance).reject(&:empty?)
  unless instances.empty?
    puts "Instances:"
    instances.map do |x|
      puts "  #{x}"
    end
  end
end
opts :stats do
summary "Retrieve performance stats for given object"
arg :metrics, nil, :type => :string
arg :obj, nil, :multi => true, :lookup => VIM::ManagedEntity
opt :samples, "Number of samples to retrieve", :type => :int
end
# Prints a table of the given comma-separated metrics for each object.
def stats metrics, objs, opts
metrics = metrics.split(",")
obj = objs.first
# NOTE(review): the refresh rate of the first object is applied to all
# objects — confirm mixed object lists behave as intended.
perfmgr = obj._connection.serviceContent.perfManager
interval = perfmgr.provider_summary(obj).refreshRate
start_time = nil
if interval == -1
# Object does not support real time stats
interval = 300
start_time = Time.now - 300 * 5
end
stat_opts = {
:interval => interval,
:startTime => start_time,
}
stat_opts[:max_samples] = opts[:samples] if opts[:samples]
res = perfmgr.retrieve_stats objs, metrics, stat_opts
table = Terminal::Table.new
table.add_row ['Object', 'Metric', 'Values', 'Unit']
table.add_separator
objs.each do |obj|
metrics.each do |metric|
stat = res[obj][:metrics][metric]
metric_info = perfmgr.perfcounter_hash[metric]
table.add_row([obj.name, metric, stat.join(','), metric_info.unitInfo.label])
end
end
puts table
end
Raise a friendly error when gnuplot is missing, instead of failing later with a NameError.
# Gnuplot is optional; remember whether it loaded so commands that need
# it can fail with a friendly message.
RVC::HAVE_GNUPLOT =
  begin
    require 'gnuplot'
    true
  rescue LoadError
    false
  end
# strftime pattern used to pass sample timestamps to gnuplot.
TIMEFMT = '%Y-%m-%d.%H:%M:%S'
# Sampling interval in seconds for each supported time scale.
INTERVALS = {
'realtime' => 20,
'day' => 300,
'week' => 1800,
'month' => 7200,
}
# x-axis tick label format per time scale.
DISPLAY_TIMEFMT = {
'realtime' => '%H:%M',
'day' => '%H:%M',
'week' => '%m/%d',
'month' => '%Y/%m/%d'
}
opts :plot do
summary "Plot a graph of the given performance counters"
arg :obj, "", :lookup => VIM::ManagedEntity
arg :counter, "Counter name"
opt :terminal, "Display plot on terminal"
opt :scale, INTERVALS.keys*'/', :default => 'realtime'
end
# Plots one perf counter ("group.name.rollup") for obj over the chosen
# time scale, rendering via gnuplot (X11, or dumb terminal with -t).
def plot obj, counter_name, opts
err "gnuplot and/or the gnuplot gem are not installed" unless RVC::HAVE_GNUPLOT
pm = obj._connection.serviceContent.perfManager
group_key, counter_key, rollup_type = counter_name.split('.', 3)
all_counters = Hash[pm.perfCounter.map { |x| [x.key, x] }]
interval_id = INTERVALS[opts[:scale]]
# Fetch the last 10 sampling intervals' worth of data.
start_time = (Time.now-interval_id*10).to_datetime
metrics = pm.QueryAvailablePerfMetric(
:entity => obj,
:intervalId => interval_id,
:startTime => start_time)
# Locate the metric whose counter matches all three name components.
metric = metrics.find do |metric|
counter = all_counters[metric.counterId]
counter.groupInfo.key == group_key &&
counter.nameInfo.key == counter_key &&
counter.rollupType == rollup_type
end or err "no such metric"
counter = all_counters[metric.counterId]
spec = {
:entity => obj,
:metricId => [metric],
:intervalId => interval_id,
:startTime => start_time
}
result = pm.QueryPerf(querySpec: [spec])[0]
times = result.sampleInfo.map(&:timestamp).map { |x| x.strftime TIMEFMT }
data = result.value[0].value
# Normalize percent counters (values arrive multiplied by 100).
if counter.unitInfo.key == 'percent'
data.map! { |x| x/100 }
end
Gnuplot.open do |gp|
Gnuplot::Plot.new( gp ) do |plot|
plot.title "#{counter_name} on #{obj.name}"
plot.ylabel counter.unitInfo.label
plot.xlabel "Date"
plot.terminal 'dumb' if opts[:terminal]
plot.set 'xdata', 'time'
plot.set 'format', "x '#{DISPLAY_TIMEFMT[opts[:scale]]}'"
plot.set 'timefmt', TIMEFMT.inspect
plot.data << Gnuplot::DataSet.new([times, data]) do |ds|
ds.with = "lines"
ds.using = '1:2'
ds.notitle
end
#puts plot.to_gplot
end
end
end
# TODO fix flickering
opts :watch do
  summary "Watch a graph of the given performance counters"
  arg :obj, "", :lookup => VIM::ManagedEntity
  arg :counter, "Counter name"
end

# Redraws the realtime terminal plot every 5 seconds until interrupted,
# rewinding the cursor with ANSI escapes so the graph repaints in place.
def watch obj, counter_name
  loop do
    plot obj, counter_name, :terminal => true, :scale => 'realtime'
    sleep 5
    height = 25
    $stdout.write "\e[#{height}A"
    height.times { $stdout.write "\e[K\n" }
    $stdout.write "\e[#{height}A"
  end
rescue Interrupt
end
opts :metrics do
summary "Display available metrics on an object"
arg :obj, nil, :lookup => VIM::ManagedEntity
end
# Lists every perf counter available on obj as a table of
# pretty name / description / unit.
def metrics obj
perfmgr = obj._connection.serviceContent.perfManager
interval = perfmgr.provider_summary(obj).refreshRate
if interval == -1
# Object does not support real time stats
interval = nil
end
res = perfmgr.QueryAvailablePerfMetric(
:entity => obj,
:intervalId => interval)
# Translate metric ids to counter metadata; uniq! dedupes in place
# (its nil-when-unchanged return is deliberately unused).
res.map! { |x| perfmgr.perfcounter_idhash[x.counterId] }.uniq!
table = Terminal::Table.new
table.add_row ['Perf metric', 'Description', 'Unit']
table.add_separator
res.sort { |a, b| a.pretty_name <=> b.pretty_name }.each do |counter|
table.add_row([counter.pretty_name, counter.nameInfo.label, counter.unitInfo.label])
end
puts table
end
opts :metric do
  summary "Retrieve detailed information about a perf metric"
  arg :obj, nil, :lookup => VIM::ManagedEntity
  arg :metric, nil, :type => :string
end

# Prints metadata (labels, summaries, rollup/stats type, realtime
# interval, instance list) for one perf metric available on obj.
def metric obj, metric
  perfmgr = obj._connection.serviceContent.perfManager
  interval = perfmgr.provider_summary(obj).refreshRate
  if interval == -1
    # Object does not support real time stats
    interval = nil
  end
  res = perfmgr.QueryAvailablePerfMetric(
    :entity => obj,
    :intervalId => interval)
  res.select! { |x| perfmgr.perfcounter_idhash[x.counterId].pretty_name == metric }
  metricInfo = perfmgr.perfcounter_hash[metric]
  puts "Metric label: #{metricInfo.nameInfo.label}"
  puts "Metric summary: #{metricInfo.nameInfo.summary}"
  puts "Unit label: #{metricInfo.unitInfo.label}"
  # BUG FIX: printed unitInfo.label twice; the summary line should show
  # unitInfo.summary (mirroring the nameInfo label/summary pair above).
  puts "Unit summary: #{metricInfo.unitInfo.summary}"
  puts "Rollup type: #{metricInfo.rollupType}"
  puts "Stats type: #{metricInfo.statsType}"
  puts "Real time interval: #{interval || 'N/A'}"
  instances = res.map(&:instance).reject(&:empty?)
  unless instances.empty?
    puts "Instances:"
    instances.map do |x|
      puts "  #{x}"
    end
  end
end
opts :stats do
summary "Retrieve performance stats for given object"
arg :metrics, nil, :type => :string
arg :obj, nil, :multi => true, :lookup => VIM::ManagedEntity
opt :samples, "Number of samples to retrieve", :type => :int
end
# Prints a table of the given comma-separated metrics for each object.
def stats metrics, objs, opts
metrics = metrics.split(",")
obj = objs.first
# NOTE(review): the refresh rate of the first object is applied to all
# objects — confirm mixed object lists behave as intended.
perfmgr = obj._connection.serviceContent.perfManager
interval = perfmgr.provider_summary(obj).refreshRate
start_time = nil
if interval == -1
# Object does not support real time stats
interval = 300
start_time = Time.now - 300 * 5
end
stat_opts = {
:interval => interval,
:startTime => start_time,
}
stat_opts[:max_samples] = opts[:samples] if opts[:samples]
res = perfmgr.retrieve_stats objs, metrics, stat_opts
table = Terminal::Table.new
table.add_row ['Object', 'Metric', 'Values', 'Unit']
table.add_separator
objs.each do |obj|
metrics.each do |metric|
stat = res[obj][:metrics][metric]
metric_info = perfmgr.perfcounter_hash[metric]
table.add_row([obj.name, metric, stat.join(','), metric_info.unitInfo.label])
end
end
puts table
end
|
# Copyright (c) 2013 VMware, Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'rbvmomi/vim'
require 'rbvmomi/pbm'
PBM = RbVmomi::PBM
# Reopen VIM to lazily attach an SPBM (storage policy) connection.
class RbVmomi::VIM
  # Lazily connects to the SPBM endpoint for this VIM connection.
  def pbm
    @pbm ||= PBM.connect self, :insecure => true
  end

  # Stores the given PBM connection.
  # BUG FIX: the original discarded its argument and always assigned nil.
  # Existing callers only ever pass nil (to force a reconnect), so
  # honoring the argument is backward compatible.
  def pbm= x
    @pbm = x
  end
end
RbVmomi::VIM::Datacenter
# Reopen Datacenter to expose SPBM storage profiles in the RVC tree
# under storage/vmprofiles/.
class RbVmomi::VIM::Datacenter
def rvc_list_children_profiles
{
'storage' => RVC::FakeFolder.new(self, :rvc_children_storage),
}
end
def rvc_children_storage
{
'vmprofiles' => RVC::FakeFolder.new(self, :rvc_children_profiles),
}
end
# All REQUIREMENT-category storage profiles keyed by name; each profile
# object is tagged with its PBM connection and this datacenter so later
# commands can retrieve them via instance_variable_get.
def rvc_children_profiles
conn = _connection
_catch_spbm_resets(conn) do
pbm = conn.pbm
pm = pbm.serviceContent.profileManager
profileIds = pm.PbmQueryProfile(
:resourceType => {:resourceType => "STORAGE"},
:profileCategory => "REQUIREMENT"
)
# PbmRetrieveContent rejects an empty id list, so short-circuit.
if profileIds.length > 0
profiles = pm.PbmRetrieveContent(:profileIds => profileIds)
else
profiles = []
end
Hash[profiles.map do |x|
x.instance_variable_set(:@connection, pbm)
x.instance_variable_set(:@dc, self)
[x.name, x]
end]
end
end
end
RbVmomi::PBM::PbmCapabilityInstance
# Reopen to give capability instances a dotted "namespace.id" name.
class RbVmomi::PBM::PbmCapabilityInstance
  def name
    [id.namespace, id.id].join('.')
  end
end
RbVmomi::PBM::PbmCapabilityMetadata
# Reopen to give capability metadata a dotted "namespace.id" name.
class RbVmomi::PBM::PbmCapabilityMetadata
  def name
    [id.namespace, id.id].join('.')
  end
end
RbVmomi::VIM::VirtualDisk
# Reopen VirtualDisk to show the chain of vSAN backing object ids.
class RbVmomi::VIM::VirtualDisk
  def rvc_display_info_vsan
    return unless backing.backingObjectId
    puts "VSAN objects:"
    node = backing
    # Walk the parent chain, printing each backing object id.
    while node
      puts "  #{node.backingObjectId}"
      node = node.parent
    end
  end
end
RbVmomi::VIM::Datastore
# Reopen Datastore: expose it as an SPBM placement hub and list its
# associated capability profiles under a capabilitysets/ folder.
class RbVmomi::VIM::Datastore
# Wraps this datastore as a PBM placement hub reference.
def to_pbmhub
PBM::PbmPlacementPlacementHub(:hubType => "Datastore", :hubId => _ref)
end
# Profiles associated with this datastore (see the ManagedObject helper).
def pbm_capability_profiles
pbm_associated_profiles
end
def rvc_list_children_capabilitysets
{
'capabilitysets' => RVC::FakeFolder.new(self, :rvc_children_capabilitysets),
}
end
# Associated profiles keyed by name; each profile is tagged with the PBM
# connection and the origin object (stored as @dc) for later lookups.
def rvc_children_capabilitysets
conn = _connection
_catch_spbm_resets(conn) do
pbm = _connection.pbm
profiles = pbm_capability_profiles
Hash[profiles.map do |x|
x.instance_variable_set(:@connection, pbm)
x.instance_variable_set(:@dc, self)
[x.name, x]
end]
end
end
end
RbVmomi::VIM::VirtualMachine
# Reopen VirtualMachine to surface its associated storage profiles
# under a vmprofiles/ folder in the RVC tree.
class RbVmomi::VIM::VirtualMachine
def rvc_list_children_vmprofiles
{
'vmprofiles' => RVC::FakeFolder.new(self, :rvc_children_vmprofiles),
}
end
# Associated profiles keyed by name; each profile is tagged with the PBM
# connection and this object (stored as @dc) for later lookups.
def rvc_children_vmprofiles
conn = _connection
_catch_spbm_resets(conn) do
pbm = _connection.pbm
profiles = pbm_associated_profiles
Hash[profiles.map do |x|
x.instance_variable_set(:@connection, pbm)
x.instance_variable_set(:@dc, self)
[x.name, x]
end]
end
end
end
RbVmomi::VIM::ManagedObject
# Reopen ManagedObject with SPBM helpers shared by all managed entities.
class RbVmomi::VIM::ManagedObject
# Wraps this object as a PBM server object reference; the wsdl type name
# is lower-camel-cased to match the PBM objectType convention.
def to_pbmobjref
type = self.class.wsdl_name
type = "%s%s" % [type[0].downcase, type[1..-1]]
PBM::PbmServerObjectRef(
:objectType => type,
:key => _ref,
:serverUuid => _connection.serviceContent.about.instanceUuid
)
end
# Profiles associated with this entity, fetched from the profile manager.
def pbm_associated_profiles
conn = _connection
_catch_spbm_resets(conn) do
pbm = _connection.pbm
pm = pbm.serviceContent.profileManager
ids = pm.QueryAssociatedProfile(:entity => self.to_pbmobjref)
# NOTE(review): lowercase retrieveProfileContent — presumably an RbVmomi
# helper; the raw SOAP call used elsewhere is PbmRetrieveContent. Verify.
pm.retrieveProfileContent(:profileIds => ids)
end
end
# Runs the block, converting an EOFError (dropped SPBM session) into a
# friendly retry message after discarding the cached PBM connection.
def _catch_spbm_resets(conn)
begin
yield
rescue EOFError
if conn
conn.pbm = nil
end
raise "Connection to SPBM timed out, try again"
end
end
end
RbVmomi::VIM::VirtualMachine
# Reopen VirtualMachine to express its disks as PBM server object refs.
class RbVmomi::VIM::VirtualMachine
  # One PbmServerObjectRef per virtual disk, keyed "<vmRef>:<diskKey>".
  def disks_pbmobjref
    disks.map do |d|
      PBM::PbmServerObjectRef(
        :objectType => "virtualDiskId",
        :key => "#{_ref}:#{d.key}",
        :serverUuid => _connection.serviceContent.about.instanceUuid
      )
    end
  end

  # The VM itself plus every disk, as PBM object refs.
  def all_pbmobjref
    [to_pbmobjref] + disks_pbmobjref
  end
end
RbVmomi::PBM::PbmPlacementSolver
# Reopen the placement solver with a compatibility query helper.
class RbVmomi::PBM::PbmPlacementSolver
  # Returns the subset of datastores compatible with the given profile.
  #
  # datastores - array of VIM::Datastore objects.
  # profileIds - array holding exactly one PbmProfileId (multiple
  #              profiles are not supported yet).
  def find_compatible_datastores datastores, profileIds
    if profileIds.length > 1
      # BUG FIX: was `raise Exception("...")`, which itself crashed with
      # NoMethodError (Exception is not a method) instead of raising the
      # intended message.
      raise ArgumentError, "Passing in more than one profile currently not supported"
    end
    # Index datastores by moref so solver results can be mapped back.
    dsMoMap = Hash[datastores.map{|x| [x._ref, x]}]
    results = self.Solve(
      :hubsToSearch => datastores.map{|x| x.to_pbmhub},
      :requirements => [
        {
          # The solver wants a subject; a fake VM id suffices for a pure
          # datastore-compatibility query.
          :subject => PBM.PbmPlacementPlacementSubject(
            :subjectType => "VirtualMachine",
            :subjectId => "fake"
          ),
          :requirement => [
            PBM::PbmPlacementCapabilityProfileRequirement(
              :requirementType => "type",
              :mandatory => true,
              :profileId => profileIds[0]
            )
          ],
        }
      ],
      :partialSolution => false
    )
    results.map do |x|
      dsMoMap[x.subjectAssignment[0].hub.hubId]
    end
  end
end
RbVmomi::PBM::PbmCapabilityProfile
# SPBM capability profile shown as an RVC inventory object with
# datastores/ and vms/ child folders.
class RbVmomi::PBM::PbmCapabilityProfile
  include InventoryObject

  def children
    {
      'datastores' => RVC::FakeFolder.new(self, :rvc_children_datastores),
      'vms' => RVC::FakeFolder.new(self, :rvc_children_vms),
    }
  end

  # VMs associated with this profile, keyed by name.
  def rvc_children_vms
    pbm = @connection
    vim = @dc._connection
    pc = vim.propertyCollector
    # BUG FIX: `pm` was never assigned, so this method raised NameError on
    # every call; fetch the profile manager from the PBM connection the
    # same way the rest of the file does.
    pm = pbm.serviceContent.profileManager
    vms = pm.QueryAssociatedEntity(
      :profile => self.profileId,
      :entityType => 'virtualMachine'
    )
    vms = vms.map do |ref|
      VIM::VirtualMachine(vim, ref.key)
    end
    props = pc.collectMultiple(vms, 'name')
    Hash[props.map do |vm, vm_props|
      [vm_props['name'], vm]
    end]
  end

  # Datastores compatible with this profile, keyed by name.
  def rvc_children_datastores
    pbm = @connection
    vim = @dc._connection
    pc = vim.propertyCollector
    _catch_spbm_resets(vim) do
      solver = pbm.serviceContent.placementSolver
      datastores = solver.find_compatible_datastores @dc.datastore, [profileId]
      props = pc.collectMultiple(datastores, 'name')
      Hash[props.map do |ds, ds_props|
        [ds_props['name'], ds]
      end]
    end
  end

  # Pretty-prints name, id, type and each rule-set's constraints.
  def display_info
    super
    puts "Name: #{name}"
    puts "Description:"
    puts description
    puts "ProfileId: #{profileId.uniqueId}"
    puts "Type: #{resourceType.resourceType} - #{profileCategory}"
    puts "Rule-Sets:"
    constraints.subProfiles.each_with_index do |sub, i|
      puts "  Rule-Set ##{i + 1}:"
      sub.capability.each do |rule|
        instances = rule.constraint.map{|c| c.propertyInstance}.flatten
        if instances.length > 1
          raise "Can't deal with multiple constraints in single rule"
        end
        value = instances[0].value
        # Ranges render as "min - max".
        if value.is_a?(RbVmomi::PBM::PbmCapabilityRange)
          value = "#{value.min} - #{value.max}"
        end
        puts "    #{rule.name}: #{value}"
      end
    end
  end
end
opts :profile_delete do
  summary "Delete a VM Storage Profile"
  arg :profile, nil, :lookup => RbVmomi::PBM::PbmCapabilityProfile, :multi => true
end

# Deletes the given storage profiles through the profile manager of the
# PBM connection stashed on the first profile object.
def profile_delete profiles
  return if profiles.empty?
  _catch_spbm_resets(nil) do
    pbm = profiles.first.instance_variable_get(:@connection)
    pbm.serviceContent.profileManager.PbmDelete(:profileId => profiles.map(&:profileId))
  end
end
opts :profile_apply do
summary "Apply a VM Storage Profile. Pushed profile content to Storage system"
arg :profile, nil, :lookup => RbVmomi::PBM::PbmCapabilityProfile, :multi => true
end
# Pushes each profile's content to the storage system and watches the
# reconfiguration tasks it spawns until they complete.
def profile_apply profiles
if profiles.length == 0
return
end
# The PBM connection and origin object were stashed on the profile
# objects when they were listed (see rvc_children_profiles).
pbm = profiles.first.instance_variable_get(:@connection)
dc = profiles.first.instance_variable_get(:@dc)
vim = dc._connection
_catch_spbm_resets(vim) do
pm = pbm.serviceContent.profileManager
results = pm.applyProfile(:profiles => profiles.map{|x| x.profileId})
# Each outcome carries the MoIDs of the spawned reconfigure tasks.
tasks = results.map{|x| x.reconfigOutcome.map{|y| y.taskMoid}}.flatten
tasks = tasks.map{|x| VIM::Task(vim, x)}
progress(tasks)
end
end
opts :profile_create do
  summary "Create a VM Storage Profile"
  arg :name, nil, :type => :string
  opt :description, "Description", :type => :string
  opt :rule, "Rule in format <provider>.<capability>=<value>", :type => :string, :multi => true
end

# Creates a storage requirement profile from --rule options of the form
# <provider>.<capability>=<value>[,<value>], validating each rule against
# the capability metadata advertised by the (currently VSAN-only) vendor.
def profile_create profile_name, opts
  dc, = lookup '~'
  conn = dc._connection
  _catch_spbm_resets(conn) do
    pbm = conn.pbm
    pm = pbm.serviceContent.profileManager
    rules = opts[:rule] || []
    resType = {:resourceType => "STORAGE"}
    # Need to support other vendors too
    cm = pm.PbmFetchCapabilityMetadata(
      :resourceType => resType,
      :vendorUuid => "com.vmware.storage.vsan"
    )
    capabilities = cm.map{|x| x.capabilityMetadata}.flatten
    constraints = rules.map do |rule_str|
      name, values_str = rule_str.split("=", 2)
      if !values_str
        err "Rule is malformed: #{rule_str}, should be <provider>.<capability>=<value>"
      end
      ns, id = name.split('.', 2)
      if !id
        err "Rule is malformed: #{rule_str}, should be <provider>.<capability>=<value>"
      end
      capability = capabilities.find{|x| x.name == name}
      if !capability
        err "Capability #{name} unknown"
      end
      # Coerce the raw comma-separated strings into the declared type.
      type = capability.propertyMetadata[0].type
      values = values_str.split(',')
      if type.typeName == "XSD_INT"
        values = values.map{|x| RbVmomi::BasicTypes::Int.new(x.to_i)}
      end
      if type.typeName == "XSD_BOOLEAN"
        values = values.map{|x| (x =~ /(true|True|1|yes|Yes)/) != nil}
      end
      if type.is_a?(PBM::PbmCapabilityGenericTypeInfo) && type.genericTypeName == "VMW_RANGE"
        if values.length != 2
          err "#{name} is a range, need to specify 2 values"
        end
        value = PBM::PbmCapabilityTypesRange(:min => values[0], :max => values[1])
      elsif values.length == 1
        value = values.first
      else
        # BUG FIX: referenced undefined `value_str` (NameError) whenever a
        # non-range rule had multiple values; use values_str.
        err "Value malformed: #{values_str}"
      end
      {
        :id => {
          :namespace => ns,
          :id => id
        },
        :constraint => [{
          :propertyInstance => [{
            :id => id,
            :value => value
          }]
        }]
      }
    end
    pm.PbmCreate(
      :createSpec => {
        :name => profile_name,
        :description => opts[:description],
        :resourceType => resType,
        :constraints => PBM::PbmCapabilitySubProfileConstraints(
          :subProfiles => [
            PBM::PbmCapabilitySubProfile(
              :name => "Object",
              :capability => constraints
            )
          ]
        )
      }
    )
  end
end
opts :device_change_storage_profile do
  summary "Change storage profile of a virtual disk"
  arg :device, nil, :lookup => VIM::VirtualDevice, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Associates each given virtual device with the storage profile.
# Devices are grouped by owning VM so each VM receives a single
# ReconfigVM_Task.
def device_change_storage_profile devs, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  vm_devs = devs.group_by(&:rvc_vm)
  conn = vm_devs.keys.first._connection
  _catch_spbm_resets(conn) do
    # Profile specs on device configs require the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vm_devs.map do |vm, my_devs|
        spec = {
          :deviceChange => my_devs.map do |dev|
            {
              :operation => :edit,
              :device => dev,
              :profile => profile,
            }
          end
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :check_compliance do
  summary "Check compliance"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
end

# Checks storage-profile compliance for each VM namespace and each of
# its virtual disks, printing an entity / profile / compliance table
# followed by a per-status summary count.
def check_compliance vms
  dc, = lookup '~'
  conn = dc._connection
  _catch_spbm_resets(conn) do
    pbm = conn.pbm
    pm = pbm.serviceContent.profileManager
    cm = pbm.serviceContent.complianceManager
    # One PbmServerObjectRef per VM plus one per disk (all_pbmobjref).
    compliance = cm.PbmCheckCompliance(:entities => vms.map do |vm|
      vm.all_pbmobjref
    end.flatten)
    # entity key -> profile uniqueId, and entity key -> status.
    profile_ids = Hash[compliance.map{|x| [x.entity.key, x.profile.uniqueId]}]
    compliances = Hash[compliance.map{|x| [x.entity.key, x.complianceStatus]}]
    profiles = nil
    begin
      profileIds = profile_ids.values.uniq.compact.map do |x|
        PBM::PbmProfileId(:uniqueId => x)
      end
      if profileIds.length > 0
        profiles = pm.PbmRetrieveContent(
          :profileIds => profileIds
        )
      else
        profiles = []
      end
    rescue Exception => ex
      # Debug aid: dump the ids that broke retrieval, then re-raise so
      # the caller still sees the original error.
      pp "#{ex.class}: #{ex.message}"
      pp profile_ids
      raise ex
    end
    # Resolve profile ids to display names, falling back to the raw id.
    profiles = Hash[profiles.map{|x| [x.profileId.uniqueId, x.name]}]
    profiles = Hash[profile_ids.map{|k,v| [k, profiles[v] || v]}]
    t = Terminal::Table.new()
    t << ['VM/Virtual Disk', 'Profile', 'Compliance']
    t.add_separator
    vms.each do |vm|
      t << [
        vm.name,
        profiles[vm._ref] || "unknown",
        compliances[vm._ref] || "unknown",
      ]
      vm.disks.each do |disk|
        # Disk entities are keyed "<vm-moref>:<device-key>"; see
        # VirtualMachine#disks_pbmobjref.
        id = "#{vm._ref}:#{disk.key}"
        t << [
          " #{disk.deviceInfo.label}",
          profiles[id] || "unknown",
          compliances[id] || "unknown",
        ]
      end
    end
    puts t
    puts ""
    # Count entities per compliance status.
    stats = Hash[compliances.values.group_by{|x| x}.map{|k,v| [k, v.length]}]
    stats.sort_by{|k,v| k}.each do |type, count|
      puts "Number of '#{type}' entities: #{count}"
    end
  end
end
opts :namespace_change_storage_profile do
  summary "Change storage profile of VM namespace"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Assigns the given storage profile to each VM's home namespace only;
# disks are untouched (see vm_change_storage_profile for both).
def namespace_change_storage_profile vms, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  conn = vms.first._connection
  _catch_spbm_resets(conn) do
    # vmProfile in the reconfigure spec requires the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vms.map do |vm|
        spec = {
          :vmProfile => profile,
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :vm_change_storage_profile do
  summary "Change storage profile of VM namespace and its disks"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Assigns the given storage profile to each VM's home namespace AND all
# of its virtual disks in a single reconfigure task per VM.
def vm_change_storage_profile vms, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  conn = vms.first._connection
  _catch_spbm_resets(conn) do
    # Profile specs require the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vms.map do |vm|
        disks = vm.disks
        spec = {
          :vmProfile => profile,
          :deviceChange => disks.map do |dev|
            {
              :operation => :edit,
              :device => dev,
              :profile => profile,
            }
          end
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :device_add_disk do
  summary "Add a hard drive to a virtual machine"
  arg :vm, nil, :lookup => VIM::VirtualMachine
  arg :path, "Filename on the datastore", :lookup_parent => VIM::Datastore::FakeDatastoreFolder, :required => false
  opt :size, 'Size', :default => '10G'
  opt :controller, 'Virtual controller', :type => :string, :lookup => VIM::VirtualController
  opt :file_op, 'File operation (create|reuse|replace)', :default => 'create'
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Adds a thin-provisioned virtual disk to +vm+, optionally tagged with a
# storage profile. When no datastore path is given, the vmdk is placed
# next to the VM's config file.
def device_add_disk vm, path, opts
  controller, unit_number = pick_controller vm, opts[:controller], [VIM::VirtualSCSIController, VIM::VirtualIDEController]
  id = "disk-#{controller.key}-#{unit_number}"
  if path
    dir, file = *path
    filename = "#{dir.datastore_path}/#{file}"
  else
    filename = "#{File.dirname(vm.summary.config.vmPathName)}/#{id}.vmdk"
  end
  # 'reuse' maps to no fileOperation, i.e. attach an existing vmdk.
  opts[:file_op] = nil if opts[:file_op] == 'reuse'
  conn = vm._connection
  _run_with_rev(conn, "dev") do
    profile = nil
    if opts[:profile]
      profile = [VIM::VirtualMachineDefinedProfileSpec(
        :profileId => opts[:profile].profileId.uniqueId
      )]
    end
    spec = {
      :deviceChange => [
        {
          :operation => :add,
          :fileOperation => opts[:file_op],
          :device => VIM::VirtualDisk(
            :key => -1,
            :backing => VIM.VirtualDiskFlatVer2BackingInfo(
              :fileName => filename,
              :diskMode => :persistent,
              :thinProvisioned => true
            ),
            # NOTE(review): divides by 1000, not 1024 — assumes
            # MetricNumber yields decimal-unit bytes; confirm intended
            # KB semantics.
            :capacityInKB => MetricNumber.parse(opts[:size]).to_i/1000,
            :controllerKey => controller.key,
            :unitNumber => unit_number
          ),
          :profile => profile,
        },
      ]
    }
    task = vm.ReconfigVM_Task(:spec => spec)
    result = progress([task])[task]
    if result == nil
      # A nil result means the task succeeded; report the new disk.
      new_device = vm.collect('config.hardware.device')[0].grep(VIM::VirtualDisk).last
      puts "Added device #{new_device.name}"
    end
  end
end
# Picks a controller for a new device plus the next free unit number on
# it. Uses the caller-supplied controller when given; otherwise selects
# the first controller of an acceptable class with fewer than two
# attached devices. Errors out when no controller qualifies.
def pick_controller vm, controller, controller_classes
  devices, = vm.collect 'config.hardware.device'
  unless controller
    controller = devices.find do |candidate|
      acceptable = controller_classes.any? { |klass| candidate.is_a? klass }
      acceptable && candidate.device.length < 2
    end
  end
  err "no suitable controller found" unless controller
  on_controller = devices.select { |dev| dev.controllerKey == controller.key }
  highest = on_controller.map(&:unitNumber).max
  next_unit = highest.nil? ? 0 : highest + 1
  [controller, next_unit]
end
# Temporarily switches the connection's API revision to +rev+ for the
# duration of the block, restoring the previous revision afterwards —
# even when the block raises.
def _run_with_rev conn, rev
  saved = conn.rev
  begin
    conn.rev = rev
    yield
  ensure
    conn.rev = saved
  end
end
# Runs the block, translating an SPBM connection reset (EOFError) into a
# user-facing error. The cached PBM connection on +conn+ is dropped so
# the next command reconnects cleanly.
def _catch_spbm_resets(conn)
  yield
rescue EOFError
  conn.pbm = nil if conn
  err "Connection to SPBM timed out, try again"
end
Add missing include to spbm.rb
# Copyright (c) 2013 VMware, Inc. All Rights Reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
require 'rbvmomi/vim'
require 'rbvmomi/pbm'
require 'rvc/vim'
PBM = RbVmomi::PBM
class RbVmomi::VIM
  # Lazily-created SPBM (storage policy) connection that piggybacks on
  # this VIM session.
  def pbm
    @pbm ||= PBM.connect self, :insecure => true
  end

  # Note: the assigned value is intentionally discarded — any assignment
  # simply drops the cached connection so the next #pbm call reconnects.
  def pbm= x
    @pbm = nil
  end
end
RbVmomi::VIM::Datacenter
class RbVmomi::VIM::Datacenter
  # RVC inventory hook: exposes a "storage" folder under the datacenter.
  def rvc_list_children_profiles
    {
      'storage' => RVC::FakeFolder.new(self, :rvc_children_storage),
    }
  end

  # Contents of the "storage" folder: a "vmprofiles" folder.
  def rvc_children_storage
    {
      'vmprofiles' => RVC::FakeFolder.new(self, :rvc_children_profiles),
    }
  end

  # Lists all REQUIREMENT-category storage profiles, keyed by name.
  # Each profile object is tagged with the PBM connection and this
  # datacenter so later commands (profile_delete, profile_apply) can
  # recover them via instance_variable_get.
  def rvc_children_profiles
    conn = _connection
    _catch_spbm_resets(conn) do
      pbm = conn.pbm
      pm = pbm.serviceContent.profileManager
      profileIds = pm.PbmQueryProfile(
        :resourceType => {:resourceType => "STORAGE"},
        :profileCategory => "REQUIREMENT"
      )
      if profileIds.length > 0
        profiles = pm.PbmRetrieveContent(:profileIds => profileIds)
      else
        profiles = []
      end
      Hash[profiles.map do |x|
        x.instance_variable_set(:@connection, pbm)
        x.instance_variable_set(:@dc, self)
        [x.name, x]
      end]
    end
  end
end
RbVmomi::PBM::PbmCapabilityInstance
class RbVmomi::PBM::PbmCapabilityInstance
  # Fully-qualified capability name in "<namespace>.<id>" form.
  def name
    [self.id.namespace, self.id.id].join(".")
  end
end
RbVmomi::PBM::PbmCapabilityMetadata
class RbVmomi::PBM::PbmCapabilityMetadata
  # Fully-qualified capability name in "<namespace>.<id>" form; matched
  # against user-supplied rule names in profile_create.
  def name
    [self.id.namespace, self.id.id].join(".")
  end
end
RbVmomi::VIM::VirtualDisk
class RbVmomi::VIM::VirtualDisk
  # Prints the chain of VSAN backing-object IDs for this disk (the base
  # backing plus any snapshot parents). Prints nothing when the disk is
  # not VSAN-backed.
  def rvc_display_info_vsan
    if self.backing.backingObjectId
      puts "VSAN objects:"
      backing = self.backing
      while backing
        puts " #{backing.backingObjectId}"
        backing = backing.parent
      end
    end
  end
end
RbVmomi::VIM::Datastore
class RbVmomi::VIM::Datastore
  # SPBM placement-hub reference for this datastore.
  def to_pbmhub
    PBM::PbmPlacementPlacementHub(:hubType => "Datastore", :hubId => _ref)
  end

  # Profiles associated with this datastore (ManagedObject helper).
  def pbm_capability_profiles
    pbm_associated_profiles
  end

  # RVC inventory hook: exposes a "capabilitysets" folder.
  def rvc_list_children_capabilitysets
    {
      'capabilitysets' => RVC::FakeFolder.new(self, :rvc_children_capabilitysets),
    }
  end

  # Associated profiles keyed by name, tagged with the PBM connection.
  # NOTE(review): @dc is set to the datastore itself here, not a
  # Datacenter as in rvc_children_profiles — confirm downstream users
  # only call _connection on it.
  def rvc_children_capabilitysets
    conn = _connection
    _catch_spbm_resets(conn) do
      pbm = _connection.pbm
      profiles = pbm_capability_profiles
      Hash[profiles.map do |x|
        x.instance_variable_set(:@connection, pbm)
        x.instance_variable_set(:@dc, self)
        [x.name, x]
      end]
    end
  end
end
RbVmomi::VIM::VirtualMachine
class RbVmomi::VIM::VirtualMachine
  # RVC inventory hook: exposes a "vmprofiles" folder under the VM.
  def rvc_list_children_vmprofiles
    {
      'vmprofiles' => RVC::FakeFolder.new(self, :rvc_children_vmprofiles),
    }
  end

  # Profiles associated with this VM, keyed by name and tagged with the
  # PBM connection for use by the profile commands.
  # NOTE(review): @dc is set to the VM itself here, not a Datacenter —
  # confirm downstream users only need _connection from it.
  def rvc_children_vmprofiles
    conn = _connection
    _catch_spbm_resets(conn) do
      pbm = _connection.pbm
      profiles = pbm_associated_profiles
      Hash[profiles.map do |x|
        x.instance_variable_set(:@connection, pbm)
        x.instance_variable_set(:@dc, self)
        [x.name, x]
      end]
    end
  end
end
RbVmomi::VIM::ManagedObject
class RbVmomi::VIM::ManagedObject
  # SPBM object reference for this managed object. objectType is the
  # WSDL type name with its first letter lower-cased (e.g.
  # "VirtualMachine" -> "virtualMachine").
  def to_pbmobjref
    type = self.class.wsdl_name
    type = "%s%s" % [type[0].downcase, type[1..-1]]
    PBM::PbmServerObjectRef(
      :objectType => type,
      :key => _ref,
      :serverUuid => _connection.serviceContent.about.instanceUuid
    )
  end

  # Retrieves the storage profiles associated with this object.
  # NOTE(review): QueryAssociatedProfile / retrieveProfileContent casing
  # differs from the Pbm-prefixed calls used elsewhere in this file —
  # confirm these names against the rbvmomi PBM bindings.
  def pbm_associated_profiles
    conn = _connection
    _catch_spbm_resets(conn) do
      pbm = _connection.pbm
      pm = pbm.serviceContent.profileManager
      ids = pm.QueryAssociatedProfile(:entity => self.to_pbmobjref)
      pm.retrieveProfileContent(:profileIds => ids)
    end
  end

  # Instance-method variant of the top-level helper: drops the cached
  # PBM connection on EOFError, then raises a RuntimeError (the command
  # helper `err` is not available on managed objects).
  def _catch_spbm_resets(conn)
    begin
      yield
    rescue EOFError
      if conn
        conn.pbm = nil
      end
      raise "Connection to SPBM timed out, try again"
    end
  end
end
RbVmomi::VIM::VirtualMachine
class RbVmomi::VIM::VirtualMachine
  # SPBM references for each virtual disk; keys use the
  # "<vm-moref>:<device-key>" form that check_compliance looks up.
  def disks_pbmobjref
    disks.map do |disk|
      PBM::PbmServerObjectRef(
        :objectType => "virtualDiskId",
        :key => "#{self._ref}:#{disk.key}",
        :serverUuid => _connection.serviceContent.about.instanceUuid
      )
    end
  end

  # The VM's own (namespace) reference plus one reference per disk.
  def all_pbmobjref
    [to_pbmobjref] + disks_pbmobjref
  end
end
RbVmomi::PBM::PbmPlacementSolver
class RbVmomi::PBM::PbmPlacementSolver
  # Returns the subset of +datastores+ that satisfy the (single) profile
  # in +profileIds+, by asking the placement solver which hubs are
  # compatible.
  #
  # Raises ArgumentError when more than one profile is passed — the
  # request below encodes only a single requirement.
  def find_compatible_datastores datastores, profileIds
    if profileIds.length > 1
      # Fixed: `raise Exception("...")` invoked a non-existent Exception()
      # method and died with NoMethodError; raise class + message instead.
      raise ArgumentError, "Passing in more than one profile currently not supported"
    end
    # Map datastore morefs back to datastore objects for the result.
    dsMoMap = Hash[datastores.map{|x| [x._ref, x]}]
    results = self.Solve(
      :hubsToSearch => datastores.map{|x| x.to_pbmhub},
      :requirements => [
        {
          :subject => PBM.PbmPlacementPlacementSubject(
            :subjectType=>"VirtualMachine",
            :subjectId=>"fake"
          ),
          :requirement => [
            PBM::PbmPlacementCapabilityProfileRequirement(
              :requirementType => "type",
              :mandatory => true,
              :profileId => profileIds[0]
            )
          ],
        }
      ],
      :partialSolution => false
    )
    results.map do |x|
      dsMoMap[x.subjectAssignment[0].hub.hubId]
    end
  end
end
RbVmomi::PBM::PbmCapabilityProfile
class RbVmomi::PBM::PbmCapabilityProfile
  include InventoryObject

  # RVC inventory children: compatible datastores and associated VMs.
  def children
    {
      'datastores' => RVC::FakeFolder.new(self, :rvc_children_datastores),
      'vms' => RVC::FakeFolder.new(self, :rvc_children_vms),
    }
  end

  # VMs associated with this profile, keyed by VM name.
  def rvc_children_vms
    pbm = @connection
    vim = @dc._connection
    pc = vim.propertyCollector
    # Fixed: `pm` was never defined here (NameError at runtime); fetch
    # the profile manager from the PBM connection like every other call
    # site in this file.
    pm = pbm.serviceContent.profileManager
    vms = pm.QueryAssociatedEntity(
      :profile => self.profileId,
      :entityType => 'virtualMachine'
    )
    vms = vms.map do |ref|
      VIM::VirtualMachine(vim, ref.key)
    end
    props = pc.collectMultiple(vms, 'name')
    Hash[props.map do |vm, vm_props|
      [vm_props['name'], vm]
    end]
  end

  # Datastores compatible with this profile, keyed by datastore name.
  def rvc_children_datastores
    pbm = @connection
    vim = @dc._connection
    pc = vim.propertyCollector
    _catch_spbm_resets(vim) do
      solver = pbm.serviceContent.placementSolver
      datastores = solver.find_compatible_datastores @dc.datastore, [profileId]
      props = pc.collectMultiple(datastores, 'name')
      Hash[props.map do |ds, ds_props|
        [ds_props['name'], ds]
      end]
    end
  end

  # Pretty-prints the profile: identity, type, and each rule-set with
  # its capability constraints (ranges rendered as "min - max").
  def display_info
    super
    puts "Name: #{name}"
    puts "Description:"
    puts description
    puts "ProfileId: #{profileId.uniqueId}"
    puts "Type: #{resourceType.resourceType} - #{profileCategory}"
    puts "Rule-Sets:"
    constraints.subProfiles.each_with_index do |sub, i|
      puts " Rule-Set ##{i + 1}:"
      sub.capability.each do |rule|
        instances = rule.constraint.map{|c| c.propertyInstance}.flatten
        if instances.length > 1
          raise "Can't deal with multiple constraints in single rule"
        end
        value = instances[0].value
        if value.is_a?(RbVmomi::PBM::PbmCapabilityRange)
          value = "#{value.min} - #{value.max}"
        end
        puts " #{rule.name}: #{value}"
      end
    end
  end
end
opts :profile_delete do
  summary "Delete a VM Storage Profile"
  arg :profile, nil, :lookup => RbVmomi::PBM::PbmCapabilityProfile, :multi => true
end

# Deletes the given storage profiles via the SPBM profile manager.
# No-op when the profile list is empty.
def profile_delete profiles
  if profiles.length == 0
    return
  end
  _catch_spbm_resets(nil) do
    # The PBM connection was stashed on the profile object when it was
    # listed in the inventory (see Datacenter#rvc_children_profiles).
    pbm = profiles.first.instance_variable_get(:@connection)
    pm = pbm.serviceContent.profileManager
    pm.PbmDelete(:profileId => profiles.map{|x| x.profileId})
  end
end
opts :profile_apply do
  summary "Apply a VM Storage Profile. Pushed profile content to Storage system"
  arg :profile, nil, :lookup => RbVmomi::PBM::PbmCapabilityProfile, :multi => true
end

# Re-applies each profile so the storage system picks up its current
# content, then tracks the resulting reconfiguration tasks to completion.
def profile_apply profiles
  if profiles.length == 0
    return
  end
  # PBM connection and owning datacenter were stashed on the profile
  # object when it was listed in the inventory.
  pbm = profiles.first.instance_variable_get(:@connection)
  dc = profiles.first.instance_variable_get(:@dc)
  vim = dc._connection
  _catch_spbm_resets(vim) do
    pm = pbm.serviceContent.profileManager
    results = pm.applyProfile(:profiles => profiles.map{|x| x.profileId})
    # Each outcome carries VIM task MoIDs; resolve them against the VIM
    # connection so progress() can poll them.
    tasks = results.map{|x| x.reconfigOutcome.map{|y| y.taskMoid}}.flatten
    tasks = tasks.map{|x| VIM::Task(vim, x)}
    progress(tasks)
  end
end
opts :profile_create do
  summary "Create a VM Storage Profile"
  arg :name, nil, :type => :string
  opt :description, "Description", :type => :string
  opt :rule, "Rule in format <provider>.<capability>=<value>", :type => :string, :multi => true
end

# Creates a VM Storage Profile named +profile_name+ from the rule
# strings in opts[:rule]. Each rule has the form
# "<provider>.<capability>=<value>" (comma-separated pair of values for
# range capabilities); raw values are coerced according to the
# capability metadata advertised by the provider.
def profile_create profile_name, opts
  dc, = lookup '~'
  conn = dc._connection
  _catch_spbm_resets(conn) do
    pbm = conn.pbm
    pm = pbm.serviceContent.profileManager
    rules = opts[:rule] || []
    resType = {:resourceType => "STORAGE"}
    # Need to support other vendors too
    cm = pm.PbmFetchCapabilityMetadata(
      :resourceType => resType,
      :vendorUuid => "com.vmware.storage.vsan"
    )
    capabilities = cm.map{|x| x.capabilityMetadata}.flatten
    constraints = rules.map do |rule_str|
      name, values_str = rule_str.split("=", 2)
      if !values_str
        err "Rule is malformed: #{rule_str}, should be <provider>.<capability>=<value>"
      end
      ns, id = name.split('.', 2)
      if !id
        err "Rule is malformed: #{rule_str}, should be <provider>.<capability>=<value>"
      end
      capability = capabilities.find{|x| x.name == name}
      if !capability
        err "Capability #{name} unknown"
      end
      # Coerce the raw strings into the type the capability expects.
      type = capability.propertyMetadata[0].type
      values = values_str.split(',')
      if type.typeName == "XSD_INT"
        values = values.map{|x| RbVmomi::BasicTypes::Int.new(x.to_i)}
      end
      if type.typeName == "XSD_BOOLEAN"
        values = values.map{|x| (x =~ /(true|True|1|yes|Yes)/) != nil}
      end
      if type.is_a?(PBM::PbmCapabilityGenericTypeInfo) && type.genericTypeName == "VMW_RANGE"
        if values.length != 2
          err "#{name} is a range, need to specify 2 values"
        end
        value = PBM::PbmCapabilityTypesRange(:min => values[0], :max => values[1])
      elsif values.length == 1
        value = values.first
      else
        # Fixed: previously interpolated the undefined local `value_str`,
        # raising NameError instead of reporting the malformed value.
        err "Value malformed: #{values_str}"
      end
      {
        :id => {
          :namespace => ns,
          :id => id
        },
        :constraint => [{
          :propertyInstance => [{
            :id => id,
            :value => value
          }]
        }]
      }
    end
    pm.PbmCreate(
      :createSpec => {
        :name => profile_name,
        :description => opts[:description],
        :resourceType => resType,
        :constraints => PBM::PbmCapabilitySubProfileConstraints(
          :subProfiles => [
            PBM::PbmCapabilitySubProfile(
              :name => "Object",
              :capability => constraints
            )
          ]
        )
      }
    )
  end
end
opts :device_change_storage_profile do
  summary "Change storage profile of a virtual disk"
  arg :device, nil, :lookup => VIM::VirtualDevice, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Associates each given virtual device with the storage profile.
# Devices are grouped by owning VM so each VM receives a single
# ReconfigVM_Task.
def device_change_storage_profile devs, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  vm_devs = devs.group_by(&:rvc_vm)
  conn = vm_devs.keys.first._connection
  _catch_spbm_resets(conn) do
    # Profile specs on device configs require the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vm_devs.map do |vm, my_devs|
        spec = {
          :deviceChange => my_devs.map do |dev|
            {
              :operation => :edit,
              :device => dev,
              :profile => profile,
            }
          end
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :check_compliance do
  summary "Check compliance"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
end

# Checks storage-profile compliance for each VM namespace and each of
# its virtual disks, printing an entity / profile / compliance table
# followed by a per-status summary count.
def check_compliance vms
  dc, = lookup '~'
  conn = dc._connection
  _catch_spbm_resets(conn) do
    pbm = conn.pbm
    pm = pbm.serviceContent.profileManager
    cm = pbm.serviceContent.complianceManager
    # One PbmServerObjectRef per VM plus one per disk (all_pbmobjref).
    compliance = cm.PbmCheckCompliance(:entities => vms.map do |vm|
      vm.all_pbmobjref
    end.flatten)
    # entity key -> profile uniqueId, and entity key -> status.
    profile_ids = Hash[compliance.map{|x| [x.entity.key, x.profile.uniqueId]}]
    compliances = Hash[compliance.map{|x| [x.entity.key, x.complianceStatus]}]
    profiles = nil
    begin
      profileIds = profile_ids.values.uniq.compact.map do |x|
        PBM::PbmProfileId(:uniqueId => x)
      end
      if profileIds.length > 0
        profiles = pm.PbmRetrieveContent(
          :profileIds => profileIds
        )
      else
        profiles = []
      end
    rescue Exception => ex
      # Debug aid: dump the ids that broke retrieval, then re-raise so
      # the caller still sees the original error.
      pp "#{ex.class}: #{ex.message}"
      pp profile_ids
      raise ex
    end
    # Resolve profile ids to display names, falling back to the raw id.
    profiles = Hash[profiles.map{|x| [x.profileId.uniqueId, x.name]}]
    profiles = Hash[profile_ids.map{|k,v| [k, profiles[v] || v]}]
    t = Terminal::Table.new()
    t << ['VM/Virtual Disk', 'Profile', 'Compliance']
    t.add_separator
    vms.each do |vm|
      t << [
        vm.name,
        profiles[vm._ref] || "unknown",
        compliances[vm._ref] || "unknown",
      ]
      vm.disks.each do |disk|
        # Disk entities are keyed "<vm-moref>:<device-key>"; see
        # VirtualMachine#disks_pbmobjref.
        id = "#{vm._ref}:#{disk.key}"
        t << [
          " #{disk.deviceInfo.label}",
          profiles[id] || "unknown",
          compliances[id] || "unknown",
        ]
      end
    end
    puts t
    puts ""
    # Count entities per compliance status.
    stats = Hash[compliances.values.group_by{|x| x}.map{|k,v| [k, v.length]}]
    stats.sort_by{|k,v| k}.each do |type, count|
      puts "Number of '#{type}' entities: #{count}"
    end
  end
end
opts :namespace_change_storage_profile do
  summary "Change storage profile of VM namespace"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Assigns the given storage profile to each VM's home namespace only;
# disks are untouched (see vm_change_storage_profile for both).
def namespace_change_storage_profile vms, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  conn = vms.first._connection
  _catch_spbm_resets(conn) do
    # vmProfile in the reconfigure spec requires the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vms.map do |vm|
        spec = {
          :vmProfile => profile,
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :vm_change_storage_profile do
  summary "Change storage profile of VM namespace and its disks"
  arg :vm, nil, :lookup => VIM::VirtualMachine, :multi => true
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Assigns the given storage profile to each VM's home namespace AND all
# of its virtual disks in a single reconfigure task per VM.
def vm_change_storage_profile vms, opts
  if !opts[:profile]
    err "Must specify a storage profile"
  end
  conn = vms.first._connection
  _catch_spbm_resets(conn) do
    # Profile specs require the "dev" API revision.
    _run_with_rev(conn, "dev") do
      profile = nil
      if opts[:profile]
        profile = [VIM::VirtualMachineDefinedProfileSpec(
          :profileId => opts[:profile].profileId.uniqueId
        )]
      end
      tasks = vms.map do |vm|
        disks = vm.disks
        spec = {
          :vmProfile => profile,
          :deviceChange => disks.map do |dev|
            {
              :operation => :edit,
              :device => dev,
              :profile => profile,
            }
          end
        }
        vm.ReconfigVM_Task(:spec => spec)
      end
      progress(tasks)
    end
  end
end
opts :device_add_disk do
  summary "Add a hard drive to a virtual machine"
  arg :vm, nil, :lookup => VIM::VirtualMachine
  arg :path, "Filename on the datastore", :lookup_parent => VIM::Datastore::FakeDatastoreFolder, :required => false
  opt :size, 'Size', :default => '10G'
  opt :controller, 'Virtual controller', :type => :string, :lookup => VIM::VirtualController
  opt :file_op, 'File operation (create|reuse|replace)', :default => 'create'
  opt :profile, "Profile", :lookup => RbVmomi::PBM::PbmCapabilityProfile
end

# Adds a thin-provisioned virtual disk to +vm+, optionally tagged with a
# storage profile. When no datastore path is given, the vmdk is placed
# next to the VM's config file.
def device_add_disk vm, path, opts
  controller, unit_number = pick_controller vm, opts[:controller], [VIM::VirtualSCSIController, VIM::VirtualIDEController]
  id = "disk-#{controller.key}-#{unit_number}"
  if path
    dir, file = *path
    filename = "#{dir.datastore_path}/#{file}"
  else
    filename = "#{File.dirname(vm.summary.config.vmPathName)}/#{id}.vmdk"
  end
  # 'reuse' maps to no fileOperation, i.e. attach an existing vmdk.
  opts[:file_op] = nil if opts[:file_op] == 'reuse'
  conn = vm._connection
  _run_with_rev(conn, "dev") do
    profile = nil
    if opts[:profile]
      profile = [VIM::VirtualMachineDefinedProfileSpec(
        :profileId => opts[:profile].profileId.uniqueId
      )]
    end
    spec = {
      :deviceChange => [
        {
          :operation => :add,
          :fileOperation => opts[:file_op],
          :device => VIM::VirtualDisk(
            :key => -1,
            :backing => VIM.VirtualDiskFlatVer2BackingInfo(
              :fileName => filename,
              :diskMode => :persistent,
              :thinProvisioned => true
            ),
            # NOTE(review): divides by 1000, not 1024 — assumes
            # MetricNumber yields decimal-unit bytes; confirm intended
            # KB semantics.
            :capacityInKB => MetricNumber.parse(opts[:size]).to_i/1000,
            :controllerKey => controller.key,
            :unitNumber => unit_number
          ),
          :profile => profile,
        },
      ]
    }
    task = vm.ReconfigVM_Task(:spec => spec)
    result = progress([task])[task]
    if result == nil
      # A nil result means the task succeeded; report the new disk.
      new_device = vm.collect('config.hardware.device')[0].grep(VIM::VirtualDisk).last
      puts "Added device #{new_device.name}"
    end
  end
end
# Picks a controller for a new device plus the next free unit number on
# it. Uses the caller-supplied controller when given; otherwise selects
# the first controller of an acceptable class with fewer than two
# attached devices. Errors out when no controller qualifies.
def pick_controller vm, controller, controller_classes
  devices, = vm.collect 'config.hardware.device'
  unless controller
    controller = devices.find do |candidate|
      acceptable = controller_classes.any? { |klass| candidate.is_a? klass }
      acceptable && candidate.device.length < 2
    end
  end
  err "no suitable controller found" unless controller
  on_controller = devices.select { |dev| dev.controllerKey == controller.key }
  highest = on_controller.map(&:unitNumber).max
  next_unit = highest.nil? ? 0 : highest + 1
  [controller, next_unit]
end
# Temporarily switches the connection's API revision to +rev+ for the
# duration of the block, restoring the previous revision afterwards —
# even when the block raises.
def _run_with_rev conn, rev
  saved = conn.rev
  begin
    conn.rev = rev
    yield
  ensure
    conn.rev = saved
  end
end
# Runs the block, translating an SPBM connection reset (EOFError) into a
# user-facing error. The cached PBM connection on +conn+ is dropped so
# the next command reconnects cleanly.
def _catch_spbm_resets(conn)
  yield
rescue EOFError
  conn.pbm = nil if conn
  err "Connection to SPBM timed out, try again"
end
|
module Salt
  module Api
    # Gem release version.
    VERSION = '0.1.2'
  end
end
Bump version to 0.1.3
module Salt
  module Api
    # Gem release version.
    VERSION = '0.1.3'
  end
end
|
#
# Be sure to run `pod spec lint SwiftyAlert.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for SwiftyAlert. Run `pod spec lint SwiftyAlert.podspec` to
# validate; see http://docs.cocoapods.org/specification.html for the
# available attributes.
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――
  s.name         = "SwiftyAlert"
  s.version      = "1.0.0"
  s.summary      = "Simple UIAlertController and UIAlertView wrapper."
  s.description  = <<-DESC
                   Swifty UIAlertController
                   DESC
  s.homepage     = "https://github.com/simorgh3196/SwiftyAlert"

  # ――― License & Author ―――
  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "simorgh3196" => "simorgh3196@gmail.com" }

  # ――― Platform ―――
  s.platform     = :ios, "8.0"

  # ――― Source ―――
  # The git tag is derived from s.version so that the two can never
  # drift apart when bumping a release.
  s.source       = { :git => "https://github.com/simorgh3196/SwiftyAlert.git", :tag => "v#{s.version}" }
  s.source_files = "SwiftyAlert/**/*.swift"

  # ――― Build Settings ―――
  s.requires_arc = true
end
Update podspec
#
# Be sure to run `pod spec lint SwiftyAlert.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
# Podspec for SwiftyAlert. Run `pod spec lint SwiftyAlert.podspec` to
# validate; see http://docs.cocoapods.org/specification.html for the
# available attributes.
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――
  s.name         = "SwiftyAlert"
  s.version      = "1.0.1"
  s.summary      = "Simple UIAlertController and UIAlertView wrapper."
  s.description  = <<-DESC
                   Swifty UIAlertController
                   DESC
  s.homepage     = "https://github.com/simorgh3196/SwiftyAlert"

  # ――― License & Author ―――
  s.license      = { :type => "MIT", :file => "LICENSE" }
  s.author       = { "simorgh3196" => "simorgh3196@gmail.com" }

  # ――― Platform ―――
  s.platform     = :ios, "8.0"

  # ――― Source ―――
  # The git tag is derived from s.version so that the two can never
  # drift apart when bumping a release.
  s.source       = { :git => "https://github.com/simorgh3196/SwiftyAlert.git", :tag => "v#{s.version}" }
  s.source_files = "SwiftyAlert/**/*.swift"

  # ――― Build Settings ―――
  s.requires_arc = true
end
|
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
require_relative 'lib/sensu-plugins-iis'
# Gem specification for the sensu-plugins-iis plugin collection.
Gem::Specification.new do |spec|
  spec.authors = ['Sensu-Plugins and contributors']
  spec.date = Date.today.to_s
  spec.description = 'Sensu plugins for Microsoft IIS'
  spec.email = '<sensu-users@googlegroups.com>'
  spec.executables = Dir.glob('bin/**/*.rb').map { |path| File.basename(path) }
  spec.files = Dir.glob('{bin,lib}/**/*') + %w[LICENSE README.md CHANGELOG.md]
  spec.homepage = 'https://github.com/sensu-plugins/sensu-plugins-iis'
  spec.license = 'MIT'

  # Sensu-Plugins release-pipeline metadata.
  spec.metadata = {
    'maintainer' => 'sensu-plugin',
    'development_status' => 'active',
    'production_status' => 'unstable - testing recommended',
    'release_draft' => 'false',
    'release_prerelease' => 'false'
  }

  spec.name = 'sensu-plugins-iis'
  spec.platform = Gem::Platform::RUBY
  spec.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 2.0.0'
  spec.summary = 'Sensu plugins for Microsoft IIS'
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.version = SensuPluginsIIS::Version::VER_STRING

  # Runtime dependency.
  spec.add_runtime_dependency 'sensu-plugin', '~> 1.2'

  # Development-only dependencies.
  spec.add_development_dependency 'bundler', '~> 1.7'
  spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_development_dependency 'github-markup', '~> 1.3'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rake', '~> 10.5'
  spec.add_development_dependency 'redcarpet', '~> 3.2'
  spec.add_development_dependency 'rubocop', '~> 0.40.0'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'yard', '~> 0.8'
end
Update github-markup requirement from ~> 1.3 to ~> 3.0
Updates the requirements on [github-markup](https://github.com/github/markup) to permit the latest version.
- [Release notes](https://github.com/github/markup/releases)
- [Changelog](https://github.com/github/markup/blob/master/HISTORY.md)
- [Commits](https://github.com/github/markup/commits/v3.0.1)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
require_relative 'lib/sensu-plugins-iis'
# Gem specification for the sensu-plugins-iis plugin collection.
Gem::Specification.new do |spec|
  spec.authors = ['Sensu-Plugins and contributors']
  spec.date = Date.today.to_s
  spec.description = 'Sensu plugins for Microsoft IIS'
  spec.email = '<sensu-users@googlegroups.com>'
  spec.executables = Dir.glob('bin/**/*.rb').map { |path| File.basename(path) }
  spec.files = Dir.glob('{bin,lib}/**/*') + %w[LICENSE README.md CHANGELOG.md]
  spec.homepage = 'https://github.com/sensu-plugins/sensu-plugins-iis'
  spec.license = 'MIT'

  # Sensu-Plugins release-pipeline metadata.
  spec.metadata = {
    'maintainer' => 'sensu-plugin',
    'development_status' => 'active',
    'production_status' => 'unstable - testing recommended',
    'release_draft' => 'false',
    'release_prerelease' => 'false'
  }

  spec.name = 'sensu-plugins-iis'
  spec.platform = Gem::Platform::RUBY
  spec.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 2.0.0'
  spec.summary = 'Sensu plugins for Microsoft IIS'
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.version = SensuPluginsIIS::Version::VER_STRING

  # Runtime dependency.
  spec.add_runtime_dependency 'sensu-plugin', '~> 1.2'

  # Development-only dependencies.
  spec.add_development_dependency 'bundler', '~> 1.7'
  spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_development_dependency 'github-markup', '~> 3.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rake', '~> 10.5'
  spec.add_development_dependency 'redcarpet', '~> 3.2'
  spec.add_development_dependency 'rubocop', '~> 0.40.0'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'yard', '~> 0.8'
end
|
# frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
require_relative 'lib/sensu-plugins-lvm'
# Gem specification for the sensu-plugins-lvm plugin collection.
Gem::Specification.new do |spec| # rubocop:disable Metrics/BlockLength
  spec.authors = ['Sensu-Plugins and contributors']
  spec.date = Date.today.to_s
  spec.description = 'Sensu plugins for LVM'
  spec.email = '<sensu-users@googlegroups.com>'
  spec.executables = Dir.glob('bin/**/*').map { |path| File.basename(path) }
  spec.files = Dir.glob('{bin,lib}/**/*') + %w[LICENSE README.md CHANGELOG.md]
  spec.homepage = 'https://github.com/sensu-plugins/sensu-plugins-lvm'
  spec.license = 'MIT'

  # Sensu-Plugins release-pipeline metadata.
  spec.metadata = {
    'maintainer' => 'sensu-plugin',
    'development_status' => 'active',
    'production_status' => 'unstable - testing recommended',
    'release_draft' => 'false',
    'release_prerelease' => 'false'
  }

  spec.name = 'sensu-plugins-lvm'
  spec.platform = Gem::Platform::RUBY
  spec.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 2.3.0'
  spec.summary = 'Sensu plugins for lvm'
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.version = SensuPluginsLvm::Version::VER_STRING

  # Runtime dependencies.
  spec.add_runtime_dependency 'chef-ruby-lvm', '~> 0.3.0'
  spec.add_runtime_dependency 'chef-ruby-lvm-attrib', '~> 0.2.1'
  spec.add_runtime_dependency 'sensu-plugin', '>= 2.5', '< 5.0'

  # Development-only dependencies.
  spec.add_development_dependency 'bundler', '~> 2.1'
  spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_development_dependency 'github-markup', '~> 3.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rake', '~> 12.3'
  spec.add_development_dependency 'redcarpet', '~> 3.2'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'rubocop', '~> 0.62.0'
  spec.add_development_dependency 'yard', '~> 0.9.11'
end
Update rake requirement from ~> 12.3 to ~> 13.0
Updates the requirements on [rake](https://github.com/ruby/rake) to permit the latest version.
- [Release notes](https://github.com/ruby/rake/releases)
- [Changelog](https://github.com/ruby/rake/blob/master/History.rdoc)
- [Commits](https://github.com/ruby/rake/compare/v12.3.0...v13.0.1)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com>
# frozen_string_literal: true
lib = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
require_relative 'lib/sensu-plugins-lvm'
# Gem specification for the sensu-plugins-lvm plugin collection.
Gem::Specification.new do |spec| # rubocop:disable Metrics/BlockLength
  spec.authors = ['Sensu-Plugins and contributors']
  spec.date = Date.today.to_s
  spec.description = 'Sensu plugins for LVM'
  spec.email = '<sensu-users@googlegroups.com>'
  spec.executables = Dir.glob('bin/**/*').map { |path| File.basename(path) }
  spec.files = Dir.glob('{bin,lib}/**/*') + %w[LICENSE README.md CHANGELOG.md]
  spec.homepage = 'https://github.com/sensu-plugins/sensu-plugins-lvm'
  spec.license = 'MIT'

  # Sensu-Plugins release-pipeline metadata.
  spec.metadata = {
    'maintainer' => 'sensu-plugin',
    'development_status' => 'active',
    'production_status' => 'unstable - testing recommended',
    'release_draft' => 'false',
    'release_prerelease' => 'false'
  }

  spec.name = 'sensu-plugins-lvm'
  spec.platform = Gem::Platform::RUBY
  spec.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 2.3.0'
  spec.summary = 'Sensu plugins for lvm'
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.version = SensuPluginsLvm::Version::VER_STRING

  # Runtime dependencies.
  spec.add_runtime_dependency 'chef-ruby-lvm', '~> 0.3.0'
  spec.add_runtime_dependency 'chef-ruby-lvm-attrib', '~> 0.2.1'
  spec.add_runtime_dependency 'sensu-plugin', '>= 2.5', '< 5.0'

  # Development-only dependencies.
  spec.add_development_dependency 'bundler', '~> 2.1'
  spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_development_dependency 'github-markup', '~> 3.0'
  spec.add_development_dependency 'pry', '~> 0.10'
  spec.add_development_dependency 'rake', '~> 13.0'
  spec.add_development_dependency 'redcarpet', '~> 3.2'
  spec.add_development_dependency 'rspec', '~> 3.4'
  spec.add_development_dependency 'rubocop', '~> 0.62.0'
  spec.add_development_dependency 'yard', '~> 0.9.11'
end
|
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
if RUBY_VERSION < '2.0.0'
require 'sensu-plugins-ssl'
else
require_relative 'lib/sensu-plugins-ssl'
end
pvt_key = '~/.ssh/gem-private_key.pem'
# Gem specification for the sensu-plugins-ssl plugin collection.
Gem::Specification.new do |s|
  s.authors = ['Sensu-Plugins and contributors']
  s.cert_chain = ['certs/sensu-plugins.pem']
  s.date = Date.today.to_s
  s.description = 'Sensu plugins for SSL'
  s.email = '<sensu-users@googlegroups.com>'
  s.executables = s.files.grep(%r{^bin/}) { |f| File.basename(f) }
  s.files = Dir.glob('{bin,lib}/**/*') + %w(LICENSE README.md CHANGELOG.md)
  s.homepage = 'https://github.com/sensu-plugins/sensu-plugins-ssl'
  s.license = 'MIT'
  # Sensu-Plugins release-pipeline metadata. The maintainer field was
  # previously blank; set it to 'sensu-plugin' for consistency with the
  # other sensu-plugins gemspecs.
  s.metadata = { 'maintainer' => 'sensu-plugin',
                 'development_status' => 'active',
                 'production_status' => 'unstable - testing recommended',
                 'release_draft' => 'false',
                 'release_prerelease' => 'false'
  }
  s.name = 'sensu-plugins-ssl'
  s.platform = Gem::Platform::RUBY
  s.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  s.require_paths = ['lib']
  s.required_ruby_version = '>= 1.9.3'
  # Only sign when invoked through the `gem` executable.
  s.signing_key = File.expand_path(pvt_key) if $PROGRAM_NAME =~ /gem\z/
  s.summary = 'Sensu plugins for SSL'
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
  s.version = SensuPluginsSSL::Version::VER_STRING
  # Runtime dependencies.
  s.add_runtime_dependency 'sensu-plugin', '1.1.0'
  s.add_runtime_dependency 'openssl', '1.0.0.beta'
  # Development-only dependencies.
  s.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  s.add_development_dependency 'rubocop', '~> 0.30'
  s.add_development_dependency 'rspec', '~> 3.1'
  s.add_development_dependency 'bundler', '~> 1.7'
  s.add_development_dependency 'rake', '~> 10.0'
  s.add_development_dependency 'github-markup', '~> 1.3'
  s.add_development_dependency 'redcarpet', '~> 3.2'
  s.add_development_dependency 'yard', '~> 0.8'
  s.add_development_dependency 'pry', '~> 0.10'
end
Add base maintainer
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'date'
if RUBY_VERSION < '2.0.0'
require 'sensu-plugins-ssl'
else
require_relative 'lib/sensu-plugins-ssl'
end
pvt_key = '~/.ssh/gem-private_key.pem'
# Gem specification for the sensu-plugins-ssl plugin collection.
Gem::Specification.new do |spec|
  spec.authors = ['Sensu-Plugins and contributors']
  spec.cert_chain = ['certs/sensu-plugins.pem']
  spec.date = Date.today.to_s
  spec.description = 'Sensu plugins for SSL'
  spec.email = '<sensu-users@googlegroups.com>'
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.files = Dir.glob('{bin,lib}/**/*') + %w[LICENSE README.md CHANGELOG.md]
  spec.homepage = 'https://github.com/sensu-plugins/sensu-plugins-ssl'
  spec.license = 'MIT'

  # Sensu-Plugins release-pipeline metadata.
  spec.metadata = {
    'maintainer' => 'sensu-plugin',
    'development_status' => 'active',
    'production_status' => 'unstable - testing recommended',
    'release_draft' => 'false',
    'release_prerelease' => 'false'
  }

  spec.name = 'sensu-plugins-ssl'
  spec.platform = Gem::Platform::RUBY
  spec.post_install_message = 'You can use the embedded Ruby by setting EMBEDDED_RUBY=true in /etc/default/sensu'
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 1.9.3'
  # Only sign when invoked through the `gem` executable.
  spec.signing_key = File.expand_path(pvt_key) if $PROGRAM_NAME =~ /gem\z/
  spec.summary = 'Sensu plugins for SSL'
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.version = SensuPluginsSSL::Version::VER_STRING

  # Runtime dependencies.
  spec.add_runtime_dependency 'sensu-plugin', '1.1.0'
  spec.add_runtime_dependency 'openssl', '1.0.0.beta'

  # Development-only dependencies.
  spec.add_development_dependency 'codeclimate-test-reporter', '~> 0.4'
  spec.add_development_dependency 'rubocop', '~> 0.30'
  spec.add_development_dependency 'rspec', '~> 3.1'
  spec.add_development_dependency 'bundler', '~> 1.7'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'github-markup', '~> 1.3'
  spec.add_development_dependency 'redcarpet', '~> 3.2'
  spec.add_development_dependency 'yard', '~> 0.8'
  spec.add_development_dependency 'pry', '~> 0.10'
end
|
# Podspec for MCChildrenNavigationController.
Pod::Spec.new do |s|
  s.name         = "MCChildrenNavigationController"
  s.version      = "0.0.1"
  s.summary      = "MCChildrenNavigationController displays tree structures in a navigation interface"
  s.description  = <<-DESC
                   MCChildrenNavigationController displays tree structures in a navigation interface
                   DESC
  s.homepage     = "https://github.com/cabeca/MCChildrenNavigationController"
  s.license      = 'MIT'
  s.author       = { "Miguel Cabeça" => "miguel.cabeca@gmail.com" }
  s.platform     = :ios, '7.0'
  # Derive the git tag from the spec version so the two cannot drift
  # apart on a release bump.
  s.source       = { :git => "https://github.com/cabeca/MCChildrenNavigationController.git", :tag => s.version.to_s }
  s.source_files = 'MCChildrenNavigationController/lib/*.{h,m}'
  s.requires_arc = true
end
Bump version
# Podspec for MCChildrenNavigationController.
Pod::Spec.new do |s|
  s.name         = "MCChildrenNavigationController"
  s.version      = "0.0.2"
  s.summary      = "MCChildrenNavigationController displays tree structures in a navigation interface"
  s.description  = <<-DESC
                   MCChildrenNavigationController displays tree structures in a navigation interface
                   DESC
  s.homepage     = "https://github.com/cabeca/MCChildrenNavigationController"
  s.license      = 'MIT'
  s.author       = { "Miguel Cabeça" => "miguel.cabeca@gmail.com" }
  s.platform     = :ios, '7.0'
  # Derive the git tag from the spec version so the two cannot drift
  # apart on a release bump.
  s.source       = { :git => "https://github.com/cabeca/MCChildrenNavigationController.git", :tag => s.version.to_s }
  s.source_files = 'MCChildrenNavigationController/lib/*.{h,m}'
  s.requires_arc = true
end
|
# Homebrew formula that builds IQ-TREE from a tagged GitHub release with
# OpenMP support enabled.
class Iqtree < Formula
  desc "Efficient phylogenomic software by maximum likelihood"
  homepage "http://www.iqtree.org/"
  # doi "10.1093/molbev/mst024", "10.1093/molbev/msu300", "10.1093/sysbio/syw037"
  # tag "bioinformatics"
  url "https://github.com/Cibiv/IQ-TREE/archive/v1.6.0.tar.gz"
  sha256 "283e5f251652fdc8375f0b8eb28cb1b1866f764e2286bae6fe2b53f9d39adebe"
  # Prebuilt binary checksums per macOS release.
  bottle do
    sha256 "dbe34a68aacc7e76c5312a8141916c922ac5d77fda89d029f44fa372cb8341cf" => :high_sierra
    sha256 "2cfbe86ef8a2f7da60ef237c034d69805c84b9de7b07468409fe33c9e52efddf" => :sierra
    sha256 "a1453d22e3fa9bad8b468aade7378346b94c80942f43cf1870faf612fd9f00b5" => :el_capitan
  end
  needs :openmp
  depends_on "cmake" => :build
  depends_on "eigen"
  # zlib is only declared off macOS (see OS.mac? guard).
  depends_on "zlib" unless OS.mac?
  def install
    if OS.mac?
      # Strip -Wl,--gc-sections from the release link flags on macOS —
      # NOTE(review): presumably because Apple's linker rejects it; confirm.
      inreplace "CMakeLists.txt",
        "${CMAKE_EXE_LINKER_FLAGS_RELEASE} -Wl,--gc-sections",
        "${CMAKE_EXE_LINKER_FLAGS_RELEASE}"
    end
    # Out-of-source CMake build with OpenMP enabled.
    mkdir "build" do
      system "cmake", "..", "-DIQTREE_FLAGS=omp", *std_cmake_args
      system "make"
    end
    bin.install "build/iqtree"
  end
  test do
    # Smoke test: running with no arguments should emit usage text
    # containing "boot".
    assert_match "boot", shell_output("#{bin}/iqtree 2>&1")
  end
end
iqtree: update 1.6.0 bottle for Linuxbrew.
# Homebrew/Linuxbrew formula that builds IQ-TREE from a tagged GitHub
# release with OpenMP support enabled.
class Iqtree < Formula
  desc "Efficient phylogenomic software by maximum likelihood"
  homepage "http://www.iqtree.org/"
  # doi "10.1093/molbev/mst024", "10.1093/molbev/msu300", "10.1093/sysbio/syw037"
  # tag "bioinformatics"
  url "https://github.com/Cibiv/IQ-TREE/archive/v1.6.0.tar.gz"
  sha256 "283e5f251652fdc8375f0b8eb28cb1b1866f764e2286bae6fe2b53f9d39adebe"
  # Prebuilt binary checksums per platform (includes a Linuxbrew bottle).
  bottle do
    sha256 "dbe34a68aacc7e76c5312a8141916c922ac5d77fda89d029f44fa372cb8341cf" => :high_sierra
    sha256 "2cfbe86ef8a2f7da60ef237c034d69805c84b9de7b07468409fe33c9e52efddf" => :sierra
    sha256 "a1453d22e3fa9bad8b468aade7378346b94c80942f43cf1870faf612fd9f00b5" => :el_capitan
    sha256 "4cd70da7365a35bd65e382b04f1cf89620863e507519a7de94e7b8f2c36974b3" => :x86_64_linux
  end
  needs :openmp
  depends_on "cmake" => :build
  depends_on "eigen"
  # zlib is only declared off macOS (see OS.mac? guard).
  depends_on "zlib" unless OS.mac?
  def install
    if OS.mac?
      # Strip -Wl,--gc-sections from the release link flags on macOS —
      # NOTE(review): presumably because Apple's linker rejects it; confirm.
      inreplace "CMakeLists.txt",
        "${CMAKE_EXE_LINKER_FLAGS_RELEASE} -Wl,--gc-sections",
        "${CMAKE_EXE_LINKER_FLAGS_RELEASE}"
    end
    # Out-of-source CMake build with OpenMP enabled.
    mkdir "build" do
      system "cmake", "..", "-DIQTREE_FLAGS=omp", *std_cmake_args
      system "make"
    end
    bin.install "build/iqtree"
  end
  test do
    # Smoke test: running with no arguments should emit usage text
    # containing "boot".
    assert_match "boot", shell_output("#{bin}/iqtree 2>&1")
  end
end
|
# Groups a set of apps under a title/subtitle for display purposes.
class OodAppGroup
  attr_accessor :apps, :title, :subtitle

  # A new group starts with no apps.
  def initialize
    @apps = []
  end

  # True when at least one app belongs to this group.
  def has_apps?
    apps.count > 0
  end

  # TODO: write lots of tests so we can refactor this well...
  #
  # Build one group per owner, filled with the apps that owner shares;
  # groups containing no apps are dropped from the result.
  # TODO: this gets much more complex when we have apps specifying
  # which groups they should be filtered under.
  def self.usr_groups(owners)
    Array(owners).map { |owner|
      group = OodAppGroup.new
      group.title = owner
      group.subtitle = owner
      group.apps = UsrRouter.apps(owner: owner)
      group
    }.select(&:has_apps?)
  end
end
use gecos for app group title if avail
# Groups a set of apps under a title/subtitle for display purposes.
class OodAppGroup
  attr_accessor :apps, :title, :subtitle

  # A new group starts with no apps.
  def initialize
    @apps = []
  end

  # True when at least one app belongs to this group.
  def has_apps?
    apps.count > 0
  end

  # Given a list of owners, build a list of AppGroups where each app
  # group is the list of apps that user has shared. The group title
  # prefers the owner's GECOS (display name) field, falling back to the
  # username when the passwd entry is missing or the field is blank.
  def self.usr_groups(owners)
    Array(owners).map do |o|
      g = OodAppGroup.new
      g.title = gecos_or_name(o)
      g.subtitle = o
      g.apps = UsrRouter.apps(owner: o)
      g
    end.select(&:has_apps?)
  end

  # Resolve a username to its GECOS display name. Falls back to the
  # username itself when the user is unknown (Etc.getpwnam raises
  # ArgumentError) or the gecos field is nil/blank — a bare
  # `gecos || name` misses the common "" case, since "" is truthy.
  def self.gecos_or_name(name)
    gecos = Etc.getpwnam(name).gecos
    gecos.nil? || gecos.strip.empty? ? name : gecos
  rescue ArgumentError
    name
  end
  private_class_method :gecos_or_name
end
|
# Gem specification for sequel-bit_fields, a Sequel plugin that adds
# flag/bit-field support to models.
Gem::Specification.new do |spec|
  spec.name        = 'sequel-bit_fields'
  spec.version     = '0.8.0'
  spec.date        = '2013-02-19'
  spec.summary     = "A Sequel compatible version of common bit field technique."
  spec.description = "Use this plugin to add flags/bits to your Sequel based models."
  spec.authors     = ["Sascha Depold"]
  spec.email       = 'sascha@depold.com'
  spec.files       = ["lib/sequel/plugins/bit_fields.rb"]
  spec.homepage    = 'https://github.com/sdepold/sequel-bit_fields'
  spec.license     = "MIT"

  spec.add_runtime_dependency "sequel"
end
0.9.0
# Gem specification for sequel-bit_fields, a Sequel plugin that adds
# flag/bit-field support to models.
Gem::Specification.new do |spec|
  spec.name        = 'sequel-bit_fields'
  spec.version     = '0.9.0'
  spec.date        = '2013-02-21'
  spec.summary     = "A Sequel compatible version of common bit field technique."
  spec.description = "Use this plugin to add flags/bits to your Sequel based models."
  spec.authors     = ["Sascha Depold"]
  spec.email       = 'sascha@depold.com'
  spec.files       = ["lib/sequel/plugins/bit_fields.rb"]
  spec.homepage    = 'https://github.com/sdepold/sequel-bit_fields'
  spec.license     = "MIT"

  spec.add_runtime_dependency "sequel"
end
|
# Podspec for MSNavigationPaneViewController.
Pod::Spec.new do |spec|
  spec.name         = 'MSNavigationPaneViewController'
  spec.version      = '0.2.0'
  spec.license      = 'MIT'
  spec.platform     = :ios, '5.0'
  spec.summary      = 'Draggable navigation pane view controller for iPhone. Similar to the Path/Facebook navigation paradigm.'
  spec.homepage     = 'https://github.com/monospacecollective/MSNavigationPaneViewController'
  spec.author       = { 'Eric Horacek' => 'eric@monospacecollective.com' }
  # Git tag is derived from the spec version.
  spec.source       = { :git => 'https://github.com/monospacecollective/MSNavigationPaneViewController.git', :tag => spec.version.to_s }
  spec.source_files = 'MSNavigationPaneViewController/*.{h,m}'
  spec.requires_arc = true
  spec.frameworks   = 'QuartzCore'
  spec.dependency 'PRTween', '~> 0.0.1'
end
Removes PRTween from podspec
# Podspec for MSNavigationPaneViewController.
Pod::Spec.new do |spec|
  spec.name         = 'MSNavigationPaneViewController'
  spec.version      = '0.2.0'
  spec.license      = 'MIT'
  spec.platform     = :ios, '5.0'
  spec.summary      = 'Draggable navigation pane view controller for iPhone. Similar to the Path/Facebook navigation paradigm.'
  spec.homepage     = 'https://github.com/monospacecollective/MSNavigationPaneViewController'
  spec.author       = { 'Eric Horacek' => 'eric@monospacecollective.com' }
  # Git tag is derived from the spec version.
  spec.source       = { :git => 'https://github.com/monospacecollective/MSNavigationPaneViewController.git', :tag => spec.version.to_s }
  spec.source_files = 'MSNavigationPaneViewController/*.{h,m}'
  spec.requires_arc = true
  spec.frameworks   = 'QuartzCore'
end
|
module Commands
  module V2
    # Imports a full event history of content items for a single
    # content_id: validates the id, wipes anything already stored, then
    # recreates one content item per supplied event (plus redirects for
    # superseded base paths) and notifies the live content store of the
    # most recent 'Publish' action.
    class Import < BaseCommand
      def call
        unless UuidValidator.valid?(payload[:content_id])
          raise CommandError.new(
            code: 422,
            error_details: {
              error: {
                code: 422,
                message: "Content id not valid",
                fields: "content_id",
              }
            })
        end
        # Replace stored history atomically: delete everything for this
        # content_id, then recreate one item per event in order.
        ContentItem.transaction do
          delete_all(payload[:content_id])
          all_content_items.map.with_index do |event, index|
            create_content_item(event, index, payload[:content_id])
          end
        end
        send_downstream(all_content_items.find { |e| e[:action] == 'Publish' })
        Success.new(content_id: payload[:content_id])
      end

    private

      # Generated redirects plus the caller-supplied content items,
      # flattened into a single list (memoized).
      def all_content_items
        @all_content_items ||= [redirects.compact + payload[:content_items]].flatten
      end

      # Queue a low-priority live-store update for the published item, if any.
      # NOTE(review): locale is hard-coded to 'en' and payload_version to 1.
      def send_downstream(content)
        return unless content
        DownstreamLiveWorker.perform_async_in_queue(
          DownstreamLiveWorker::LOW_QUEUE,
          content_id: content[:content_id],
          locale: 'en',
          message_queue_update_type: content[:payload][:update_type],
          payload_version: 1 #event.id
        )
      end

      # Create one stored content item for an event; the event's position
      # drives both the user-facing and lock versions.
      def create_content_item(event, index, content_id)
        event_payload = event[:payload].slice(*attributes).merge(content_id: content_id)
        Services::CreateContentItem.new(
          payload: event_payload,
          user_facing_version: index + 1,
          lock_version: index + 1,
          state: state(event)
        ).create_content_item
      end

      # Attribute keys accepted from an event payload: the model's columns
      # plus :base_path (memoized).
      def attributes
        @attributes ||= ContentItem.new.attributes.keys.map(&:to_sym) << :base_path
      end

      # Events without an explicit state are treated as superseded history.
      def state(event)
        event[:payload][:state] || 'superseded'
      end

      # Build a redirect payload for each historical base path that was
      # later replaced by a newer one.
      #
      # BUG FIX: return an empty array — not nil — when only one base path
      # exists; all_content_items calls `.compact` on this result, which
      # raised NoMethodError on NilClass for single-base-path imports.
      def redirects
        return [] if base_paths_and_routes.count == 1
        base_paths_and_routes.map.with_index do |(base_path, routes), index|
          new_base_path = base_paths_and_routes[index + 1]
          next unless new_base_path
          {
            payload: RedirectHelper.create_redirect(
              publishing_app: publishing_app,
              old_base_path: base_path,
              new_base_path: new_base_path.first,
              routes: routes,
              options: { skip_put_content: true }, callbacks: nil,
            )
          }
        end
      end

      # Publishing app of the most recent supplied content item.
      def publishing_app
        @payload[:content_items].map { |e| e[:payload][:publishing_app] }.last
      end

      # Unique [base_path, routes] pairs in event order (memoized).
      def base_paths_and_routes
        @base_paths ||= payload[:content_items].map { |e| [e[:payload][:base_path], e[:payload][:routes]] }.uniq
      end

      # Remove all stored content items (and their links) for the content_id.
      def delete_all(content_id)
        Services::DeleteContentItem.destroy_content_items_with_links(content_id)
      end
    end
  end
end
Return empty array if one base_path was provided
This stops the call method failing due to a Nil error.
module Commands
  module V2
    # Imports a full event history of content items for a single
    # content_id: validates the id, wipes anything already stored, then
    # recreates one content item per supplied event (plus redirects for
    # superseded base paths) and notifies the live content store of the
    # most recent 'Publish' action.
    class Import < BaseCommand
      def call
        unless UuidValidator.valid?(payload[:content_id])
          raise CommandError.new(
            code: 422,
            error_details: {
              error: {
                code: 422,
                message: "Content id not valid",
                fields: "content_id",
              }
            })
        end
        # Replace stored history atomically: delete everything for this
        # content_id, then recreate one item per event in order.
        ContentItem.transaction do
          delete_all(payload[:content_id])
          all_content_items.map.with_index do |event, index|
            create_content_item(event, index, payload[:content_id])
          end
        end
        send_downstream(all_content_items.find { |e| e[:action] == 'Publish' })
        Success.new(content_id: payload[:content_id])
      end
      private
      # Generated redirects plus the caller-supplied content items,
      # flattened into a single list (memoized).
      def all_content_items
        @all_content_items ||= [redirects.compact + payload[:content_items]].flatten
      end
      # Queue a low-priority live-store update for the published item, if any.
      # NOTE(review): locale is hard-coded to 'en' and payload_version to 1.
      def send_downstream(content)
        return unless content
        DownstreamLiveWorker.perform_async_in_queue(
          DownstreamLiveWorker::LOW_QUEUE,
          content_id: content[:content_id],
          locale: 'en',
          message_queue_update_type: content[:payload][:update_type],
          payload_version: 1 #event.id
        )
      end
      # Create one stored content item for an event; the event's position
      # drives both the user-facing and lock versions.
      def create_content_item(event, index, content_id)
        event_payload = event[:payload].slice(*attributes).merge(content_id: content_id)
        Services::CreateContentItem.new(
          payload: event_payload,
          user_facing_version: index + 1,
          lock_version: index + 1,
          state: state(event)
        ).create_content_item
      end
      # Attribute keys accepted from an event payload: the model's columns
      # plus :base_path (memoized).
      def attributes
        @attributes ||= ContentItem.new.attributes.keys.map(&:to_sym) << :base_path
      end
      # Events without an explicit state are treated as superseded history.
      def state(event)
        event[:payload][:state] || 'superseded'
      end
      # Build a redirect payload for each historical base path that was
      # later replaced by a newer one. Returns [] (not nil) when only one
      # base path exists, since all_content_items calls `.compact` on it.
      def redirects
        return [] if base_paths_and_routes.count == 1
        base_paths_and_routes.map.with_index do |(base_path, routes), index|
          new_base_path = base_paths_and_routes[index + 1]
          next unless new_base_path
          {
            payload: RedirectHelper.create_redirect(
              publishing_app: publishing_app,
              old_base_path: base_path,
              new_base_path: new_base_path.first,
              routes: routes,
              options: { skip_put_content: true }, callbacks: nil,
            )
          }
        end
      end
      # Publishing app of the most recent supplied content item.
      def publishing_app
        @payload[:content_items].map { |e| e[:payload][:publishing_app] }.last
      end
      # Unique [base_path, routes] pairs in event order (memoized).
      def base_paths_and_routes
        @base_paths ||= payload[:content_items].map { |e| [e[:payload][:base_path], e[:payload][:routes]] }.uniq
      end
      # Remove all stored content items (and their links) for the content_id.
      def delete_all(content_id)
        Services::DeleteContentItem.destroy_content_items_with_links(content_id)
      end
    end
  end
end
|
# Merb controller for dashboard-style browsing of branches, centers,
# today's collections ("day sheet") and the application log.
class Browse < Application
  before :get_centers_and_template
  before :display_from_cache, :only => [:hq_tab]
  after :store_to_cache, :only => [:hq_tab]

  # One parsed request line from the Merb log file (see #show_log).
  Line = Struct.new(:ip, :date_time, :method, :model, :url, :status, :response_time)

  def index
    render :template => @template
  end

  def branches
    redirect resource(:branches)
  end

  # Paginated center list; staff members only see centers they manage.
  def centers
    if session.user.role == :staff_member
      @centers = Center.all(:manager => session.user.staff_member, :order => [:meeting_day]).paginate(:per_page => 15, :page => params[:page] || 1)
    else
      @centers = Center.all.paginate(:per_page => 15, :page => params[:page] || 1)
    end
    @branch = @centers.branch[0]
    render :template => 'centers/index'
  end

  def hq_tab
    partial :totalinfo
  end

  # Builds the day sheet: fees, payments, overdues, disbursals and advance
  # balances for every center with a repayment scheduled on @date.
  def centers_paying_today
    @date = params[:date] ? Date.parse(params[:date]) : Date.today
    hash = {:date => @date}
    # NOTE(review): a branch_id param replaces (does not merge with) the
    # date filter — confirm that is intended.
    hash = {:branch_id => params[:branch_id]} if params[:branch_id] and not params[:branch_id].blank?
    center_ids = LoanHistory.all(hash).aggregate(:center_id)
    loans = LoanHistory.all(hash).aggregate(:loan_id, :center_id).to_hash
    # restrict branch manager and center managers to their own branches
    if user = session.user and staff = user.staff_member
      hash[:branch_id] = [staff.related_branches.map{|x| x.id}, staff.centers.branches.map{|x| x.id}].uniq
      center_ids = staff.related_centers.map{|x| x.id} & center_ids
    end
    if Mfi.first.map_enabled
      @locations = Location.all(:parent_id => center_ids, :parent_type => "center").group_by{|x| x.parent_id}
    end
    # Hashes default to 0 so += works on first touch of a key.
    @fees_due, @fees_paid, @fees_overdue = Hash.new(0), Hash.new(0), Hash.new(0)
    Fee.applicable(loans.keys, {:date => @date}).each{|fa|
      @fees_due[loans[fa.loan_id]] += fa.fees_applicable
    }
    # Fees already collected before @date reduce what is still due.
    Payment.all(:type => :fees, "client.center_id" => center_ids, :received_on.lt => @date).aggregate(:loan_id, :amount.sum).each{|fp|
      @fees_due[loans[fp[0]]] -= fp[1]
    }
    Payment.all(:type => :fees, :received_on => @date, "client.center_id" => center_ids).aggregate(:loan_id, :amount.sum).each{|fp|
      @fees_paid[loans[fp[0]]] += fp[1]
    }
    @disbursals = {}
    @disbursals[:scheduled] = LoanHistory.all("loan.scheduled_disbursal_date" => @date, :date => @date).aggregate(:center_id, :scheduled_outstanding_principal.sum).to_hash
    @disbursals[:actual] = LoanHistory.all("loan.scheduled_disbursal_date" => @date, :date => @date,
      :status => [:disbursed]).aggregate(:center_id, :scheduled_outstanding_principal.sum).to_hash
    # calculating old outstanding for loans paying today, as of the last payment date
    old_outstanding = {}
    LoanHistory.sum_outstanding_grouped_by(@date - 1, [:loan], {:center_id => center_ids}).group_by{|x| old_outstanding[x.loan_id] = x}
    # calculating outstanding for loans paying today, as of today;
    # result shape: {branch_id => {center_id => [loan rows]}}
    # NOTE(review): s+=x relies on a Hash#+ extension (presumably extlib) —
    # core Ruby has no Hash#+; verify.
    new_outstanding = LoanHistory.sum_outstanding_grouped_by(@date, [:loan], {:center_id => center_ids}, [:branch, :center, :principal_due,
      :interest_due, :principal_paid, :interest_paid]).group_by{|x|
      x.branch_id
    }.map{|branch_id, centers|
      {branch_id => centers.group_by{|loan| loan.center_id}}
    }.reduce({}){|s,x| s+=x}
    @centers = Center.all(:id => center_ids)
    @branches = @centers.branches.map{|b| [b.id, b.name]}.to_hash
    @centers = @centers.map{|c| [c.id, c]}.to_hash
    #get payments done on @date in format of {<loan_id> => [<principal>, <interest>]}
    @payments = LoanHistory.all(:date => @date, :center_id => center_ids).aggregate(:loan_id, :principal_paid.sum, :interest_paid.sum).group_by{|x|
      x[0]
    }
    #advance balance
    new_advance_balances = LoanHistory.advance_balance(@date, [:center], {:center_id => center_ids}).group_by{|x| x.center_id}
    old_advance_balances = LoanHistory.advance_balance(@date - 1, [:center], {:center_id => center_ids}).group_by{|x| x.center_id}
    # fill out @data with {branch => {center => row}}; row is an 11-column
    # accumulator (indexes 2..10 used below).
    @data = {}
    new_outstanding.each{|branch_id, centers|
      @data[branch_id] ||= {}
      centers.each{|center_id, loans|
        @data[branch_id][center_id] ||= Array.new(11, 0)
        loans.each{|loan|
          if old_outstanding.key?(loan.loan_id)
            # scheduled due (principal, then interest)
            @data[branch_id][center_id][2] += old_outstanding[loan.loan_id].actual_outstanding_principal - loan.scheduled_outstanding_principal
            @data[branch_id][center_id][3] += old_outstanding[loan.loan_id].actual_outstanding_total - old_outstanding[loan.loan_id].actual_outstanding_principal - loan.scheduled_outstanding_total + loan.scheduled_outstanding_principal
            #payments
            if @payments.key?(loan.loan_id)
              @data[branch_id][center_id][4] += @payments[loan.loan_id][0][1]
              @data[branch_id][center_id][5] += @payments[loan.loan_id][0][2]
            end
            # overdue
            @data[branch_id][center_id][6] += loan.actual_outstanding_principal - loan.scheduled_outstanding_principal if loan.actual_outstanding_principal > loan.scheduled_outstanding_principal
            @data[branch_id][center_id][7] += loan.actual_outstanding_total - loan.scheduled_outstanding_total if loan.actual_outstanding_total > loan.scheduled_outstanding_total
            #advance collected (negative dues with positive payments)
            @data[branch_id][center_id][8] += (-1 * loan.principal_due) if loan.principal_due < 0 and loan.principal_paid > 0
            @data[branch_id][center_id][8] += (-1 * loan.interest_due) if loan.interest_due < 0 and loan.interest_paid > 0
          end
        }
        collected = @data[branch_id][center_id][8]
        #advance balance
        new_balance = 0
        new_balance = new_advance_balances[center_id][0].balance_total if new_advance_balances[center_id]
        @data[branch_id][center_id][10] += new_balance
        # adjusted = old balance + collected today - new balance
        old_balance = old_advance_balances[center_id] ? old_advance_balances[center_id][0].balance_total : 0
        @data[branch_id][center_id][9] += old_balance + collected - new_balance
      }
    }
    render :template => 'dashboard/today'
  end

  # Parses the tail of the Merb log file into Line structs for display.
  def show_log
    @@models ||= DataMapper::Model.descendants.map{|d| [d.to_s.snake_case.pluralize, d]}.to_hash
    @@not_reported_controllers ||= ["merb_auth_slice_password/sessions", "exceptions", "entrance", "login", "searches"]
    @lines = []
    # asset/dashboard requests are noise and skipped
    ignore_regex = /\/images|\/javascripts|\/stylesheets|\/open-flash-chart|\/searches|\/dashboard|\/graph_data|\/browse/
    `tail -500 log/#{Merb.env}.log`.split(/\n/).reverse.each{|line|
      next if ignore_regex.match(line)
      ip, date_time, timezone, method, uri, http_type, status, size, response_time = line.strip.gsub(/(\s\-\s)|\[|\]|\"/, "").split(/\s/).reject{|x| x==""}
      uri = URI.parse(uri)
      method = method.to_s.upcase || "GET"
      # Re-run the router on the logged request to recover controller/action.
      request = Merb::Request.new(
        Merb::Const::REQUEST_PATH => uri.path,
        Merb::Const::REQUEST_METHOD => method,
        Merb::Const::QUERY_STRING => uri.query ? CGI.unescape(uri.query) : "")
      route = Merb::Router.match(request)[1] rescue nil
      route.merge!(uri.query.split("&").map{|x| x.split("=")}.to_hash) if uri.query
      next if not route[:controller] or @@not_reported_controllers.include?(route[:controller])
      model = @@models[route[:controller]] if @@models.key?(route[:controller])
      @lines.push(Line.new(ip, date_time, method.downcase.to_sym, model, route, status.to_i, response_time.split(/\//)[0]))
    }
    render
  end

  private

  # For staff users: branch managers keep the default view; center managers
  # get their own centers and a dedicated template.
  def get_centers_and_template
    if session.user.staff_member
      @staff ||= session.user.staff_member
      if branch = Branch.all(:manager => @staff)
        true
      else
        @centers = Center.all(:manager => @staff)
        @template = 'browse/for_staff_member'
      end
    end
  end
end
Fixed the day sheet so it no longer fails when there is no data.
# Merb controller for dashboard-style browsing of branches, centers,
# today's collections ("day sheet") and the application log.
class Browse < Application
  before :get_centers_and_template
  before :display_from_cache, :only => [:hq_tab]
  after :store_to_cache, :only => [:hq_tab]

  # One parsed request line from the Merb log file (see #show_log).
  Line = Struct.new(:ip, :date_time, :method, :model, :url, :status, :response_time)

  def index
    render :template => @template
  end

  def branches
    redirect resource(:branches)
  end

  # Paginated center list; staff members only see centers they manage.
  def centers
    if session.user.role == :staff_member
      @centers = Center.all(:manager => session.user.staff_member, :order => [:meeting_day]).paginate(:per_page => 15, :page => params[:page] || 1)
    else
      @centers = Center.all.paginate(:per_page => 15, :page => params[:page] || 1)
    end
    @branch = @centers.branch[0]
    render :template => 'centers/index'
  end

  def hq_tab
    partial :totalinfo
  end

  # Builds the day sheet: fees, payments, overdues, disbursals and advance
  # balances for every center with a repayment scheduled on @date. The
  # Payment queries are skipped entirely when no centers match, so an empty
  # day renders without errors.
  def centers_paying_today
    @date = params[:date] ? Date.parse(params[:date]) : Date.today
    hash = {:date => @date}
    # NOTE(review): a branch_id param replaces (does not merge with) the
    # date filter — confirm that is intended.
    hash = {:branch_id => params[:branch_id]} if params[:branch_id] and not params[:branch_id].blank?
    center_ids = LoanHistory.all(hash).aggregate(:center_id)
    loans = LoanHistory.all(hash).aggregate(:loan_id, :center_id).to_hash
    # restrict branch manager and center managers to their own branches
    if user = session.user and staff = user.staff_member
      hash[:branch_id] = [staff.related_branches.map{|x| x.id}, staff.centers.branches.map{|x| x.id}].uniq
      center_ids = staff.related_centers.map{|x| x.id} & center_ids
    end
    if Mfi.first.map_enabled
      @locations = Location.all(:parent_id => center_ids, :parent_type => "center").group_by{|x| x.parent_id}
    end
    # Hashes default to 0 so += works on first touch of a key.
    @fees_due, @fees_paid, @fees_overdue = Hash.new(0), Hash.new(0), Hash.new(0)
    Fee.applicable(loans.keys, {:date => @date}).each{|fa|
      @fees_due[loans[fa.loan_id]] += fa.fees_applicable
    }
    # Fees already collected before @date reduce what is still due.
    # Guarded: with no centers the Payment query would be malformed/useless.
    Payment.all(:type => :fees, "client.center_id" => center_ids, :received_on.lt => @date).aggregate(:loan_id, :amount.sum).each{|fp|
      @fees_due[loans[fp[0]]] -= fp[1]
    } if center_ids.length>0
    Payment.all(:type => :fees, :received_on => @date, "client.center_id" => center_ids).aggregate(:loan_id, :amount.sum).each{|fp|
      @fees_paid[loans[fp[0]]] += fp[1]
    } if center_ids.length>0
    @disbursals = {}
    @disbursals[:scheduled] = LoanHistory.all("loan.scheduled_disbursal_date" => @date, :date => @date).aggregate(:center_id, :scheduled_outstanding_principal.sum).to_hash
    @disbursals[:actual] = LoanHistory.all("loan.scheduled_disbursal_date" => @date, :date => @date,
      :status => [:disbursed]).aggregate(:center_id, :scheduled_outstanding_principal.sum).to_hash
    # calculating old outstanding for loans paying today, as of the last payment date
    old_outstanding = {}
    LoanHistory.sum_outstanding_grouped_by(@date - 1, [:loan], {:center_id => center_ids}).group_by{|x| old_outstanding[x.loan_id] = x}
    # calculating outstanding for loans paying today, as of today;
    # result shape: {branch_id => {center_id => [loan rows]}}
    # NOTE(review): s+=x relies on a Hash#+ extension (presumably extlib) —
    # core Ruby has no Hash#+; verify.
    new_outstanding = LoanHistory.sum_outstanding_grouped_by(@date, [:loan], {:center_id => center_ids}, [:branch, :center, :principal_due,
      :interest_due, :principal_paid, :interest_paid]).group_by{|x|
      x.branch_id
    }.map{|branch_id, centers|
      {branch_id => centers.group_by{|loan| loan.center_id}}
    }.reduce({}){|s,x| s+=x}
    @centers = Center.all(:id => center_ids)
    @branches = @centers.branches.map{|b| [b.id, b.name]}.to_hash
    @centers = @centers.map{|c| [c.id, c]}.to_hash
    #get payments done on @date in format of {<loan_id> => [<principal>, <interest>]}
    @payments = LoanHistory.all(:date => @date, :center_id => center_ids).aggregate(:loan_id, :principal_paid.sum, :interest_paid.sum).group_by{|x|
      x[0]
    }
    #advance balance
    new_advance_balances = LoanHistory.advance_balance(@date, [:center], {:center_id => center_ids}).group_by{|x| x.center_id}
    old_advance_balances = LoanHistory.advance_balance(@date - 1, [:center], {:center_id => center_ids}).group_by{|x| x.center_id}
    # fill out @data with {branch => {center => row}}; row is an 11-column
    # accumulator (indexes 2..10 used below).
    @data = {}
    new_outstanding.each{|branch_id, centers|
      @data[branch_id] ||= {}
      centers.each{|center_id, loans|
        @data[branch_id][center_id] ||= Array.new(11, 0)
        loans.each{|loan|
          if old_outstanding.key?(loan.loan_id)
            # scheduled due (principal, then interest)
            @data[branch_id][center_id][2] += old_outstanding[loan.loan_id].actual_outstanding_principal - loan.scheduled_outstanding_principal
            @data[branch_id][center_id][3] += old_outstanding[loan.loan_id].actual_outstanding_total - old_outstanding[loan.loan_id].actual_outstanding_principal - loan.scheduled_outstanding_total + loan.scheduled_outstanding_principal
            #payments
            if @payments.key?(loan.loan_id)
              @data[branch_id][center_id][4] += @payments[loan.loan_id][0][1]
              @data[branch_id][center_id][5] += @payments[loan.loan_id][0][2]
            end
            # overdue
            @data[branch_id][center_id][6] += loan.actual_outstanding_principal - loan.scheduled_outstanding_principal if loan.actual_outstanding_principal > loan.scheduled_outstanding_principal
            @data[branch_id][center_id][7] += loan.actual_outstanding_total - loan.scheduled_outstanding_total if loan.actual_outstanding_total > loan.scheduled_outstanding_total
            #advance collected (negative dues with positive payments)
            @data[branch_id][center_id][8] += (-1 * loan.principal_due) if loan.principal_due < 0 and loan.principal_paid > 0
            @data[branch_id][center_id][8] += (-1 * loan.interest_due) if loan.interest_due < 0 and loan.interest_paid > 0
          end
        }
        collected = @data[branch_id][center_id][8]
        #advance balance
        new_balance = 0
        new_balance = new_advance_balances[center_id][0].balance_total if new_advance_balances[center_id]
        @data[branch_id][center_id][10] += new_balance
        # adjusted = old balance + collected today - new balance
        old_balance = old_advance_balances[center_id] ? old_advance_balances[center_id][0].balance_total : 0
        @data[branch_id][center_id][9] += old_balance + collected - new_balance
      }
    }
    render :template => 'dashboard/today'
  end

  # Parses the tail of the Merb log file into Line structs for display.
  def show_log
    @@models ||= DataMapper::Model.descendants.map{|d| [d.to_s.snake_case.pluralize, d]}.to_hash
    @@not_reported_controllers ||= ["merb_auth_slice_password/sessions", "exceptions", "entrance", "login", "searches"]
    @lines = []
    # asset/dashboard requests are noise and skipped
    ignore_regex = /\/images|\/javascripts|\/stylesheets|\/open-flash-chart|\/searches|\/dashboard|\/graph_data|\/browse/
    `tail -500 log/#{Merb.env}.log`.split(/\n/).reverse.each{|line|
      next if ignore_regex.match(line)
      ip, date_time, timezone, method, uri, http_type, status, size, response_time = line.strip.gsub(/(\s\-\s)|\[|\]|\"/, "").split(/\s/).reject{|x| x==""}
      uri = URI.parse(uri)
      method = method.to_s.upcase || "GET"
      # Re-run the router on the logged request to recover controller/action.
      request = Merb::Request.new(
        Merb::Const::REQUEST_PATH => uri.path,
        Merb::Const::REQUEST_METHOD => method,
        Merb::Const::QUERY_STRING => uri.query ? CGI.unescape(uri.query) : "")
      route = Merb::Router.match(request)[1] rescue nil
      route.merge!(uri.query.split("&").map{|x| x.split("=")}.to_hash) if uri.query
      next if not route[:controller] or @@not_reported_controllers.include?(route[:controller])
      model = @@models[route[:controller]] if @@models.key?(route[:controller])
      @lines.push(Line.new(ip, date_time, method.downcase.to_sym, model, route, status.to_i, response_time.split(/\//)[0]))
    }
    render
  end

  private

  # For staff users: branch managers keep the default view; center managers
  # get their own centers and a dedicated template.
  def get_centers_and_template
    if session.user.staff_member
      @staff ||= session.user.staff_member
      if branch = Branch.all(:manager => @staff)
        true
      else
        @centers = Center.all(:manager => @staff)
        @template = 'browse/for_staff_member'
      end
    end
  end
end
|
# GET /surveys — list every survey.
get '/surveys' do
  @all_surveys = Survey.all
  erb :"survey/index"
end

# GET /surveys/new — form for creating a survey.
get '/surveys/new' do
  erb :"survey/new"
end

# GET /surveys/:id — show a single survey (nil when the id is unknown).
get '/surveys/:id' do
  @the_survey = Survey.find_by(:id => params[:id])
  erb :"survey/show"
end

# POST /surveys — create a survey and redirect to its show page.
post '/surveys' do
  @new_survey = Survey.new(:title => params[:title], :description => params[:description])
  if @new_survey.save
    redirect "/surveys/#{@new_survey.id}"
  else
    # NOTE(review): 404 is an odd status for a failed create — 422 would be
    # conventional; confirm no client depends on 404.
    [404, "This survey couldn't be added. Sorry!"]
  end
end
Working create route for surveys: new surveys are now associated with the logged-in user.
# GET /surveys — list every survey.
get '/surveys' do
  @all_surveys = Survey.all
  erb :"survey/index"
end

# GET /surveys/new — form for creating a survey.
get '/surveys/new' do
  erb :"survey/new"
end

# GET /surveys/:id — show a single survey (nil when the id is unknown).
get '/surveys/:id' do
  @the_survey = Survey.find_by(:id => params[:id])
  erb :"survey/show"
end

# POST /surveys — create a survey owned by the logged-in user (creator_id
# from the session) and redirect to its show page.
post '/surveys' do
  @new_survey = Survey.new(:title => params[:title], :description => params[:description], :creator_id => session[:user_id])
  if @new_survey.save
    redirect "/surveys/#{@new_survey.id}"
  else
    # NOTE(review): 404 is an odd status for a failed create — 422 would be
    # conventional; confirm no client depends on 404.
    [404, "This survey couldn't be added. Sorry!"]
  end
end
|
require 'premailer'

# Builds and delivers a commit-notification email as a multipart
# (plain text + HTML) message via sendmail, SMTP, NNTP or debug output.
class GitCommitNotifier::Emailer
  DEFAULT_STYLESHEET_PATH = File.join(File.dirname(__FILE__), '/../../template/styles.css').freeze
  TEMPLATE = File.join(File.dirname(__FILE__), '/../../template/email.html.erb').freeze
  # Option keys copied into instance variables by #initialize.
  PARAMETERS = %w[project_path recipient from_address from_alias subject text_message html_message ref_name old_rev new_rev].freeze

  # Shared configuration hash.
  # NOTE(review): @@config is a class variable shared by all instances —
  # the most recent initializer wins.
  def config
    @@config
  end

  def initialize(config, options = {})
    @@config = config || {}
    PARAMETERS.each do |name|
      instance_variable_set("@#{name}".to_sym, options[name.to_sym])
    end
  end

  class << self
    # Clears the memoised template (used by tests).
    def reset_template
      @template = nil
    end

    # Raw ERB source: the configured custom_template or the bundled default.
    def template_source
      template_file = @@config['custom_template'] || TEMPLATE
      IO.read(template_file)
    end

    # Memoised template object; prefers Erubis, falls back to stdlib ERB.
    def template
      unless @template
        source = template_source
        begin
          require 'erubis'
          @template = Erubis::Eruby.new(source)
        rescue LoadError
          require 'erb'
          @template = ERB.new(source)
        end
      end
      @template
    end
  end

  # Renders the HTML body; inlines CSS via Premailer unless expand_css is
  # explicitly disabled in the config.
  def mail_html_message
    html = GitCommitNotifier::Emailer.template.result(binding)
    if config['expand_css'].nil? || config['expand_css']
      premailer = Premailer.new(html, :with_html_string => true, :adapter => :nokogiri)
      html = premailer.to_inline_css
    end
    html
  end

  # Pseudo-random MIME boundary, memoised per message.
  def boundary
    return @boundary if @boundary
    srand
    seed = "#{rand(10000)}#{Time.now}"
    @boundary = Digest::SHA1.hexdigest(seed)
  end

  # Contents of the configured stylesheet, or the bundled default.
  def stylesheet_string
    stylesheet = config['stylesheet'] || DEFAULT_STYLESHEET_PATH
    IO.read(stylesheet)
  end

  # Prints the message to stdout instead of sending it.
  def perform_delivery_debug(content)
    content.each do |line|
      puts line
    end
  end

  # Sends via Net::SMTP (optionally STARTTLS); @recipient may be a
  # comma-separated list.
  def perform_delivery_smtp(content, smtp_settings)
    settings = { }
    %w(address port domain user_name password authentication enable_tls).each do |key|
      val = smtp_settings[key].to_s.empty? ? nil : smtp_settings[key]
      settings.merge!({ key => val})
    end
    main_smtp = Net::SMTP.new settings['address'], settings['port']
    main_smtp.enable_starttls if settings['enable_tls']
    main_smtp.start( settings['domain'],
      settings['user_name'], settings['password'], settings['authentication']) do |smtp|
      recipients = @recipient.dup
      recipients.force_encoding('ASCII-8BIT') if recipients.respond_to?(:force_encoding)
      recipients = recipients.split(",")
      smtp.open_message_stream(@from_address, recipients) do |f|
        content.each do |line|
          line.force_encoding('ASCII-8BIT') if line.respond_to?(:force_encoding)
          # NOTE(review): puts emits LF, not the CRLF required by SMTP —
          # candidate for f.print(line, "\r\n").
          f.puts line
        end
      end
    end
  end

  # Pipes the message into a sendmail binary.
  def perform_delivery_sendmail(content, options = nil)
    sendmail_settings = {
      'location' => "/usr/sbin/sendmail",
      'arguments' => "-i -t"
    }.merge(options || {})
    command = "#{sendmail_settings['location']} #{sendmail_settings['arguments']}"
    IO.popen(command, "w+") do |f|
      f.write(content.join("\n"))
      f.flush
    end
  end

  # Posts the message to an NNTP newsgroup.
  def perform_delivery_nntp(content, nntp_settings)
    require 'nntp'
    Net::NNTP.start(nntp_settings['address'], nntp_settings['port']) do |nntp|
      nntp.post content
    end
  end

  # Assembles the multipart message and dispatches it via the configured
  # delivery method; prints to stdout when no recipient is set.
  def send
    to_tag = config['delivery_method'] == 'nntp' ? 'Newsgroups' : 'To'
    quoted_from_alias = quote_if_necessary("#{@from_alias}",'utf-8')
    from = @from_alias.empty? ? @from_address : "#{quoted_from_alias} <#{@from_address}>"
    plaintext = if config['add_plaintext'].nil? || config['add_plaintext']
      @text_message
    else
      "Plain text part omitted. Consider setting add_plaintext in configuration."
    end
    content = [
      "From: #{from}",
      "#{to_tag}: #{quote_if_necessary(@recipient, 'utf-8')}",
      "Subject: #{quote_if_necessary(@subject, 'utf-8')}",
      "X-Mailer: git-commit-notifier",
      "X-Git-Refname: #{@ref_name}",
      "X-Git-Oldrev: #{@old_rev}",
      "X-Git-Newrev: #{@new_rev}",
      "Mime-Version: 1.0",
      "Content-Type: multipart/alternative; boundary=#{boundary}\n\n\n",
      "--#{boundary}",
      "Content-Type: text/plain; charset=utf-8",
      "Content-Transfer-Encoding: quoted-printable",
      "Content-Disposition: inline\n\n\n",
      encode_quoted_printable_message(plaintext),
      "\n--#{boundary}",
      "Content-Type: text/html; charset=utf-8",
      "Content-Transfer-Encoding: quoted-printable",
      "Content-Disposition: inline\n\n\n",
      encode_quoted_printable_message(mail_html_message),
      "--#{boundary}--"]
    if @recipient.empty?
      puts content.join("\n")
      return
    end
    case config['delivery_method'].to_sym
    when :smtp then perform_delivery_smtp(content, config['smtp_server'])
    when :nntp then perform_delivery_nntp(content, config['nntp_settings'])
    when :debug then perform_delivery_debug(content)
    else # sendmail
      perform_delivery_sendmail(content, config['sendmail_options'])
    end
  end

  # Convert a message into quoted printable encoding,
  # limiting line length to 76 characters per spec
  # Encoding messages in this way ensures that they
  # won't violate rules for maximum line length, which
  # can result in the MTA breaking lines at inconvenient points,
  # such as in the middle of UTF8 characters.
  def encode_quoted_printable_message(text)
    StringIO.open("", "w") do |output|
      # Character encoding of output string can be plain US-ASCII since quoted-printable is plain ASCII
      output.string.force_encoding("US-ASCII") if output.string.respond_to?(:force_encoding)
      line_max = 76
      line_len = 0
      input = StringIO.new(text, "r")
      input.each_byte do |b|
        case (b)
        when 9, 32..60, 62..126
          # printable byte: emit as-is, soft-wrapping with "=\r\n"
          if line_len >= line_max - 1
            output << "=\r\n"
            line_len = 0
          end
          output << b.chr
          line_len += 1
        else
          # other bytes: emit as =XX escape (3 chars)
          if line_len >= line_max - 3
            output << "=\r\n"
            line_len = 0
          end
          output << "=%02X" % b
          line_len += 3
        end
      end
      output << "=\r\n" if line_len > 0
      output.string
    end
  end

  # Convert the given text into quoted printable format, with an instruction
  # that the text be eventually interpreted in the given charset.
  def quoted_printable(text, charset)
    text = text.gsub( /[^a-z ]/i ) { quoted_printable_encode($&) }.
      gsub( / /, "_" )
    "=?#{charset}?Q?#{text}?="
  end

  # Convert the given character to quoted printable format, taking into
  # account multi-byte characters (if executing with $KCODE="u", for instance)
  def quoted_printable_encode(character)
    result = ""
    character.each_byte { |b| result << "=%02X" % b }
    result
  end

  # A quick-and-dirty regexp for determining whether a string contains any
  # characters that need escaping.
  CHARS_NEEDING_QUOTING = /[\000-\011\013\014\016-\037\177-\377]/

  # Quote the given text if it contains any "illegal" characters
  def quote_if_necessary(text, charset)
    text = text.dup.force_encoding(Encoding::ASCII_8BIT) if text.respond_to?(:force_encoding)
    (text =~ CHARS_NEEDING_QUOTING) ?
      quoted_printable(text, charset) :
      text
  end
end
Correctly separate message lines with CRLF, not LF alone.
require 'premailer'

# Builds and delivers a commit-notification email as a multipart
# (plain text + HTML) message via sendmail, SMTP, NNTP or debug output.
# Message lines are joined with CRLF on the wire, per the mail RFCs.
class GitCommitNotifier::Emailer
  DEFAULT_STYLESHEET_PATH = File.join(File.dirname(__FILE__), '/../../template/styles.css').freeze
  TEMPLATE = File.join(File.dirname(__FILE__), '/../../template/email.html.erb').freeze
  # Option keys copied into instance variables by #initialize.
  PARAMETERS = %w[project_path recipient from_address from_alias subject text_message html_message ref_name old_rev new_rev].freeze

  # Shared configuration hash.
  # NOTE(review): @@config is a class variable shared by all instances —
  # the most recent initializer wins.
  def config
    @@config
  end

  def initialize(config, options = {})
    @@config = config || {}
    PARAMETERS.each do |name|
      instance_variable_set("@#{name}".to_sym, options[name.to_sym])
    end
  end

  class << self
    # Clears the memoised template (used by tests).
    def reset_template
      @template = nil
    end

    # Raw ERB source: the configured custom_template or the bundled default.
    def template_source
      template_file = @@config['custom_template'] || TEMPLATE
      IO.read(template_file)
    end

    # Memoised template object; prefers Erubis, falls back to stdlib ERB.
    def template
      unless @template
        source = template_source
        begin
          require 'erubis'
          @template = Erubis::Eruby.new(source)
        rescue LoadError
          require 'erb'
          @template = ERB.new(source)
        end
      end
      @template
    end
  end

  # Renders the HTML body; inlines CSS via Premailer unless expand_css is
  # explicitly disabled in the config.
  def mail_html_message
    html = GitCommitNotifier::Emailer.template.result(binding)
    if config['expand_css'].nil? || config['expand_css']
      premailer = Premailer.new(html, :with_html_string => true, :adapter => :nokogiri)
      html = premailer.to_inline_css
    end
    html
  end

  # Pseudo-random MIME boundary, memoised per message.
  def boundary
    return @boundary if @boundary
    srand
    seed = "#{rand(10000)}#{Time.now}"
    @boundary = Digest::SHA1.hexdigest(seed)
  end

  # Contents of the configured stylesheet, or the bundled default.
  def stylesheet_string
    stylesheet = config['stylesheet'] || DEFAULT_STYLESHEET_PATH
    IO.read(stylesheet)
  end

  # Prints the message to stdout instead of sending it.
  def perform_delivery_debug(content)
    content.each do |line|
      puts line
    end
  end

  # Sends via Net::SMTP (optionally STARTTLS); @recipient may be a
  # comma-separated list. Lines are terminated with CRLF as SMTP requires.
  def perform_delivery_smtp(content, smtp_settings)
    settings = { }
    %w(address port domain user_name password authentication enable_tls).each do |key|
      val = smtp_settings[key].to_s.empty? ? nil : smtp_settings[key]
      settings.merge!({ key => val})
    end
    main_smtp = Net::SMTP.new settings['address'], settings['port']
    main_smtp.enable_starttls if settings['enable_tls']
    main_smtp.start( settings['domain'],
      settings['user_name'], settings['password'], settings['authentication']) do |smtp|
      recipients = @recipient.dup
      recipients.force_encoding('ASCII-8BIT') if recipients.respond_to?(:force_encoding)
      recipients = recipients.split(",")
      smtp.open_message_stream(@from_address, recipients) do |f|
        content.each do |line|
          line.force_encoding('ASCII-8BIT') if line.respond_to?(:force_encoding)
          f.print(line, "\r\n")
        end
      end
    end
  end

  # Pipes the message into a sendmail binary, one CRLF-terminated line at
  # a time.
  def perform_delivery_sendmail(content, options = nil)
    sendmail_settings = {
      'location' => "/usr/sbin/sendmail",
      'arguments' => "-i -t"
    }.merge(options || {})
    command = "#{sendmail_settings['location']} #{sendmail_settings['arguments']}"
    IO.popen(command, "w+") do |f|
      content.each do |line|
        f.print(line, "\r\n")
      end
      f.flush
    end
  end

  # Posts the message to an NNTP newsgroup.
  def perform_delivery_nntp(content, nntp_settings)
    require 'nntp'
    Net::NNTP.start(nntp_settings['address'], nntp_settings['port']) do |nntp|
      nntp.post content
    end
  end

  # Assembles the multipart message (headers and blank separators as
  # individual lines) and dispatches it via the configured delivery method;
  # prints to stdout when no recipient is set.
  def send
    to_tag = config['delivery_method'] == 'nntp' ? 'Newsgroups' : 'To'
    quoted_from_alias = quote_if_necessary("#{@from_alias}",'utf-8')
    from = @from_alias.empty? ? @from_address : "#{quoted_from_alias} <#{@from_address}>"
    plaintext = if config['add_plaintext'].nil? || config['add_plaintext']
      @text_message
    else
      "Plain text part omitted. Consider setting add_plaintext in configuration."
    end
    content = [
      "From: #{from}",
      "#{to_tag}: #{quote_if_necessary(@recipient, 'utf-8')}",
      "Subject: #{quote_if_necessary(@subject, 'utf-8')}",
      "X-Mailer: git-commit-notifier",
      "X-Git-Refname: #{@ref_name}",
      "X-Git-Oldrev: #{@old_rev}",
      "X-Git-Newrev: #{@new_rev}",
      "Mime-Version: 1.0",
      "Content-Type: multipart/alternative; boundary=#{boundary}",
      "",
      "--#{boundary}",
      "Content-Type: text/plain; charset=utf-8",
      "Content-Transfer-Encoding: quoted-printable",
      "Content-Disposition: inline",
      "",
      encode_quoted_printable_message(plaintext),
      "--#{boundary}",
      "Content-Type: text/html; charset=utf-8",
      "Content-Transfer-Encoding: quoted-printable",
      "Content-Disposition: inline",
      "",
      encode_quoted_printable_message(mail_html_message),
      "--#{boundary}--"]
    if @recipient.empty?
      puts content.join("\n")
      return
    end
    case config['delivery_method'].to_sym
    when :smtp then perform_delivery_smtp(content, config['smtp_server'])
    when :nntp then perform_delivery_nntp(content, config['nntp_settings'])
    when :debug then perform_delivery_debug(content)
    else # sendmail
      perform_delivery_sendmail(content, config['sendmail_options'])
    end
  end

  # Convert a message into quoted printable encoding,
  # limiting line length to 76 characters per spec
  # Encoding messages in this way ensures that they
  # won't violate rules for maximum line length, which
  # can result in the MTA breaking lines at inconvenient points,
  # such as in the middle of UTF8 characters.
  def encode_quoted_printable_message(text)
    StringIO.open("", "w") do |output|
      # Character encoding of output string can be plain US-ASCII since quoted-printable is plain ASCII
      output.string.force_encoding("US-ASCII") if output.string.respond_to?(:force_encoding)
      line_max = 76
      line_len = 0
      input = StringIO.new(text, "r")
      input.each_byte do |b|
        case (b)
        when 9, 32..60, 62..126
          # printable byte: emit as-is, soft-wrapping with "=\r\n"
          if line_len >= line_max - 1
            output << "=\r\n"
            line_len = 0
          end
          output << b.chr
          line_len += 1
        else
          # other bytes: emit as =XX escape (3 chars)
          if line_len >= line_max - 3
            output << "=\r\n"
            line_len = 0
          end
          output << "=%02X" % b
          line_len += 3
        end
      end
      output << "=\r\n" if line_len > 0
      output.string
    end
  end

  # Convert the given text into quoted printable format, with an instruction
  # that the text be eventually interpreted in the given charset.
  def quoted_printable(text, charset)
    text = text.gsub( /[^a-z ]/i ) { quoted_printable_encode($&) }.
      gsub( / /, "_" )
    "=?#{charset}?Q?#{text}?="
  end

  # Convert the given character to quoted printable format, taking into
  # account multi-byte characters (if executing with $KCODE="u", for instance)
  def quoted_printable_encode(character)
    result = ""
    character.each_byte { |b| result << "=%02X" % b }
    result
  end

  # A quick-and-dirty regexp for determining whether a string contains any
  # characters that need escaping.
  CHARS_NEEDING_QUOTING = /[\000-\011\013\014\016-\037\177-\377]/

  # Quote the given text if it contains any "illegal" characters
  def quote_if_necessary(text, charset)
    text = text.dup.force_encoding(Encoding::ASCII_8BIT) if text.respond_to?(:force_encoding)
    (text =~ CHARS_NEEDING_QUOTING) ?
      quoted_printable(text, charset) :
      text
  end
end
|
require_relative '../ui/color'

module GitCrecord
  module Diff
    # Base class for a selectable row in the diff UI. Subclasses are
    # expected to provide #to_s and #x_offset (not defined here).
    class Difference
      attr_accessor :expanded
      attr_accessor :y1, :y2   # screen row range last occupied by #print
      attr_reader :subs        # child Difference nodes

      # Checkbox prefix per selection state.
      SELECTED_MAP = {
        true => '[X] ',
        false => '[ ] ',
        :partly => '[~] '
      }.freeze
      SELECTION_MARKER_WIDTH = SELECTED_MAP[true].size

      def initialize
        @subs = []
      end

      # to_s split into chunks that fit the usable content width.
      def strings(width)
        to_s.scan(/.{1,#{content_width(width)}}/)
      end

      # Rows needed to render this entry plus all children at the width.
      def max_height(width)
        width = content_width(width)
        ((to_s.size - 1).abs / width) + 1 + subs.reduce(0) do |a, e|
          a + e.max_height(width)
        end
      end

      # Width left for content after the indent and selection marker
      # (never less than 1).
      def content_width(width)
        [1, width - x_offset - SELECTION_MARKER_WIDTH].max
      end

      def selectable?
        true
      end

      # Children that can carry a selection state; memoised.
      def selectable_subs
        @selectable_subs ||= subs.select(&:selectable?)
      end

      # true/false when all selectable children agree, :partly otherwise.
      def selected
        s = selectable_subs.map(&:selected).uniq
        return s[0] if s.size == 1
        :partly
      end

      def selected=(value)
        selectable_subs.each{ |sub| sub.selected = value }
      end

      # Curses attributes for the content text.
      def style(is_highlighted)
        return Curses::A_BOLD | UI::Color.hl if is_highlighted
        Curses::A_BOLD | UI::Color.normal
      end

      # Curses attributes for the indent/selection prefix.
      def prefix_style(_is_highlighted)
        UI::Color.normal
      end

      # Draws the entry into win starting below line_number; records the
      # occupied row range in y1/y2. Only the first line of a selectable
      # entry shows the selection marker.
      def print(win, line_number, is_highlighted)
        @y1 = line_number + 1
        prefix = SELECTED_MAP.fetch(selected)
        strings(win.width).each_with_index do |string, index|
          prefix = ' ' unless index == 0 && selectable?
          p_style = prefix_style(is_highlighted)
          win.addstr(' ' * x_offset, line_number += 1, attr: p_style)
          win.addstr(prefix, attr: p_style)
          win.addstr(string, attr: style(is_highlighted), fill: ' ')
        end
        @y2 = line_number
      end
    end
  end
end
Refactor Difference.print method
require_relative '../ui/color'

module GitCrecord
  module Diff
    # Base class for a selectable row in the diff UI. Subclasses are
    # expected to provide #to_s and #x_offset (not defined here).
    class Difference
      attr_accessor :expanded
      attr_accessor :y1, :y2   # screen row range last occupied by #print
      attr_reader :subs        # child Difference nodes

      # Checkbox prefix per selection state.
      SELECTED_MAP = {
        true => '[X] ',
        false => '[ ] ',
        :partly => '[~] '
      }.freeze
      SELECTION_MARKER_WIDTH = SELECTED_MAP[true].size

      def initialize
        @subs = []
      end

      # to_s split into chunks that fit the usable content width.
      def strings(width)
        to_s.scan(/.{1,#{content_width(width)}}/)
      end

      # Rows needed to render this entry plus all children at the width.
      def max_height(width)
        width = content_width(width)
        ((to_s.size - 1).abs / width) + 1 + subs.reduce(0) do |a, e|
          a + e.max_height(width)
        end
      end

      # Width left for content after the indent and selection marker
      # (never less than 1).
      def content_width(width)
        [1, width - x_offset - SELECTION_MARKER_WIDTH].max
      end

      def selectable?
        true
      end

      # Children that can carry a selection state; memoised.
      def selectable_subs
        @selectable_subs ||= subs.select(&:selectable?)
      end

      # true/false when all selectable children agree, :partly otherwise.
      def selected
        s = selectable_subs.map(&:selected).uniq
        return s[0] if s.size == 1
        :partly
      end

      def selected=(value)
        selectable_subs.each{ |sub| sub.selected = value }
      end

      # Curses attributes for the content text.
      def style(is_highlighted)
        return Curses::A_BOLD | UI::Color.hl if is_highlighted
        Curses::A_BOLD | UI::Color.normal
      end

      # Curses attributes for the indent/selection prefix.
      def prefix_style(_is_highlighted)
        UI::Color.normal
      end

      # Selection marker for the first rendered line of a selectable entry;
      # marker-width padding for continuation lines.
      def prefix(line_number)
        return SELECTED_MAP.fetch(selected) if line_number == 0 && selectable?
        ' ' * SELECTION_MARKER_WIDTH
      end

      # Draws the entry into win starting below line_number; records the
      # occupied row range in y1/y2.
      def print(win, line_number, is_highlighted)
        @y1 = line_number + 1
        prefix_style = prefix_style(is_highlighted)
        style = style(is_highlighted)
        strings(win.width).each_with_index do |string, index|
          win.addstr(' ' * x_offset, line_number += 1, attr: prefix_style)
          win.addstr(prefix(index), attr: prefix_style)
          win.addstr(string, attr: style, fill: ' ')
        end
        @y2 = line_number
      end
    end
  end
end
|
module GitPivotalTracker
  # Gem version string.
  VERSION = '0.0.9'
end
bump version to 0.1.0
module GitPivotalTracker
  # Gem version string.
  VERSION = '0.1.0'
end
|
require 'active_support/core_ext/hash'
require 'hadoop/samtools/errors'

module Hadoop::Samtools
  # ArgsParser is used to parse Samtools command arguments.
  # @author Wei-Ming Wu
  module ArgsParser
    # CMD_FORMAT defines the number of required files for each samtools command.
    # how to listen merge file?
    CMD_FORMAT = { faidx: [1], view: [2,3], merge: [2],
                   sort: [2], rmdup: [2], index: [1] ,
                   mpileup: [2] }.with_indifferent_access
    include Errors

    # Returns required files for a Samtools command after parsing arguments.
    #
    # @param cmd [String] a full samtools command line, e.g. "view -b in.bam"
    # @raise [InvalidCommandError] when the subcommand is unknown
    # @raise [RequiredFilesMissingError] when the file count is wrong
    # @return [Array] an Array of required files
    def parse_args cmd
      args = cmd.strip.split(/\s+/)
      cmd = args.shift
      # exact match against the known subcommand names
      if cmd !~ %r{#{CMD_FORMAT.keys.map { |c| "^#{c}$" }.join '|'}}
        raise InvalidCommandError, "Invalid command: #{cmd}."
      end
      # keep only the trailing non-option tokens (the file arguments)
      files = args.slice_before { |co| co =~ /^-/ }.to_a.last.delete_if { |co| co =~ /^-/ }
      # drop leading extras until within the allowed maximum
      files.shift while CMD_FORMAT[cmd].max < files.size
      # for view, only dotted filenames count (region specs are dropped)
      files.keep_if { |file| file =~ /^\w+(\.\w+)+$/ } if cmd == 'view'
      unless CMD_FORMAT[cmd].include? files.size
        # NOTE(review): "missing" count can be negative when too many files
        # were given — message wording could mislead.
        raise RequiredFilesMissingError,
              "Required #{CMD_FORMAT[cmd].join ' or '} file(s), " <<
              "#{CMD_FORMAT[cmd][0] - files.size} missing."
      end
      files
    end
  end
end
Fix the view command's required-file count: accept 1 or 2 files instead of 2 or 3
require 'active_support/core_ext/hash'
require 'hadoop/samtools/errors'
module Hadoop::Samtools
# ArgsParser is used to parse Samtools command arguments.
# @author Wei-Ming Wu
module ArgsParser
# CMD_FORMAT defines the number of required files for each samtools command.
# how to listen merge file?
CMD_FORMAT = { faidx: [1], view: [1,2], merge: [2],
sort: [2], rmdup: [2], index: [1] ,
mpileup: [2] }.with_indifferent_access
include Errors
# Returns required files for a Samtools command after parsing arguments.
#
# @return [Array] an Array of required files
def parse_args cmd
args = cmd.strip.split(/\s+/)
cmd = args.shift
if cmd !~ %r{#{CMD_FORMAT.keys.map { |c| "^#{c}$" }.join '|'}}
raise InvalidCommandError, "Invalid command: #{cmd}."
end
files = args.slice_before { |co| co =~ /^-/ }.to_a.last.delete_if { |co| co =~ /^-/ }
files.shift while CMD_FORMAT[cmd].max < files.size
files.keep_if { |file| file =~ /^\w+(\.\w+)+$/ } if cmd == 'view'
unless CMD_FORMAT[cmd].include? files.size
raise RequiredFilesMissingError,
"Required #{CMD_FORMAT[cmd].join ' or '} file(s), " <<
"#{CMD_FORMAT[cmd][0] - files.size} missing."
end
files
end
end
end
|
module Haml
  module I18n
    # Version constant for the haml-i18n-extractor gem.
    class Extractor
      VERSION = '0.5.0'
    end
  end
end
Bump to 0.5.1
module Haml
  module I18n
    # Version constant for the haml-i18n-extractor gem.
    class Extractor
      VERSION = '0.5.1'
    end
  end
end
|
# Yogo Data Management Toolkit
# Copyright (c) 2010 Montana State University
#
# License -> see license.txt
#
# FILE: development.rb
#
# NOTE(review): `config` is supplied by the enclosing environment block of
# this legacy-Rails config file (not visible in this fragment) — confirm.
#
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false
Cache the classes in development mode.
This isn't ideal, but makes many-to-many work with controllers.
# Yogo Data Management Toolkit
# Copyright (c) 2010 Montana State University
#
# License -> see license.txt
#
# FILE: development.rb
#
#
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
# NOTE: classes are cached even in development — per the change note, this
# keeps many-to-many associations working with controllers, at the cost of
# requiring a server restart after code changes.
config.cache_classes = true
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.raise_delivery_errors = false |
# Development-environment configuration (Rails 4.x era, judging by
# assets.raise_runtime_errors).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false
  # Do not eager load code on boot.
  config.eager_load = false
  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log
  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load
  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true
  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true
  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
  # Host/port used when generating URLs in mailer views.
  config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
end
Attempted fix for CSS not displaying on Heroku: enable config.assets.compile
# Development-environment configuration (Rails 4.x era, judging by
# assets.raise_runtime_errors).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.
  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false
  # Do not eager load code on boot.
  config.eager_load = false
  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false
  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log
  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load
  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true
  # Compile assets on the fly; added per the change note to address CSS not
  # being served on Heroku.
  config.assets.compile = true
  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true
  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
  # Host/port used when generating URLs in mailer views.
  config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
end
|
# Represents a message sent from one user to one or more others.
#
# == States
#
# Messages can be in 1 of 3 states:
# * +unsent+ - The message has not yet been sent. This is the *initial* state.
# * +queued+ - The message has been queued for future delivery.
# * +sent+ - The message has been sent.
#
# == Interacting with the message
#
# In order to perform actions on the message, such as queueing or delivering,
# you should always use the associated event action:
# * +queue+ - Queues the message so that you can send it in a separate process
# * +deliver+ - Sends the message to all of the recipients
#
# == Message visibility
#
# Although you can delete a message, it will also delete it from the inbox of all
# the message's recipients. Instead, you can change the *visibility* of the
# message. Messages have 1 of 2 states that define its visibility:
# * +visible+ - The message is visible to the sender
# * +hidden+ - The message is hidden from the sender
#
# The visibility of a message can be changed by running the associated action:
# * +hide+ -Hides the message from the sender
# * +unhide+ - Makes the message visible again
class Message < ActiveRecord::Base
  belongs_to :sender, :polymorphic => true
  # FIX: dropped "position ASC" from the recipient ordering — per the change
  # note in this file it interfered with MessageRecipient reordering.
  has_many :recipients, :class_name => 'MessageRecipient', :order => 'kind DESC', :dependent => :destroy
  validates_presence_of :state, :sender_id, :sender_type
  attr_accessible :subject, :body, :to, :cc, :bcc
  after_save :update_recipients
  scope :visible, :conditions => {:hidden_at => nil}
  # Define actions for the message
  state_machine :state, :initial => :unsent do
    # Queues the message so that it's sent in a separate process
    event :queue do
      transition :unsent => :queued, :if => :has_recipients?
    end
    # Sends the message to all of the recipients as long as at least one
    # recipient has been added
    event :deliver do
      transition [:unsent, :queued] => :sent, :if => :has_recipients?
    end
  end
  # Defines actions for the visibility of the message
  state_machine :hidden_at, :initial => :visible do
    # Hides the message from the recipient's inbox
    event :hide do
      transition all => :hidden
    end
    # Makes the message visible in the recipient's inbox
    event :unhide do
      transition all => :visible
    end
    state :visible, :value => nil
    state :hidden, :value => lambda {Time.now}, :if => lambda {|value| value}
  end
  # Directly adds the receivers on the message (i.e. they are visible to all recipients)
  def to(*receivers)
    receivers(receivers, 'to')
  end
  alias_method :to=, :to
  # Carbon copies the receivers on the message
  def cc(*receivers)
    receivers(receivers, 'cc')
  end
  alias_method :cc=, :cc
  # Blind carbon copies the receivers on the message
  def bcc(*receivers)
    receivers(receivers, 'bcc')
  end
  alias_method :bcc=, :bcc
  # Forwards this message, including the original subject and body in the new
  # message
  def forward
    message = self.class.new(:subject => subject, :body => body)
    message.sender = sender
    message
  end
  # Replies to this message, including the original subject and body in the new
  # message. Only the original direct receivers are added to the reply.
  def reply
    message = self.class.new(:subject => subject, :body => body)
    message.sender = sender
    message.to(to)
    message
  end
  # Replies to all recipients on this message, including the original subject
  # and body in the new message. All receivers (direct, cc, and bcc) are added
  # to the reply.
  def reply_to_all
    message = reply
    message.cc(cc)
    message.bcc(bcc)
    message
  end
  private
  # Create/destroy any receivers that were added/removed.
  # Runs after save: diffs the staged @receivers hash against the persisted
  # recipients, creates/deletes the difference, then clears the staging hash.
  def update_recipients
    if @receivers
      @receivers.each do |kind, receivers|
        kind_recipients = recipients.select {|recipient| recipient.kind == kind}
        new_receivers = receivers - kind_recipients.map(&:receiver)
        removed_recipients = kind_recipients.reject {|recipient| receivers.include?(recipient.receiver)}
        recipients.delete(*removed_recipients) if removed_recipients.any?
        new_receivers.each {|receiver| self.recipients.create!(:receiver => receiver, :kind => kind)}
      end
      @receivers = nil
    end
  end
  # Does this message have any recipients on it?
  def has_recipients?
    (to + cc + bcc).any?
  end
  # Creates new receivers or gets the current receivers for the given kind (to, cc, or bcc).
  # With arguments: stages them in @receivers (flushed by update_recipients).
  # Without: returns staged receivers, falling back to persisted ones.
  def receivers(receivers, kind)
    if receivers.any?
      (@receivers ||= {})[kind] = receivers.flatten.compact
    else
      @receivers && @receivers[kind] || recipients.select {|recipient| recipient.kind == kind}.map(&:receiver)
    end
  end
end
Removed ordering by position ASC which was interfering with message_recipient reordering.
# Represents a message sent from one user to one or more others.
#
# == States
#
# Messages can be in 1 of 3 states:
# * +unsent+ - The message has not yet been sent. This is the *initial* state.
# * +queued+ - The message has been queued for future delivery.
# * +sent+ - The message has been sent.
#
# == Interacting with the message
#
# In order to perform actions on the message, such as queueing or delivering,
# you should always use the associated event action:
# * +queue+ - Queues the message so that you can send it in a separate process
# * +deliver+ - Sends the message to all of the recipients
#
# == Message visibility
#
# Although you can delete a message, it will also delete it from the inbox of all
# the message's recipients. Instead, you can change the *visibility* of the
# message. Messages have 1 of 2 states that define its visibility:
# * +visible+ - The message is visible to the sender
# * +hidden+ - The message is hidden from the sender
#
# The visibility of a message can be changed by running the associated action:
# * +hide+ -Hides the message from the sender
# * +unhide+ - Makes the message visible again
class Message < ActiveRecord::Base
  belongs_to :sender, :polymorphic => true
  # NOTE: recipients are ordered by kind only; ordering by position was
  # removed because it interfered with MessageRecipient reordering.
  has_many :recipients, :class_name => 'MessageRecipient', :order => 'kind DESC', :dependent => :destroy
  validates_presence_of :state, :sender_id, :sender_type
  attr_accessible :subject, :body, :to, :cc, :bcc
  after_save :update_recipients
  scope :visible, :conditions => {:hidden_at => nil}
  # Define actions for the message
  state_machine :state, :initial => :unsent do
    # Queues the message so that it's sent in a separate process
    event :queue do
      transition :unsent => :queued, :if => :has_recipients?
    end
    # Sends the message to all of the recipients as long as at least one
    # recipient has been added
    event :deliver do
      transition [:unsent, :queued] => :sent, :if => :has_recipients?
    end
  end
  # Defines actions for the visibility of the message
  state_machine :hidden_at, :initial => :visible do
    # Hides the message from the recipient's inbox
    event :hide do
      transition all => :hidden
    end
    # Makes the message visible in the recipient's inbox
    event :unhide do
      transition all => :visible
    end
    state :visible, :value => nil
    state :hidden, :value => lambda {Time.now}, :if => lambda {|value| value}
  end
  # Directly adds the receivers on the message (i.e. they are visible to all recipients)
  def to(*receivers)
    receivers(receivers, 'to')
  end
  alias_method :to=, :to
  # Carbon copies the receivers on the message
  def cc(*receivers)
    receivers(receivers, 'cc')
  end
  alias_method :cc=, :cc
  # Blind carbon copies the receivers on the message
  def bcc(*receivers)
    receivers(receivers, 'bcc')
  end
  alias_method :bcc=, :bcc
  # Forwards this message, including the original subject and body in the new
  # message
  def forward
    message = self.class.new(:subject => subject, :body => body)
    message.sender = sender
    message
  end
  # Replies to this message, including the original subject and body in the new
  # message. Only the original direct receivers are added to the reply.
  def reply
    message = self.class.new(:subject => subject, :body => body)
    message.sender = sender
    message.to(to)
    message
  end
  # Replies to all recipients on this message, including the original subject
  # and body in the new message. All receivers (direct, cc, and bcc) are added
  # to the reply.
  def reply_to_all
    message = reply
    message.cc(cc)
    message.bcc(bcc)
    message
  end
  private
  # Create/destroy any receivers that were added/removed.
  # Runs after save: diffs the staged @receivers hash against the persisted
  # recipients, creates/deletes the difference, then clears the staging hash.
  def update_recipients
    if @receivers
      @receivers.each do |kind, receivers|
        kind_recipients = recipients.select {|recipient| recipient.kind == kind}
        new_receivers = receivers - kind_recipients.map(&:receiver)
        removed_recipients = kind_recipients.reject {|recipient| receivers.include?(recipient.receiver)}
        recipients.delete(*removed_recipients) if removed_recipients.any?
        new_receivers.each {|receiver| self.recipients.create!(:receiver => receiver, :kind => kind)}
      end
      @receivers = nil
    end
  end
  # Does this message have any recipients on it?
  def has_recipients?
    (to + cc + bcc).any?
  end
  # Creates new receivers or gets the current receivers for the given kind (to, cc, or bcc).
  # With arguments: stages them in @receivers (flushed by update_recipients).
  # Without: returns staged receivers, falling back to persisted ones.
  def receivers(receivers, kind)
    if receivers.any?
      (@receivers ||= {})[kind] = receivers.flatten.compact
    else
      @receivers && @receivers[kind] || recipients.select {|recipient| recipient.kind == kind}.map(&:receiver)
    end
  end
end
|
module Searchkick
class Query
extend Forwardable
@@metric_aggs = [:avg, :cardinality, :max, :min, :sum]
attr_reader :klass, :term, :options
attr_accessor :body
def_delegators :execute, :map, :each, :any?, :empty?, :size, :length, :slice, :[], :to_ary,
:records, :results, :suggestions, :each_with_hit, :with_details, :aggregations, :aggs,
:took, :error, :model_name, :entry_name, :total_count, :total_entries,
:current_page, :per_page, :limit_value, :padding, :total_pages, :num_pages,
:offset_value, :offset, :previous_page, :prev_page, :next_page, :first_page?, :last_page?,
:out_of_range?, :hits, :response, :to_a, :first
# Builds a query for the given model class and search term.
# Validates option keys, optionally expands emoji into their names,
# then calls prepare to build the request body.
def initialize(klass, term = "*", **options)
  # Reject any option key not in the supported list.
  unknown_keywords = options.keys - [:aggs, :body, :body_options, :boost,
    :boost_by, :boost_by_distance, :boost_where, :conversions, :conversions_term, :debug, :emoji, :exclude, :execute, :explain,
    :fields, :highlight, :includes, :index_name, :indices_boost, :limit, :load,
    :match, :misspellings, :model_includes, :offset, :operator, :order, :padding, :page, :per_page, :profile,
    :request_params, :routing, :scope_results, :select, :similar, :smart_aggs, :suggest, :track, :type, :where]
  raise ArgumentError, "unknown keywords: #{unknown_keywords.join(", ")}" if unknown_keywords.any?
  term = term.to_s
  # Replace emoji characters with their names so they become searchable text.
  if options[:emoji]
    term = EmojiParser.parse_unicode(term) { |e| " #{e.name} " }.strip
  end
  @klass = klass
  @term = term
  @options = options
  @match_suffix = options[:match] || searchkick_options[:match] || "analyzed"
  # prevent Ruby warnings
  @type = nil
  @routing = nil
  @misspellings = false
  @misspellings_below = nil
  @highlighted_fields = nil
  prepare
end
# The model's Searchkick index, or nil when no model class was given.
def searchkick_index
  return nil unless klass
  klass.searchkick_index
end
# The model's Searchkick options, or an empty hash when no model class
# was given.
def searchkick_options
  return {} unless klass
  klass.searchkick_options
end
# The class Searchkick was declared on, or nil when no model class
# was given.
def searchkick_klass
  return nil unless klass
  klass.searchkick_klass
end
# Request parameters sent to the Elasticsearch client: resolved index
# name(s), the built body, plus optional type/routing/request params.
def params
  index_names =
    if options[:index_name]
      Array(options[:index_name]).map { |v| v.respond_to?(:searchkick_index) ? v.searchkick_index.name : v }.join(",")
    elsif searchkick_index
      searchkick_index.name
    else
      "_all"
    end
  search_params = {
    index: index_names,
    body: body
  }
  search_params[:type] = @type if @type
  search_params[:routing] = @routing if @routing
  search_params.merge!(options[:request_params]) if options[:request_params]
  search_params
end
# Runs the search (memoized). When misspellings were configured with a
# :below threshold and the first pass returns too few hits, the query is
# rebuilt via prepare (misspellings then enabled) and retried once.
def execute
  @execute ||= begin
    begin
      response = execute_search
      if retry_misspellings?(response)
        prepare
        response = execute_search
      end
    rescue => e # TODO rescue type
      handle_error(e)
    end
    handle_response(response)
  end
end
# Reconstructs the current search as a curl command line, for debugging.
def to_curl
  query = params
  type = query[:type]
  index = query[:index].is_a?(Array) ? query[:index].join(",") : query[:index]
  # no easy way to tell which host the client will use
  host = Searchkick.client.transport.hosts.first
  credentials = host[:user] || host[:password] ? "#{host[:user]}:#{host[:password]}@" : nil
  "curl #{host[:protocol]}://#{credentials}#{host[:host]}:#{host[:port]}/#{CGI.escape(index)}#{type ? "/#{type.map { |t| CGI.escape(t) }.join(',')}" : ''}/_search?pretty -H 'Content-Type: application/json' -d '#{query[:body].to_json}'"
end
# Wraps the raw Elasticsearch response in Searchkick::Results, carrying
# pagination and loading options through. With :debug, dumps model options,
# search data, mapping, settings, the query (as curl) and raw results.
def handle_response(response)
  opts = {
    page: @page,
    per_page: @per_page,
    padding: @padding,
    load: @load,
    includes: options[:includes],
    model_includes: options[:model_includes],
    json: !@json.nil?,
    match_suffix: @match_suffix,
    highlighted_fields: @highlighted_fields || [],
    misspellings: @misspellings,
    term: term,
    scope_results: options[:scope_results]
  }
  if options[:debug]
    require "pp"
    puts "Searchkick Version: #{Searchkick::VERSION}"
    puts "Elasticsearch Version: #{Searchkick.server_version}"
    puts
    puts "Model Searchkick Options"
    pp searchkick_options
    puts
    puts "Search Options"
    pp options
    puts
    if searchkick_index
      puts "Model Search Data"
      begin
        pp klass.first(3).map { |r| {index: searchkick_index.record_data(r).merge(data: searchkick_index.send(:search_data, r))}}
      rescue => e
        puts "#{e.class.name}: #{e.message}"
      end
      puts
      puts "Elasticsearch Mapping"
      puts JSON.pretty_generate(searchkick_index.mapping)
      puts
      puts "Elasticsearch Settings"
      puts JSON.pretty_generate(searchkick_index.settings)
      puts
    end
    puts "Elasticsearch Query"
    puts to_curl
    puts
    puts "Elasticsearch Results"
    puts JSON.pretty_generate(response)
  end
  # set execute for multi search
  @execute = Searchkick::Results.new(searchkick_klass, response, opts)
end
# True when a misspellings :below threshold is set and the response has
# fewer hits than it; nil/false otherwise.
def retry_misspellings?(response)
  threshold = @misspellings_below
  threshold && response.dig("hits", "total") < threshold
end
private
# Translates raw transport errors into Searchkick error classes.
# NOTE(review): the HTTP status is parsed from characters 1..3 of the
# exception message — brittle; assumes the client's "[404] ..." format.
def handle_error(e)
  status_code = e.message[1..3].to_i
  if status_code == 404
    raise MissingIndexError, "Index missing - run #{reindex_command}"
  elsif status_code == 500 && (
    e.message.include?("IllegalArgumentException[minimumSimilarity >= 1]") ||
    e.message.include?("No query registered for [multi_match]") ||
    e.message.include?("[match] query does not support [cutoff_frequency]") ||
    e.message.include?("No query registered for [function_score]")
  )
    raise UnsupportedVersionError, "This version of Searchkick requires Elasticsearch 2 or greater"
  elsif status_code == 400
    if (
      e.message.include?("bool query does not support [filter]") ||
      e.message.include?("[bool] filter does not support [filter]")
    )
      raise UnsupportedVersionError, "This version of Searchkick requires Elasticsearch 2 or greater"
    elsif e.message.include?("[multi_match] analyzer [searchkick_search] not found")
      raise InvalidQueryError, "Bad mapping - run #{reindex_command}"
    else
      raise InvalidQueryError, e.message
    end
  else
    raise e
  end
end
# The reindex invocation suggested in error messages.
def reindex_command
  model = searchkick_klass
  model ? "#{model.name}.reindex" : "reindex"
end
# Sends the built request to the Elasticsearch client.
def execute_search
  Searchkick.client.search(params)
end
def prepare
boost_fields, fields = set_fields
operator = options[:operator] || "and"
# pagination
page = [options[:page].to_i, 1].max
per_page = (options[:limit] || options[:per_page] || 1_000).to_i
padding = [options[:padding].to_i, 0].max
offset = options[:offset] || (page - 1) * per_page + padding
# model and eager loading
load = options[:load].nil? ? true : options[:load]
conversions_fields = Array(options[:conversions] || searchkick_options[:conversions]).map(&:to_s)
all = term == "*"
@json = options[:body]
if @json
ignored_options = options.keys & [:aggs, :boost,
:boost_by, :boost_by_distance, :boost_where, :conversions, :conversions_term, :exclude, :explain,
:fields, :highlight, :indices_boost, :limit, :match, :misspellings, :offset, :operator, :order,
:padding, :page, :per_page, :select, :smart_aggs, :suggest, :where]
warn "The body option replaces the entire body, so the following options are ignored: #{ignored_options.join(", ")}" if ignored_options.any?
payload = @json
else
if options[:similar]
payload = {
more_like_this: {
like: term,
min_doc_freq: 1,
min_term_freq: 1,
analyzer: "searchkick_search2"
}
}
if fields.all? { |f| f.start_with?("*.") }
raise ArgumentError, "Must specify fields to search"
end
if fields != ["_all"]
payload[:more_like_this][:fields] = fields
end
elsif all
payload = {
match_all: {}
}
else
queries = []
misspellings =
if options.key?(:misspellings)
options[:misspellings]
else
true
end
if misspellings.is_a?(Hash) && misspellings[:below] && !@misspellings_below
@misspellings_below = misspellings[:below].to_i
misspellings = false
end
if misspellings != false
edit_distance = (misspellings.is_a?(Hash) && (misspellings[:edit_distance] || misspellings[:distance])) || 1
transpositions =
if misspellings.is_a?(Hash) && misspellings.key?(:transpositions)
{fuzzy_transpositions: misspellings[:transpositions]}
else
{fuzzy_transpositions: true}
end
prefix_length = (misspellings.is_a?(Hash) && misspellings[:prefix_length]) || 0
default_max_expansions = @misspellings_below ? 20 : 3
max_expansions = (misspellings.is_a?(Hash) && misspellings[:max_expansions]) || default_max_expansions
@misspellings = true
else
@misspellings = false
end
fields.each do |field|
queries_to_add = []
qs = []
factor = boost_fields[field] || 1
shared_options = {
query: term,
boost: 10 * factor
}
match_type =
if field.end_with?(".phrase")
field =
if field == "_all.phrase"
"_all"
else
field.sub(/\.phrase\z/, ".analyzed")
end
:match_phrase
else
:match
end
shared_options[:operator] = operator if match_type == :match
exclude_analyzer = nil
exclude_field = field
if field == "_all" || field.end_with?(".analyzed")
shared_options[:cutoff_frequency] = 0.001 unless operator.to_s == "and" || misspellings == false
qs.concat [
shared_options.merge(analyzer: "searchkick_search"),
shared_options.merge(analyzer: "searchkick_search2")
]
exclude_analyzer = "searchkick_search2"
elsif field.end_with?(".exact")
f = field.split(".")[0..-2].join(".")
queries_to_add << {match: {f => shared_options.merge(analyzer: "keyword")}}
exclude_field = f
exclude_analyzer = "keyword"
else
analyzer = field =~ /\.word_(start|middle|end)\z/ ? "searchkick_word_search" : "searchkick_autocomplete_search"
qs << shared_options.merge(analyzer: analyzer)
exclude_analyzer = analyzer
end
if misspellings != false && match_type == :match
qs.concat qs.map { |q| q.except(:cutoff_frequency).merge(fuzziness: edit_distance, prefix_length: prefix_length, max_expansions: max_expansions, boost: factor).merge(transpositions) }
end
if field.start_with?("*.")
q2 = qs.map { |q| {multi_match: q.merge(fields: [field], type: match_type == :match_phrase ? "phrase" : "best_fields")} }
if below60?
q2.each do |q|
q[:multi_match].delete(:fuzzy_transpositions)
end
end
else
q2 = qs.map { |q| {match_type => {field => q}} }
end
# boost exact matches more
if field =~ /\.word_(start|middle|end)\z/ && searchkick_options[:word] != false
queries_to_add << {
bool: {
must: {
bool: {
should: q2
}
},
should: {match_type => {field.sub(/\.word_(start|middle|end)\z/, ".analyzed") => qs.first}}
}
}
else
queries_to_add.concat(q2)
end
if options[:exclude]
must_not =
Array(options[:exclude]).map do |phrase|
if field.start_with?("*.")
{
multi_match: {
fields: [field],
query: phrase,
analyzer: exclude_analyzer,
type: "phrase"
}
}
else
{
match_phrase: {
exclude_field => {
query: phrase,
analyzer: exclude_analyzer
}
}
}
end
end
queries_to_add = [{
bool: {
should: queries_to_add,
must_not: must_not
}
}]
end
queries.concat(queries_to_add)
end
payload = {
dis_max: {
queries: queries
}
}
if conversions_fields.present? && options[:conversions] != false
shoulds = []
conversions_fields.each do |conversions_field|
# wrap payload in a bool query
script_score = {field_value_factor: {field: "#{conversions_field}.count"}}
shoulds << {
nested: {
path: conversions_field,
score_mode: "sum",
query: {
function_score: {
boost_mode: "replace",
query: {
match: {
"#{conversions_field}.query" => options[:conversions_term] || term
}
}
}.merge(script_score)
}
}
}
end
payload = {
bool: {
must: payload,
should: shoulds
}
}
end
end
custom_filters = []
multiply_filters = []
set_boost_by(multiply_filters, custom_filters)
set_boost_where(custom_filters)
set_boost_by_distance(custom_filters) if options[:boost_by_distance]
if custom_filters.any?
payload = {
function_score: {
functions: custom_filters,
query: payload,
score_mode: "sum"
}
}
end
if multiply_filters.any?
payload = {
function_score: {
functions: multiply_filters,
query: payload,
score_mode: "multiply"
}
}
end
payload = {
query: payload,
size: per_page,
from: offset
}
payload[:explain] = options[:explain] if options[:explain]
payload[:profile] = options[:profile] if options[:profile]
# order
set_order(payload) if options[:order]
# indices_boost
set_boost_by_indices(payload)
# type when inheritance
where = (options[:where] || {}).dup
if searchkick_options[:inheritance] && (options[:type] || (klass != searchkick_klass && searchkick_index))
where[:type] = [options[:type] || klass].flatten.map { |v| searchkick_index.klass_document_type(v, true) }
end
# start everything as efficient filters
# move to post_filters as aggs demand
filters = where_filters(where)
post_filters = []
# aggregations
set_aggregations(payload, filters, post_filters) if options[:aggs]
# filters
set_filters(payload, filters, post_filters)
# suggestions
set_suggestions(payload, options[:suggest]) if options[:suggest]
# highlight
set_highlights(payload, fields) if options[:highlight]
# timeout shortly after client times out
payload[:timeout] ||= "#{Searchkick.search_timeout + 1}s"
# An empty array will cause only the _id and _type for each hit to be returned
# https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-source-filtering.html
if options[:select]
if options[:select] == []
# intuitively [] makes sense to return no fields, but ES by default returns all fields
payload[:_source] = false
else
payload[:_source] = options[:select]
end
elsif load
payload[:_source] = false
end
end
# type
if !searchkick_options[:inheritance] && (options[:type] || (klass != searchkick_klass && searchkick_index))
@type = [options[:type] || klass].flatten.map { |v| searchkick_index.klass_document_type(v) }
end
# routing
@routing = options[:routing] if options[:routing]
# merge more body options
payload = payload.deep_merge(options[:body_options]) if options[:body_options]
@body = payload
@page = page
@per_page = per_page
@padding = padding
@load = load
end
def set_fields
boost_fields = {}
fields = options[:fields] || searchkick_options[:default_fields] || searchkick_options[:searchable]
all = searchkick_options.key?(:_all) ? searchkick_options[:_all] : below60?
default_match = options[:match] || searchkick_options[:match] || :word
fields =
if fields
fields.map do |value|
k, v = value.is_a?(Hash) ? value.to_a.first : [value, default_match]
k2, boost = k.to_s.split("^", 2)
field = "#{k2}.#{v == :word ? 'analyzed' : v}"
boost_fields[field] = boost.to_f if boost
field
end
elsif all && default_match == :word
["_all"]
elsif all && default_match == :phrase
["_all.phrase"]
elsif term == "*"
[]
elsif default_match == :exact
raise ArgumentError, "Must specify fields to search"
else
[default_match == :word ? "*.analyzed" : "*.#{default_match}"]
end
[boost_fields, fields]
end
def set_boost_by_distance(custom_filters)
boost_by_distance = options[:boost_by_distance] || {}
# legacy format
if boost_by_distance[:field]
boost_by_distance = {boost_by_distance[:field] => boost_by_distance.except(:field)}
end
boost_by_distance.each do |field, attributes|
attributes = {function: :gauss, scale: "5mi"}.merge(attributes)
unless attributes[:origin]
raise ArgumentError, "boost_by_distance requires :origin"
end
function_params = attributes.select { |k, _| [:origin, :scale, :offset, :decay].include?(k) }
function_params[:origin] = location_value(function_params[:origin])
custom_filters << {
weight: attributes[:factor] || 1,
attributes[:function] => {
field => function_params
}
}
end
end
def set_boost_by(multiply_filters, custom_filters)
boost_by = options[:boost_by] || {}
if boost_by.is_a?(Array)
boost_by = Hash[boost_by.map { |f| [f, {factor: 1}] }]
elsif boost_by.is_a?(Hash)
multiply_by, boost_by = boost_by.partition { |_, v| v[:boost_mode] == "multiply" }.map { |i| Hash[i] }
end
boost_by[options[:boost]] = {factor: 1} if options[:boost]
custom_filters.concat boost_filters(boost_by, log: true)
multiply_filters.concat boost_filters(multiply_by || {})
end
def set_boost_where(custom_filters)
boost_where = options[:boost_where] || {}
boost_where.each do |field, value|
if value.is_a?(Array) && value.first.is_a?(Hash)
value.each do |value_factor|
custom_filters << custom_filter(field, value_factor[:value], value_factor[:factor])
end
elsif value.is_a?(Hash)
custom_filters << custom_filter(field, value[:value], value[:factor])
else
factor = 1000
custom_filters << custom_filter(field, value, factor)
end
end
end
def set_boost_by_indices(payload)
return unless options[:indices_boost]
indices_boost = options[:indices_boost].each_with_object({}) do |(key, boost), memo|
index = key.respond_to?(:searchkick_index) ? key.searchkick_index.name : key
# try to use index explicitly instead of alias: https://github.com/elasticsearch/elasticsearch/issues/4756
index_by_alias = Searchkick.client.indices.get_alias(index: index).keys.first
memo[index_by_alias || index] = boost
end
payload[:indices_boost] = indices_boost
end
def set_suggestions(payload, suggest)
suggest_fields = nil
if suggest.is_a?(Array)
suggest_fields = suggest
else
suggest_fields = (searchkick_options[:suggest] || []).map(&:to_s)
# intersection
if options[:fields]
suggest_fields &= options[:fields].map { |v| (v.is_a?(Hash) ? v.keys.first : v).to_s.split("^", 2).first }
end
end
if suggest_fields.any?
payload[:suggest] = {text: term}
suggest_fields.each do |field|
payload[:suggest][field] = {
phrase: {
field: "#{field}.suggest"
}
}
end
else
raise ArgumentError, "Must pass fields to suggest option"
end
end
# Builds the highlight section of the payload from the :highlight option
# and records which fields are highlighted for later result processing.
def set_highlights(payload, fields)
  highlight = {fields: Hash[fields.map { |f| [f, {}] }]}

  opts = options[:highlight]
  if opts.is_a?(Hash)
    if (tag = opts[:tag])
      highlight[:pre_tags] = [tag]
      # derive the closing tag from the opening one, e.g. <em class=x> -> </em>
      highlight[:post_tags] = [tag.to_s.gsub(/\A<(\w+).+/, "</\\1>")]
    end

    highlight[:fragment_size] = opts[:fragment_size] if opts[:fragment_size]
    highlight[:encoder] = opts[:encoder] if opts[:encoder]

    if (explicit_fields = opts[:fields])
      highlight[:fields] = {}
      explicit_fields.each do |name, field_opts|
        highlight[:fields]["#{name}.#{@match_suffix}"] = field_opts || {}
      end
    end
  end

  payload[:highlight] = highlight
  @highlighted_fields = highlight[:fields].keys
end
# Builds the aggregations section of the payload. Filters that an agg also
# applies stay as efficient query-level filters; the rest are moved to
# post_filters so aggregation counts ignore them ("smart aggs").
def set_aggregations(payload, filters, post_filters)
  aggs = options[:aggs]
  payload[:aggs] = {}

  aggs = Hash[aggs.map { |f| [f, {}] }] if aggs.is_a?(Array) # convert to more advanced syntax

  aggs.each do |field, agg_options|
    size = agg_options[:limit] ? agg_options[:limit] : 1_000
    shared_agg_options = agg_options.slice(:order, :min_doc_count)

    if agg_options[:ranges]
      payload[:aggs][field] = {
        range: {
          field: agg_options[:field] || field,
          ranges: agg_options[:ranges]
        }.merge(shared_agg_options)
      }
    elsif agg_options[:date_ranges]
      payload[:aggs][field] = {
        date_range: {
          field: agg_options[:field] || field,
          ranges: agg_options[:date_ranges]
        }.merge(shared_agg_options)
      }
    elsif histogram = agg_options[:date_histogram]
      interval = histogram[:interval]
      payload[:aggs][field] = {
        date_histogram: {
          field: histogram[:field],
          interval: interval
        }
      }
    elsif metric = @@metric_aggs.find { |k| agg_options.has_key?(k) }
      payload[:aggs][field] = {
        metric => {
          field: agg_options[metric][:field] || field
        }
      }
    else
      # default: terms aggregation
      payload[:aggs][field] = {
        terms: {
          field: agg_options[:field] || field,
          size: size
        }.merge(shared_agg_options)
      }
    end

    # smart aggs: drop this agg's own field from the where clause
    where = {}
    where = (options[:where] || {}).reject { |k| k == field } unless options[:smart_aggs] == false
    agg_filters = where_filters(where.merge(agg_options[:where] || {}))

    # only do one level comparison for simplicity
    filters.select! do |filter|
      if agg_filters.include?(filter)
        true
      else
        post_filters << filter
        false
      end
    end

    # scope the agg itself with its filters
    if agg_filters.any?
      payload[:aggs][field] = {
        filter: {
          bool: {
            must: agg_filters
          }
        },
        aggs: {
          field => payload[:aggs][field]
        }
      }
    end
  end
end
# Attaches filters to the payload: post_filters (applied after aggregations)
# go into post_filter; regular filters wrap the query in a bool filter
# context, which is more efficient when no aggregations need them.
def set_filters(payload, filters, post_filters)
  unless post_filters.empty?
    payload[:post_filter] = {bool: {filter: post_filters}}
  end

  unless filters.empty?
    # more efficient query if no aggs
    payload[:query] = {bool: {must: payload[:query], filter: filters}}
  end
end
# TODO id transformation for arrays
# Normalizes the :order option into an ES sort clause. A bare value sorts
# ascending; the "id" field maps to _uid (or _id on ES < 5).
def set_order(payload)
  raw = options[:order]
  order = raw.is_a?(Enumerable) ? raw : {raw => :asc}
  id_field = below50? ? :_id : :_uid
  payload[:sort] =
    if order.is_a?(Array)
      order
    else
      order.each_with_object({}) { |(k, v), h| h[k.to_s == "id" ? id_field : k] = v }
    end
end
# Converts a Searchkick :where hash into an array of Elasticsearch filter
# clauses. Supports logical operators (:or/:_or/:_not/:_and), Ruby Ranges,
# geo queries, regexps, comparison operators, and plain term/terms filters.
def where_filters(where)
  filters = []
  (where || {}).each do |field, value|
    field = :_id if field.to_s == "id"

    if field == :or
      value.each do |or_clause|
        filters << {bool: {should: or_clause.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
      end
    elsif field == :_or
      filters << {bool: {should: value.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
    elsif field == :_not
      filters << {bool: {must_not: where_filters(value)}}
    elsif field == :_and
      filters << {bool: {must: value.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
    else
      # expand ranges
      if value.is_a?(Range)
        value = {gte: value.first, (value.exclude_end? ? :lt : :lte) => value.last}
      end

      value = {in: value} if value.is_a?(Array)

      if value.is_a?(Hash)
        value.each do |op, op_value|
          case op
          when :within, :bottom_right, :bottom_left
            # do nothing — consumed by the :near/:top_left/:top_right clauses below
          when :near
            filters << {
              geo_distance: {
                field => location_value(op_value),
                distance: value[:within] || "50mi"
              }
            }
          when :geo_polygon
            filters << {
              geo_polygon: {
                field => op_value
              }
            }
          when :geo_shape
            shape = op_value.except(:relation)
            shape[:coordinates] = coordinate_array(shape[:coordinates]) if shape[:coordinates]
            filters << {
              geo_shape: {
                field => {
                  relation: op_value[:relation] || "intersects",
                  shape: shape
                }
              }
            }
          when :top_left
            filters << {
              geo_bounding_box: {
                field => {
                  top_left: location_value(op_value),
                  bottom_right: location_value(value[:bottom_right])
                }
              }
            }
          when :top_right
            filters << {
              geo_bounding_box: {
                field => {
                  top_right: location_value(op_value),
                  bottom_left: location_value(value[:bottom_left])
                }
              }
            }
          when :regexp # support for regexp queries without using a regexp ruby object
            filters << {regexp: {field => {value: op_value}}}
          when :not # not equal
            filters << {bool: {must_not: term_filters(field, op_value)}}
          when :all
            op_value.each do |val|
              filters << term_filters(field, val)
            end
          when :in
            filters << term_filters(field, op_value)
          else
            range_query =
              case op
              when :gt
                {from: op_value, include_lower: false}
              when :gte
                {from: op_value, include_lower: true}
              when :lt
                {to: op_value, include_upper: false}
              when :lte
                {to: op_value, include_upper: true}
              else
                raise "Unknown where operator: #{op.inspect}"
              end
            # issue 132 — merge multiple range ops on the same field into one clause
            if (existing = filters.find { |f| f[:range] && f[:range][field] })
              existing[:range][field].merge!(range_query)
            else
              filters << {range: {field => range_query}}
            end
          end
        end
      else
        filters << term_filters(field, value)
      end
    end
  end
  filters
end
# Builds a term-level filter for field => value: a terms filter for arrays
# (nil elements become a missing-field clause), a must_not/exists clause
# for nil, a regexp filter for Regexp values, and a plain term otherwise.
def term_filters(field, value)
  case value
  when Array # in query
    if value.include?(nil)
      {bool: {should: [term_filters(field, nil), term_filters(field, value.compact)]}}
    else
      {terms: {field => value}}
    end
  when nil
    {bool: {must_not: {exists: {field: field}}}}
  when Regexp
    {regexp: {field => {value: value.source}}}
  else
    {term: {field => value}}
  end
end
# Wraps a where-style condition as a function_score entry that weights
# matching documents by factor (boost_factor on ES < 5, weight on 5+).
def custom_filter(field, value, factor)
  if below50?
    {
      filter: {bool: {must: where_filters(field => value)}},
      boost_factor: factor
    }
  else
    {
      filter: where_filters(field => value),
      weight: factor
    }
  end
end
# Maps boost_by specs to field_value_factor scoring functions. When a log
# modifier applies, ln2p is used. Fields without a :missing default get an
# exists filter so absent fields do not break scoring.
def boost_filters(boost_by, options = {})
  boost_by.map do |field, value|
    log = value.key?(:log) ? value[:log] : options[:log]
    value[:factor] ||= 1
    entry = {
      field_value_factor: {
        field: field,
        factor: value[:factor].to_f,
        modifier: value[:modifier] || (log ? "ln2p" : nil)
      }
    }

    if value[:missing]
      # the missing option requires ES 5+
      raise ArgumentError, "The missing option for boost_by is not supported in Elasticsearch < 5" if below50?
      entry[:field_value_factor][:missing] = value[:missing].to_f
    else
      entry[:filter] = {exists: {field: field}}
    end

    entry
  end
end
# Recursively descend through nesting of arrays until we reach either a lat/lon object or an array of numbers,
# eventually returning the same structure with all values transformed to [lon, lat].
#
def coordinate_array(value)
  case value
  when Hash
    [value[:lon], value[:lat]]
  when Array
    value[0].is_a?(Numeric) ? value : value.map { |v| coordinate_array(v) }
  else
    value
  end
end
# Normalizes a [lat, lon] array into Elasticsearch's [lon, lat] float
# order; other representations (hash, "lat,lon" string) pass through.
def location_value(value)
  return value unless value.is_a?(Array)
  value.map(&:to_f).reverse
end
# True when the connected Elasticsearch server is older than 5.0.
def below50?
  Searchkick.server_below?("5.0.0-alpha1")
end
# True when the connected Elasticsearch server is older than 6.0.
def below60?
  Searchkick.server_below?("6.0.0-alpha1")
end
end
end
Note: Elasticsearch provides no fuzzy-transposition option for multi_match queries until version 6.1.
module Searchkick
  # Builds and executes a single Elasticsearch search request, exposing
  # results through a lazily-executed, enumerable query object.
  class Query
    extend Forwardable

    # Metric aggregation types recognized in the :aggs option.
    @@metric_aggs = [:avg, :cardinality, :max, :min, :sum]

    attr_reader :klass, :term, :options
    attr_accessor :body

    # Delegate enumeration, result, and pagination helpers to the executed results.
    def_delegators :execute, :map, :each, :any?, :empty?, :size, :length, :slice, :[], :to_ary,
      :records, :results, :suggestions, :each_with_hit, :with_details, :aggregations, :aggs,
      :took, :error, :model_name, :entry_name, :total_count, :total_entries,
      :current_page, :per_page, :limit_value, :padding, :total_pages, :num_pages,
      :offset_value, :offset, :previous_page, :prev_page, :next_page, :first_page?, :last_page?,
      :out_of_range?, :hits, :response, :to_a, :first
# @param klass [Class, nil] the searchable model class (nil for class-less searches)
# @param term [String] the search term; "*" matches all documents
# @param options [Hash] search options; unknown keywords raise ArgumentError
def initialize(klass, term = "*", **options)
  unknown_keywords = options.keys - [:aggs, :body, :body_options, :boost,
    :boost_by, :boost_by_distance, :boost_where, :conversions, :conversions_term, :debug, :emoji, :exclude, :execute, :explain,
    :fields, :highlight, :includes, :index_name, :indices_boost, :limit, :load,
    :match, :misspellings, :model_includes, :offset, :operator, :order, :padding, :page, :per_page, :profile,
    :request_params, :routing, :scope_results, :select, :similar, :smart_aggs, :suggest, :track, :type, :where]
  raise ArgumentError, "unknown keywords: #{unknown_keywords.join(", ")}" if unknown_keywords.any?

  term = term.to_s

  if options[:emoji]
    # replace emoji with their textual names so they can match analyzed text
    term = EmojiParser.parse_unicode(term) { |e| " #{e.name} " }.strip
  end

  @klass = klass
  @term = term
  @options = options
  @match_suffix = options[:match] || searchkick_options[:match] || "analyzed"

  # prevent Ruby warnings
  @type = nil
  @routing = nil
  @misspellings = false
  @misspellings_below = nil
  @highlighted_fields = nil

  prepare
end
# The model's Searchkick index object, or nil for class-less searches.
def searchkick_index
  klass && klass.searchkick_index
end

# Searchkick options declared on the model (empty hash when class-less).
def searchkick_options
  klass ? klass.searchkick_options : {}
end

# The base searchkick class (handles inheritance), or nil when class-less.
def searchkick_klass
  klass && klass.searchkick_klass
end
# Assembles the client-level request parameters: target index (explicit
# :index_name, the model's index, or _all), the body, and optional
# type, routing, and extra :request_params.
def params
  index =
    if options[:index_name]
      Array(options[:index_name]).map { |v| v.respond_to?(:searchkick_index) ? v.searchkick_index.name : v }.join(",")
    elsif searchkick_index
      searchkick_index.name
    else
      "_all"
    end

  params = {
    index: index,
    body: body
  }
  params[:type] = @type if @type
  params[:routing] = @routing if @routing
  params.merge!(options[:request_params]) if options[:request_params]
  params
end
# Runs the search (memoized). When a misspellings :below threshold is
# configured and the exact search returns too few hits, the query is
# rebuilt with fuzziness enabled and executed once more.
def execute
  @execute ||= begin
    begin
      response = execute_search
      if retry_misspellings?(response)
        prepare
        response = execute_search
      end
    rescue => e # TODO rescue type
      handle_error(e)
    end
    handle_response(response)
  end
end
# Renders the current request as a copy-pasteable curl command for debugging.
def to_curl
  query = params
  type = query[:type]
  index = query[:index].is_a?(Array) ? query[:index].join(",") : query[:index]

  # no easy way to tell which host the client will use
  host = Searchkick.client.transport.hosts.first
  credentials = host[:user] || host[:password] ? "#{host[:user]}:#{host[:password]}@" : nil
  "curl #{host[:protocol]}://#{credentials}#{host[:host]}:#{host[:port]}/#{CGI.escape(index)}#{type ? "/#{type.map { |t| CGI.escape(t) }.join(',')}" : ''}/_search?pretty -H 'Content-Type: application/json' -d '#{query[:body].to_json}'"
end
# Wraps the raw Elasticsearch response in a Searchkick::Results object,
# optionally printing extensive debug information first (:debug option).
def handle_response(response)
  opts = {
    page: @page,
    per_page: @per_page,
    padding: @padding,
    load: @load,
    includes: options[:includes],
    model_includes: options[:model_includes],
    json: !@json.nil?,
    match_suffix: @match_suffix,
    highlighted_fields: @highlighted_fields || [],
    misspellings: @misspellings,
    term: term,
    scope_results: options[:scope_results]
  }

  if options[:debug]
    require "pp"

    puts "Searchkick Version: #{Searchkick::VERSION}"
    puts "Elasticsearch Version: #{Searchkick.server_version}"
    puts

    puts "Model Searchkick Options"
    pp searchkick_options
    puts

    puts "Search Options"
    pp options
    puts

    if searchkick_index
      puts "Model Search Data"
      begin
        pp klass.first(3).map { |r| {index: searchkick_index.record_data(r).merge(data: searchkick_index.send(:search_data, r))}}
      rescue => e
        puts "#{e.class.name}: #{e.message}"
      end
      puts

      puts "Elasticsearch Mapping"
      puts JSON.pretty_generate(searchkick_index.mapping)
      puts

      puts "Elasticsearch Settings"
      puts JSON.pretty_generate(searchkick_index.settings)
      puts
    end

    puts "Elasticsearch Query"
    puts to_curl
    puts

    puts "Elasticsearch Results"
    puts JSON.pretty_generate(response)
  end

  # set execute for multi search
  @execute = Searchkick::Results.new(searchkick_klass, response, opts)
end
# True when a misspellings :below threshold is set and the first (exact)
# search returned fewer hits than that threshold, triggering a fuzzy retry.
def retry_misspellings?(response)
  @misspellings_below && response["hits"]["total"] < @misspellings_below
end
private

# Translates low-level client errors into Searchkick error classes by
# inspecting the HTTP status code embedded in the exception message.
def handle_error(e)
  status_code = e.message[1..3].to_i
  if status_code == 404
    raise MissingIndexError, "Index missing - run #{reindex_command}"
  elsif status_code == 500 && (
    e.message.include?("IllegalArgumentException[minimumSimilarity >= 1]") ||
    e.message.include?("No query registered for [multi_match]") ||
    e.message.include?("[match] query does not support [cutoff_frequency]") ||
    e.message.include?("No query registered for [function_score]")
  )
    # server too old to understand queries this gem generates
    raise UnsupportedVersionError, "This version of Searchkick requires Elasticsearch 2 or greater"
  elsif status_code == 400
    if (
      e.message.include?("bool query does not support [filter]") ||
      e.message.include?("[bool] filter does not support [filter]")
    )
      raise UnsupportedVersionError, "This version of Searchkick requires Elasticsearch 2 or greater"
    elsif e.message.include?("[multi_match] analyzer [searchkick_search] not found")
      raise InvalidQueryError, "Bad mapping - run #{reindex_command}"
    else
      raise InvalidQueryError, e.message
    end
  else
    raise e
  end
end
# Human-readable reindex hint for error messages.
def reindex_command
  searchkick_klass ? "#{searchkick_klass.name}.reindex" : "reindex"
end
# Sends the prepared request to Elasticsearch and returns the raw response.
def execute_search
  Searchkick.client.search(params)
end
# Builds the complete request body from the term and options, storing it in
# @body along with pagination state (@page, @per_page, @padding, @load).
# Called from initialize, and again by execute for a misspellings retry.
def prepare
  boost_fields, fields = set_fields
  operator = options[:operator] || "and"

  # pagination
  page = [options[:page].to_i, 1].max
  per_page = (options[:limit] || options[:per_page] || 1_000).to_i
  padding = [options[:padding].to_i, 0].max
  offset = options[:offset] || (page - 1) * per_page + padding

  # model and eager loading
  load = options[:load].nil? ? true : options[:load]

  conversions_fields = Array(options[:conversions] || searchkick_options[:conversions]).map(&:to_s)

  all = term == "*"

  @json = options[:body]
  if @json
    # a raw :body replaces everything we would otherwise build
    ignored_options = options.keys & [:aggs, :boost,
      :boost_by, :boost_by_distance, :boost_where, :conversions, :conversions_term, :exclude, :explain,
      :fields, :highlight, :indices_boost, :limit, :match, :misspellings, :offset, :operator, :order,
      :padding, :page, :per_page, :select, :smart_aggs, :suggest, :where]
    warn "The body option replaces the entire body, so the following options are ignored: #{ignored_options.join(", ")}" if ignored_options.any?
    payload = @json
  else
    if options[:similar]
      # more-like-this query against the given document text
      payload = {
        more_like_this: {
          like: term,
          min_doc_freq: 1,
          min_term_freq: 1,
          analyzer: "searchkick_search2"
        }
      }
      if fields.all? { |f| f.start_with?("*.") }
        raise ArgumentError, "Must specify fields to search"
      end
      if fields != ["_all"]
        payload[:more_like_this][:fields] = fields
      end
    elsif all
      payload = {
        match_all: {}
      }
    else
      queries = []

      misspellings =
        if options.key?(:misspellings)
          options[:misspellings]
        else
          true
        end

      # with a :below threshold, search exactly first; execute retries
      # with fuzziness when too few hits come back
      if misspellings.is_a?(Hash) && misspellings[:below] && !@misspellings_below
        @misspellings_below = misspellings[:below].to_i
        misspellings = false
      end

      if misspellings != false
        edit_distance = (misspellings.is_a?(Hash) && (misspellings[:edit_distance] || misspellings[:distance])) || 1
        transpositions =
          if misspellings.is_a?(Hash) && misspellings.key?(:transpositions)
            {fuzzy_transpositions: misspellings[:transpositions]}
          else
            {fuzzy_transpositions: true}
          end
        prefix_length = (misspellings.is_a?(Hash) && misspellings[:prefix_length]) || 0
        default_max_expansions = @misspellings_below ? 20 : 3
        max_expansions = (misspellings.is_a?(Hash) && misspellings[:max_expansions]) || default_max_expansions
        @misspellings = true
      else
        @misspellings = false
      end

      # build one (or more) match queries per searched field
      fields.each do |field|
        queries_to_add = []
        qs = []

        factor = boost_fields[field] || 1
        shared_options = {
          query: term,
          boost: 10 * factor
        }

        match_type =
          if field.end_with?(".phrase")
            field =
              if field == "_all.phrase"
                "_all"
              else
                field.sub(/\.phrase\z/, ".analyzed")
              end

            :match_phrase
          else
            :match
          end

        shared_options[:operator] = operator if match_type == :match
        exclude_analyzer = nil
        exclude_field = field

        if field == "_all" || field.end_with?(".analyzed")
          shared_options[:cutoff_frequency] = 0.001 unless operator.to_s == "and" || misspellings == false
          qs.concat [
            shared_options.merge(analyzer: "searchkick_search"),
            shared_options.merge(analyzer: "searchkick_search2")
          ]
          exclude_analyzer = "searchkick_search2"
        elsif field.end_with?(".exact")
          f = field.split(".")[0..-2].join(".")
          queries_to_add << {match: {f => shared_options.merge(analyzer: "keyword")}}
          exclude_field = f
          exclude_analyzer = "keyword"
        else
          analyzer = field =~ /\.word_(start|middle|end)\z/ ? "searchkick_word_search" : "searchkick_autocomplete_search"
          qs << shared_options.merge(analyzer: analyzer)
          exclude_analyzer = analyzer
        end

        # add fuzzy variants of each query
        if misspellings != false && match_type == :match
          qs.concat qs.map { |q| q.except(:cutoff_frequency).merge(fuzziness: edit_distance, prefix_length: prefix_length, max_expansions: max_expansions, boost: factor).merge(transpositions) }
        end

        if field.start_with?("*.")
          q2 = qs.map { |q| {multi_match: q.merge(fields: [field], type: match_type == :match_phrase ? "phrase" : "best_fields")} }
          if below61?
            # multi_match has no fuzzy_transpositions option before ES 6.1
            q2.each do |q|
              q[:multi_match].delete(:fuzzy_transpositions)
            end
          end
        else
          q2 = qs.map { |q| {match_type => {field => q}} }
        end

        # boost exact matches more
        if field =~ /\.word_(start|middle|end)\z/ && searchkick_options[:word] != false
          queries_to_add << {
            bool: {
              must: {
                bool: {
                  should: q2
                }
              },
              should: {match_type => {field.sub(/\.word_(start|middle|end)\z/, ".analyzed") => qs.first}}
            }
          }
        else
          queries_to_add.concat(q2)
        end

        if options[:exclude]
          must_not =
            Array(options[:exclude]).map do |phrase|
              if field.start_with?("*.")
                {
                  multi_match: {
                    fields: [field],
                    query: phrase,
                    analyzer: exclude_analyzer,
                    type: "phrase"
                  }
                }
              else
                {
                  match_phrase: {
                    exclude_field => {
                      query: phrase,
                      analyzer: exclude_analyzer
                    }
                  }
                }
              end
            end

          queries_to_add = [{
            bool: {
              should: queries_to_add,
              must_not: must_not
            }
          }]
        end

        queries.concat(queries_to_add)
      end

      payload = {
        dis_max: {
          queries: queries
        }
      }

      if conversions_fields.present? && options[:conversions] != false
        shoulds = []
        conversions_fields.each do |conversions_field|
          # wrap payload in a bool query
          script_score = {field_value_factor: {field: "#{conversions_field}.count"}}

          shoulds << {
            nested: {
              path: conversions_field,
              score_mode: "sum",
              query: {
                function_score: {
                  boost_mode: "replace",
                  query: {
                    match: {
                      "#{conversions_field}.query" => options[:conversions_term] || term
                    }
                  }
                }.merge(script_score)
              }
            }
          }
        end
        payload = {
          bool: {
            must: payload,
            should: shoulds
          }
        }
      end
    end

    custom_filters = []
    multiply_filters = []

    set_boost_by(multiply_filters, custom_filters)
    set_boost_where(custom_filters)
    set_boost_by_distance(custom_filters) if options[:boost_by_distance]

    if custom_filters.any?
      payload = {
        function_score: {
          functions: custom_filters,
          query: payload,
          score_mode: "sum"
        }
      }
    end

    if multiply_filters.any?
      payload = {
        function_score: {
          functions: multiply_filters,
          query: payload,
          score_mode: "multiply"
        }
      }
    end

    payload = {
      query: payload,
      size: per_page,
      from: offset
    }
    payload[:explain] = options[:explain] if options[:explain]
    payload[:profile] = options[:profile] if options[:profile]

    # order
    set_order(payload) if options[:order]

    # indices_boost
    set_boost_by_indices(payload)

    # type when inheritance
    where = (options[:where] || {}).dup
    if searchkick_options[:inheritance] && (options[:type] || (klass != searchkick_klass && searchkick_index))
      where[:type] = [options[:type] || klass].flatten.map { |v| searchkick_index.klass_document_type(v, true) }
    end

    # start everything as efficient filters
    # move to post_filters as aggs demand
    filters = where_filters(where)
    post_filters = []

    # aggregations
    set_aggregations(payload, filters, post_filters) if options[:aggs]

    # filters
    set_filters(payload, filters, post_filters)

    # suggestions
    set_suggestions(payload, options[:suggest]) if options[:suggest]

    # highlight
    set_highlights(payload, fields) if options[:highlight]

    # timeout shortly after client times out
    payload[:timeout] ||= "#{Searchkick.search_timeout + 1}s"

    # An empty array will cause only the _id and _type for each hit to be returned
    # https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-source-filtering.html
    if options[:select]
      if options[:select] == []
        # intuitively [] makes sense to return no fields, but ES by default returns all fields
        payload[:_source] = false
      else
        payload[:_source] = options[:select]
      end
    elsif load
      # no source fields needed since records are loaded from the database
      payload[:_source] = false
    end
  end

  # type
  if !searchkick_options[:inheritance] && (options[:type] || (klass != searchkick_klass && searchkick_index))
    @type = [options[:type] || klass].flatten.map { |v| searchkick_index.klass_document_type(v) }
  end

  # routing
  @routing = options[:routing] if options[:routing]

  # merge more body options
  payload = payload.deep_merge(options[:body_options]) if options[:body_options]

  @body = payload
  @page = page
  @per_page = per_page
  @padding = padding
  @load = load
end
# Resolves the fields to search and any per-field boosts ("field^boost").
# Returns [boost_fields, fields] where each field carries its match-type
# suffix (.analyzed, .phrase, .exact, ...).
def set_fields
  boost_fields = {}
  fields = options[:fields] || searchkick_options[:default_fields] || searchkick_options[:searchable]
  all = searchkick_options.key?(:_all) ? searchkick_options[:_all] : below60?
  default_match = options[:match] || searchkick_options[:match] || :word
  fields =
    if fields
      fields.map do |value|
        k, v = value.is_a?(Hash) ? value.to_a.first : [value, default_match]
        # split off an optional ^boost suffix
        k2, boost = k.to_s.split("^", 2)
        field = "#{k2}.#{v == :word ? 'analyzed' : v}"
        boost_fields[field] = boost.to_f if boost
        field
      end
    elsif all && default_match == :word
      ["_all"]
    elsif all && default_match == :phrase
      ["_all.phrase"]
    elsif term == "*"
      []
    elsif default_match == :exact
      raise ArgumentError, "Must specify fields to search"
    else
      [default_match == :word ? "*.analyzed" : "*.#{default_match}"]
    end
  [boost_fields, fields]
end
# Adds decay-function scoring (gauss by default, 5mi scale) around an
# origin point for each field in the :boost_by_distance option.
def set_boost_by_distance(custom_filters)
  boost_by_distance = options[:boost_by_distance] || {}

  # legacy format: {field: ..., origin: ...} instead of {field => attrs}
  if boost_by_distance[:field]
    boost_by_distance = {boost_by_distance[:field] => boost_by_distance.except(:field)}
  end

  boost_by_distance.each do |field, attributes|
    attributes = {function: :gauss, scale: "5mi"}.merge(attributes)
    unless attributes[:origin]
      raise ArgumentError, "boost_by_distance requires :origin"
    end
    function_params = attributes.select { |k, _| [:origin, :scale, :offset, :decay].include?(k) }
    function_params[:origin] = location_value(function_params[:origin])
    custom_filters << {
      weight: attributes[:factor] || 1,
      attributes[:function] => {
        field => function_params
      }
    }
  end
end
# Translates the :boost_by (and :boost) options into scoring functions,
# appending additive functions to custom_filters and multiplicative ones
# (boost_mode: "multiply") to multiply_filters.
def set_boost_by(multiply_filters, custom_filters)
  boost_by = options[:boost_by] || {}
  case boost_by
  when Array
    boost_by = boost_by.each_with_object({}) { |f, h| h[f] = {factor: 1} }
  when Hash
    # split out entries that should multiply the score rather than add to it
    multiply_by, boost_by = boost_by.partition { |_, v| v[:boost_mode] == "multiply" }.map { |pairs| Hash[pairs] }
  end
  boost_by[options[:boost]] = {factor: 1} if options[:boost]

  custom_filters.concat boost_filters(boost_by, log: true)
  multiply_filters.concat boost_filters(multiply_by || {})
end
# Converts the :boost_where option into function_score entries that boost
# documents matching field => value by the given factor (default 1000).
def set_boost_where(custom_filters)
  (options[:boost_where] || {}).each do |field, value|
    case value
    when Array
      if value.first.is_a?(Hash)
        # list of {value:, factor:} pairs
        value.each { |vf| custom_filters << custom_filter(field, vf[:value], vf[:factor]) }
      else
        custom_filters << custom_filter(field, value, 1000)
      end
    when Hash
      custom_filters << custom_filter(field, value[:value], value[:factor])
    else
      custom_filters << custom_filter(field, value, 1000)
    end
  end
end
# Applies the :indices_boost option, resolving model classes to index
# names and aliases to their concrete index when one exists
# (see elasticsearch/elasticsearch#4756).
def set_boost_by_indices(payload)
  boosts = options[:indices_boost]
  return unless boosts

  resolved = {}
  boosts.each do |key, boost|
    index = key.respond_to?(:searchkick_index) ? key.searchkick_index.name : key
    # prefer the concrete index behind the alias
    actual = Searchkick.client.indices.get_alias(index: index).keys.first
    resolved[actual || index] = boost
  end
  payload[:indices_boost] = resolved
end
# Adds a phrase suggester for each suggest-enabled field. Raises when no
# field applies (suggest requested but no suggest fields configured/searched).
def set_suggestions(payload, suggest)
  suggest_fields =
    if suggest.is_a?(Array)
      suggest
    else
      configured = (searchkick_options[:suggest] || []).map(&:to_s)
      if options[:fields]
        # intersect with the searched fields, stripping any ^boost suffix
        searched = options[:fields].map { |v| (v.is_a?(Hash) ? v.keys.first : v).to_s.split("^", 2).first }
        configured &= searched
      end
      configured
    end

  raise ArgumentError, "Must pass fields to suggest option" if suggest_fields.empty?

  payload[:suggest] = {text: term}
  suggest_fields.each do |field|
    payload[:suggest][field] = {phrase: {field: "#{field}.suggest"}}
  end
end
# Builds the highlight section of the payload from the :highlight option
# and records which fields are highlighted for later result processing.
def set_highlights(payload, fields)
  highlight = {fields: Hash[fields.map { |f| [f, {}] }]}

  opts = options[:highlight]
  if opts.is_a?(Hash)
    if (tag = opts[:tag])
      highlight[:pre_tags] = [tag]
      # derive the closing tag from the opening one, e.g. <em class=x> -> </em>
      highlight[:post_tags] = [tag.to_s.gsub(/\A<(\w+).+/, "</\\1>")]
    end

    highlight[:fragment_size] = opts[:fragment_size] if opts[:fragment_size]
    highlight[:encoder] = opts[:encoder] if opts[:encoder]

    if (explicit_fields = opts[:fields])
      highlight[:fields] = {}
      explicit_fields.each do |name, field_opts|
        highlight[:fields]["#{name}.#{@match_suffix}"] = field_opts || {}
      end
    end
  end

  payload[:highlight] = highlight
  @highlighted_fields = highlight[:fields].keys
end
# Builds the aggregations section of the payload. Filters that an agg also
# applies stay as efficient query-level filters; the rest are moved to
# post_filters so aggregation counts ignore them ("smart aggs").
def set_aggregations(payload, filters, post_filters)
  aggs = options[:aggs]
  payload[:aggs] = {}

  aggs = Hash[aggs.map { |f| [f, {}] }] if aggs.is_a?(Array) # convert to more advanced syntax

  aggs.each do |field, agg_options|
    size = agg_options[:limit] ? agg_options[:limit] : 1_000
    shared_agg_options = agg_options.slice(:order, :min_doc_count)

    if agg_options[:ranges]
      payload[:aggs][field] = {
        range: {
          field: agg_options[:field] || field,
          ranges: agg_options[:ranges]
        }.merge(shared_agg_options)
      }
    elsif agg_options[:date_ranges]
      payload[:aggs][field] = {
        date_range: {
          field: agg_options[:field] || field,
          ranges: agg_options[:date_ranges]
        }.merge(shared_agg_options)
      }
    elsif histogram = agg_options[:date_histogram]
      interval = histogram[:interval]
      payload[:aggs][field] = {
        date_histogram: {
          field: histogram[:field],
          interval: interval
        }
      }
    elsif metric = @@metric_aggs.find { |k| agg_options.has_key?(k) }
      payload[:aggs][field] = {
        metric => {
          field: agg_options[metric][:field] || field
        }
      }
    else
      # default: terms aggregation
      payload[:aggs][field] = {
        terms: {
          field: agg_options[:field] || field,
          size: size
        }.merge(shared_agg_options)
      }
    end

    # smart aggs: drop this agg's own field from the where clause
    where = {}
    where = (options[:where] || {}).reject { |k| k == field } unless options[:smart_aggs] == false
    agg_filters = where_filters(where.merge(agg_options[:where] || {}))

    # only do one level comparison for simplicity
    filters.select! do |filter|
      if agg_filters.include?(filter)
        true
      else
        post_filters << filter
        false
      end
    end

    # scope the agg itself with its filters
    if agg_filters.any?
      payload[:aggs][field] = {
        filter: {
          bool: {
            must: agg_filters
          }
        },
        aggs: {
          field => payload[:aggs][field]
        }
      }
    end
  end
end
# Attaches filters to the payload: post_filters (applied after aggregations)
# go into post_filter; regular filters wrap the query in a bool filter
# context, which is more efficient when no aggregations need them.
def set_filters(payload, filters, post_filters)
  unless post_filters.empty?
    payload[:post_filter] = {bool: {filter: post_filters}}
  end

  unless filters.empty?
    # more efficient query if no aggs
    payload[:query] = {bool: {must: payload[:query], filter: filters}}
  end
end
# TODO id transformation for arrays
# Normalizes the :order option into an ES sort clause. A bare value sorts
# ascending; the "id" field maps to _uid (or _id on ES < 5).
def set_order(payload)
  raw = options[:order]
  order = raw.is_a?(Enumerable) ? raw : {raw => :asc}
  id_field = below50? ? :_id : :_uid
  payload[:sort] =
    if order.is_a?(Array)
      order
    else
      order.each_with_object({}) { |(k, v), h| h[k.to_s == "id" ? id_field : k] = v }
    end
end
# Converts a Searchkick :where hash into an array of Elasticsearch filter
# clauses. Supports logical operators (:or/:_or/:_not/:_and), Ruby Ranges,
# geo queries, regexps, comparison operators, and plain term/terms filters.
def where_filters(where)
  filters = []
  (where || {}).each do |field, value|
    field = :_id if field.to_s == "id"

    if field == :or
      value.each do |or_clause|
        filters << {bool: {should: or_clause.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
      end
    elsif field == :_or
      filters << {bool: {should: value.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
    elsif field == :_not
      filters << {bool: {must_not: where_filters(value)}}
    elsif field == :_and
      filters << {bool: {must: value.map { |or_statement| {bool: {filter: where_filters(or_statement)}} }}}
    else
      # expand ranges
      if value.is_a?(Range)
        value = {gte: value.first, (value.exclude_end? ? :lt : :lte) => value.last}
      end

      value = {in: value} if value.is_a?(Array)

      if value.is_a?(Hash)
        value.each do |op, op_value|
          case op
          when :within, :bottom_right, :bottom_left
            # do nothing — consumed by the :near/:top_left/:top_right clauses below
          when :near
            filters << {
              geo_distance: {
                field => location_value(op_value),
                distance: value[:within] || "50mi"
              }
            }
          when :geo_polygon
            filters << {
              geo_polygon: {
                field => op_value
              }
            }
          when :geo_shape
            shape = op_value.except(:relation)
            shape[:coordinates] = coordinate_array(shape[:coordinates]) if shape[:coordinates]
            filters << {
              geo_shape: {
                field => {
                  relation: op_value[:relation] || "intersects",
                  shape: shape
                }
              }
            }
          when :top_left
            filters << {
              geo_bounding_box: {
                field => {
                  top_left: location_value(op_value),
                  bottom_right: location_value(value[:bottom_right])
                }
              }
            }
          when :top_right
            filters << {
              geo_bounding_box: {
                field => {
                  top_right: location_value(op_value),
                  bottom_left: location_value(value[:bottom_left])
                }
              }
            }
          when :regexp # support for regexp queries without using a regexp ruby object
            filters << {regexp: {field => {value: op_value}}}
          when :not # not equal
            filters << {bool: {must_not: term_filters(field, op_value)}}
          when :all
            op_value.each do |val|
              filters << term_filters(field, val)
            end
          when :in
            filters << term_filters(field, op_value)
          else
            range_query =
              case op
              when :gt
                {from: op_value, include_lower: false}
              when :gte
                {from: op_value, include_lower: true}
              when :lt
                {to: op_value, include_upper: false}
              when :lte
                {to: op_value, include_upper: true}
              else
                raise "Unknown where operator: #{op.inspect}"
              end
            # issue 132 — merge multiple range ops on the same field into one clause
            if (existing = filters.find { |f| f[:range] && f[:range][field] })
              existing[:range][field].merge!(range_query)
            else
              filters << {range: {field => range_query}}
            end
          end
        end
      else
        filters << term_filters(field, value)
      end
    end
  end
  filters
end
# Builds a term-level filter for field => value: a terms filter for arrays
# (nil elements become a missing-field clause), a must_not/exists clause
# for nil, a regexp filter for Regexp values, and a plain term otherwise.
def term_filters(field, value)
  case value
  when Array # in query
    if value.include?(nil)
      {bool: {should: [term_filters(field, nil), term_filters(field, value.compact)]}}
    else
      {terms: {field => value}}
    end
  when nil
    {bool: {must_not: {exists: {field: field}}}}
  when Regexp
    {regexp: {field => {value: value.source}}}
  else
    {term: {field => value}}
  end
end
# Wraps a where-style condition as a function_score entry that weights
# matching documents by factor (boost_factor on ES < 5, weight on 5+).
def custom_filter(field, value, factor)
  if below50?
    {
      filter: {bool: {must: where_filters(field => value)}},
      boost_factor: factor
    }
  else
    {
      filter: where_filters(field => value),
      weight: factor
    }
  end
end
# Maps boost_by specs to field_value_factor scoring functions. When a log
# modifier applies, ln2p is used. Fields without a :missing default get an
# exists filter so absent fields do not break scoring.
def boost_filters(boost_by, options = {})
  boost_by.map do |field, value|
    log = value.key?(:log) ? value[:log] : options[:log]
    value[:factor] ||= 1
    entry = {
      field_value_factor: {
        field: field,
        factor: value[:factor].to_f,
        modifier: value[:modifier] || (log ? "ln2p" : nil)
      }
    }

    if value[:missing]
      # the missing option requires ES 5+
      raise ArgumentError, "The missing option for boost_by is not supported in Elasticsearch < 5" if below50?
      entry[:field_value_factor][:missing] = value[:missing].to_f
    else
      entry[:filter] = {exists: {field: field}}
    end

    entry
  end
end
# Recursively descend through nesting of arrays until we reach either a lat/lon object or an array of numbers,
# eventually returning the same structure with all values transformed to [lon, lat].
#
def coordinate_array(value)
  case value
  when Hash
    [value[:lon], value[:lat]]
  when Array
    value[0].is_a?(Numeric) ? value : value.map { |v| coordinate_array(v) }
  else
    value
  end
end
# Normalizes a [lat, lon] array into Elasticsearch's [lon, lat] float
# order; other representations (hash, "lat,lon" string) pass through.
def location_value(value)
  return value unless value.is_a?(Array)
  value.map(&:to_f).reverse
end
# True when the connected Elasticsearch server is older than 5.0.
def below50?
  Searchkick.server_below?("5.0.0-alpha1")
end
# True when the connected Elasticsearch server is older than 6.0.
def below60?
  Searchkick.server_below?("6.0.0-alpha1")
end
# True when the connected Elasticsearch server is older than 6.1.
def below61?
  Searchkick.server_below?("6.1.0-alpha1")
end
end
end
|
Railsoverview::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send
  # config.action_mailer.raise_delivery_errors = false

  # NOTE(review): nesting a second configure block here is redundant — these
  # mailer settings could live directly in the enclosing block.
  Railsoverview::Application.configure do
    config.action_mailer.delivery_method = :smtp
    # SECURITY(review): live SMTP credentials are committed in plain text below;
    # move user_name/password into environment variables and rotate this password.
    config.action_mailer.smtp_settings = {
      :address => 'smtp.gmail.com',
      :domain => 'simon-fletcher.me',
      :port => 587,
      :user_name => 'simonfletcher0@gmail.com',
      :password => 'sf1032ld',
      :authentication => :plain
    }
    config.action_mailer.raise_delivery_errors = true
  end

  # Print deprecation notices to the Rails logger
  config.active_support.deprecation = :log

  # Only use best-standards-support built into browsers
  config.action_dispatch.best_standards_support = :builtin

  # Raise exception on mass assignment protection for Active Record models
  config.active_record.mass_assignment_sanitizer = :strict

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  config.active_record.auto_explain_threshold_in_seconds = 0.5

  # Do not compress assets
  config.assets.compress = false

  # Expands the lines which load the assets
  config.assets.debug = true
end
Removing the development environment configuration.
|
require "heroku/client"
class Heroku::Client::Organizations
  @headers = {}
  class << self
    # Lazily builds and memoizes an Excon connection to the org manager API,
    # authenticated via HTTP Basic with the current user's API key.
    # Returns self so callers can chain: Organizations.api.request(...).
    def api options = {}
      @api ||= begin
        require("excon")
        key = Heroku::Auth.get_credentials[1]
        auth = "Basic #{Base64.encode64(':' + key).gsub("\n", '')}"
        hdrs = headers.merge( {"Authorization" => auth } )
        @connection = Excon.new(manager_url, options.merge(:headers => hdrs))
      end
      self
    end

    # Merge additional default headers sent with every request.
    def add_headers(headers)
      @headers.merge! headers
    end

    def headers
      @headers
    end

    # Perform an HTTP request over the memoized connection.
    # Excon status errors are translated into the matching
    # Heroku::API::Errors class; gzipped bodies are inflated and JSON
    # bodies decoded when possible (non-JSON bodies are left as-is).
    def request params
      begin
        response = @connection.request(params)
      rescue Excon::Errors::HTTPStatusError => error
        klass = case error.response.status
          when 401 then Heroku::API::Errors::Unauthorized
          when 402 then Heroku::API::Errors::VerificationRequired
          when 403 then Heroku::API::Errors::Forbidden
          when 404
            # A doubled slash in the path means the app name was blank.
            if error.request[:path].match /\/apps\/\/.*/
              Heroku::API::Errors::NilApp
            else
              Heroku::API::Errors::NotFound
            end
          when 408 then Heroku::API::Errors::Timeout
          when 422 then Heroku::API::Errors::RequestFailed
          when 423 then Heroku::API::Errors::Locked
          when 429 then Heroku::API::Errors::RateLimitExceeded
          # BUG FIX: the status is an Integer, so the previous `when /50./`
          # (Regexp#=== against an Integer) never matched and 5xx errors
          # fell through to ErrorWithResponse. Match the range instead.
          when 500..599 then Heroku::API::Errors::RequestFailed
          else Heroku::API::Errors::ErrorWithResponse
        end
        decompress_response!(error.response)
        reerror = klass.new(error.message, error.response)
        reerror.set_backtrace(error.backtrace)
        raise(reerror)
      end
      if response.body && !response.body.empty?
        decompress_response!(response)
        begin
          response.body = MultiJson.load(response.body)
        rescue
          # leave non-JSON body as is
        end
      end
      # reset (non-persistent) connection
      # @connection.reset
      response
    end

    # Orgs
    #################################

    # Fetch the current user's org info. Returns a stub response body with
    # :not_found => true when the user belongs to no organization.
    def get_orgs
      begin
        api.request(
          :expects => 200,
          :path    => "/v1/user/info",
          :method  => :get
        )
      # user is not a member of any organization
      rescue Heroku::API::Errors::NotFound
        Excon::Response.new(:body => { 'user' => {:not_found => true} })
      end
    end

    # Apps
    #################################
    def get_apps(org)
      api.request(
        :expects => 200,
        :method  => :get,
        :path    => "/v1/organization/#{org}/app"
      )
    end

    def post_app(params, org)
      # manager API expects "app_name" rather than "name"
      params["app_name"] = params.delete("name") if params["name"]
      api.request(
        :expects => 201,
        :method  => :post,
        :path    => "/v1/organization/#{org}/create-app",
        :body    => Heroku::Helpers.json_encode(params),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def transfer_app(to_org, app, locked)
      api.request(
        :expects => 200,
        :method  => :put,
        :path    => "/v1/app/#{app}",
        :body    => Heroku::Helpers.json_encode( { "owner" => to_org, "locked" => locked || 'false' } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def post_collaborator(org, app, user)
      api.request(
        :expects => 200,
        :method  => :post,
        # BUG FIX: path was missing its leading slash, unlike every other
        # endpoint in this client.
        :path    => "/v1/organization/#{org}/app/#{app}/collaborators",
        :body    => Heroku::Helpers.json_encode({ "email" => user }),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def delete_collaborator(org, app, user)
      api.request(
        :expects => 200,
        :method  => :delete,
        # BUG FIX: leading slash added for consistency (see post_collaborator).
        :path    => "/v1/organization/#{org}/app/#{app}/collaborators/#{user}"
      )
    end

    def join_app(app)
      api.request(
        :expects => 200,
        :method  => :post,
        :path    => "/v1/app/#{app}/join"
      )
    end

    def leave_app(app)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/app/#{app}/join"
      )
    end

    def lock_app(app)
      api.request(
        :expects => 200,
        :method  => :post,
        :path    => "/v1/app/#{app}/lock"
      )
    end

    def unlock_app(app)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/app/#{app}/lock"
      )
    end

    # Members
    #################################
    def get_members(org)
      api.request(
        :expects => 200,
        :method  => :get,
        :path    => "/v1/organization/#{org}/user"
      )
    end

    def add_member(org, member, role)
      api.request(
        :expects => [201, 302],
        :method  => :post,
        :path    => "/v1/organization/#{org}/user",
        :body    => Heroku::Helpers.json_encode( { "email" => member, "role" => role } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def set_member(org, member, role)
      api.request(
        :expects => [200, 304],
        :method  => :put,
        :path    => "/v1/organization/#{org}/user/#{CGI.escape(member)}",
        :body    => Heroku::Helpers.json_encode( { "role" => role } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def remove_member(org, member)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/organization/#{org}/user/#{CGI.escape(member)}"
      )
    end

    private

    # Gunzip the response body in place when Content-Encoding says gzip.
    def decompress_response!(response)
      return unless response.headers['Content-Encoding'] == 'gzip'
      response.body = Zlib::GzipReader.new(StringIO.new(response.body)).read
    end

    def manager_url
      Heroku::Auth.full_host
    end
  end
end
disable ssl for org api on devclouds
require "heroku/client"
class Heroku::Client::Organizations
  @headers = {}
  class << self
    # Lazily builds and memoizes an Excon connection to the org manager API,
    # authenticated via HTTP Basic with the current user's API key.
    # SSL peer verification is disabled for hosts Heroku::Auth says not to
    # verify (dev clouds). Returns self so callers can chain
    # Organizations.api.request(...).
    def api options = {}
      @api ||= begin
        require("excon")
        key = Heroku::Auth.get_credentials[1]
        auth = "Basic #{Base64.encode64(':' + key).gsub("\n", '')}"
        hdrs = headers.merge( {"Authorization" => auth } )
        options[:ssl_verify_peer] = Heroku::Auth.verify_host?(Heroku::Auth.host)
        @connection = Excon.new(manager_url, options.merge(:headers => hdrs))
      end
      self
    end

    # Merge additional default headers sent with every request.
    def add_headers(headers)
      @headers.merge! headers
    end

    def headers
      @headers
    end

    # Perform an HTTP request over the memoized connection.
    # Excon status errors are translated into the matching
    # Heroku::API::Errors class; gzipped bodies are inflated and JSON
    # bodies decoded when possible (non-JSON bodies are left as-is).
    def request params
      begin
        response = @connection.request(params)
      rescue Excon::Errors::HTTPStatusError => error
        klass = case error.response.status
          when 401 then Heroku::API::Errors::Unauthorized
          when 402 then Heroku::API::Errors::VerificationRequired
          when 403 then Heroku::API::Errors::Forbidden
          when 404
            # A doubled slash in the path means the app name was blank.
            if error.request[:path].match /\/apps\/\/.*/
              Heroku::API::Errors::NilApp
            else
              Heroku::API::Errors::NotFound
            end
          when 408 then Heroku::API::Errors::Timeout
          when 422 then Heroku::API::Errors::RequestFailed
          when 423 then Heroku::API::Errors::Locked
          when 429 then Heroku::API::Errors::RateLimitExceeded
          # BUG FIX: the status is an Integer, so the previous `when /50./`
          # (Regexp#=== against an Integer) never matched and 5xx errors
          # fell through to ErrorWithResponse. Match the range instead.
          when 500..599 then Heroku::API::Errors::RequestFailed
          else Heroku::API::Errors::ErrorWithResponse
        end
        decompress_response!(error.response)
        reerror = klass.new(error.message, error.response)
        reerror.set_backtrace(error.backtrace)
        raise(reerror)
      end
      if response.body && !response.body.empty?
        decompress_response!(response)
        begin
          response.body = MultiJson.load(response.body)
        rescue
          # leave non-JSON body as is
        end
      end
      # reset (non-persistent) connection
      # @connection.reset
      response
    end

    # Orgs
    #################################

    # Fetch the current user's org info. Returns a stub response body with
    # :not_found => true when the user belongs to no organization.
    def get_orgs
      begin
        api.request(
          :expects => 200,
          :path    => "/v1/user/info",
          :method  => :get
        )
      # user is not a member of any organization
      rescue Heroku::API::Errors::NotFound
        Excon::Response.new(:body => { 'user' => {:not_found => true} })
      end
    end

    # Apps
    #################################
    def get_apps(org)
      api.request(
        :expects => 200,
        :method  => :get,
        :path    => "/v1/organization/#{org}/app"
      )
    end

    def post_app(params, org)
      # manager API expects "app_name" rather than "name"
      params["app_name"] = params.delete("name") if params["name"]
      api.request(
        :expects => 201,
        :method  => :post,
        :path    => "/v1/organization/#{org}/create-app",
        :body    => Heroku::Helpers.json_encode(params),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def transfer_app(to_org, app, locked)
      api.request(
        :expects => 200,
        :method  => :put,
        :path    => "/v1/app/#{app}",
        :body    => Heroku::Helpers.json_encode( { "owner" => to_org, "locked" => locked || 'false' } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def post_collaborator(org, app, user)
      api.request(
        :expects => 200,
        :method  => :post,
        # BUG FIX: path was missing its leading slash, unlike every other
        # endpoint in this client.
        :path    => "/v1/organization/#{org}/app/#{app}/collaborators",
        :body    => Heroku::Helpers.json_encode({ "email" => user }),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def delete_collaborator(org, app, user)
      api.request(
        :expects => 200,
        :method  => :delete,
        # BUG FIX: leading slash added for consistency (see post_collaborator).
        :path    => "/v1/organization/#{org}/app/#{app}/collaborators/#{user}"
      )
    end

    def join_app(app)
      api.request(
        :expects => 200,
        :method  => :post,
        :path    => "/v1/app/#{app}/join"
      )
    end

    def leave_app(app)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/app/#{app}/join"
      )
    end

    def lock_app(app)
      api.request(
        :expects => 200,
        :method  => :post,
        :path    => "/v1/app/#{app}/lock"
      )
    end

    def unlock_app(app)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/app/#{app}/lock"
      )
    end

    # Members
    #################################
    def get_members(org)
      api.request(
        :expects => 200,
        :method  => :get,
        :path    => "/v1/organization/#{org}/user"
      )
    end

    def add_member(org, member, role)
      api.request(
        :expects => [201, 302],
        :method  => :post,
        :path    => "/v1/organization/#{org}/user",
        :body    => Heroku::Helpers.json_encode( { "email" => member, "role" => role } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def set_member(org, member, role)
      api.request(
        :expects => [200, 304],
        :method  => :put,
        :path    => "/v1/organization/#{org}/user/#{CGI.escape(member)}",
        :body    => Heroku::Helpers.json_encode( { "role" => role } ),
        :headers => {"Content-Type" => "application/json"}
      )
    end

    def remove_member(org, member)
      api.request(
        :expects => 204,
        :method  => :delete,
        :path    => "/v1/organization/#{org}/user/#{CGI.escape(member)}"
      )
    end

    private

    # Gunzip the response body in place when Content-Encoding says gzip.
    def decompress_response!(response)
      return unless response.headers['Content-Encoding'] == 'gzip'
      response.body = Zlib::GzipReader.new(StringIO.new(response.body)).read
    end

    def manager_url
      Heroku::Auth.full_host
    end
  end
end
|
# = The Section Architecture
#
# Umlaut has what could be considered a 'domain specific language' for
# describing the display individual sections of content on the resolve menu
# page. These sections often correspond to a ServiceTypeValue, like "fulltext".
# But sometimes may include multiple ServiceTypeValues (eg related_items section
# includes cited_by and similar_items), or no ServiceTypeValue at all (eg
# section to display a COinS).
#
# A description of a section is simply a hash with certain conventional
# keys describing various aspects of the contents and display of that section.
# These hashes are listed in the resolve_sections application configuration
# variable, initialized in the resolve_views.rb initializer, and customized
# or over-ridden in the local resolve_views.rb initializer.
#
# One benefit of describing a section through configuration is that section
# display can often be changed at configure time without requiring a code
# change. Another is that the description of the section can be used not
# only to generate the initial HTML page; but also by the javascript that
# update the sections with new background content as available; and by the
# partial_html_sections api that delivers HTML fragments for sections in an
# XML or JSON container.
#
# A description of a section is simply a hash, suitable for passing to
# SectionRenderer.new, detailed below. Plus some additional variables
# specifying _where_ to display the section, documented in the resolve_views.rb
# initializer.
#
# = The SectionRenderer
# A SectionRenderer object provides logic for displaying a specific section
# on the Umlaut resolve menu page. It is initialized with a hash describing
# the details -- or significantly, with simply a pointer to such a hash
# already existing in the resolve_sections config variable.
#
# A SectionRenderer is typically created by the ResolveHelper#render_section
# method, which then passes the SectionRender object to the
# _section_display.erb.html that does the actual rendering, using
# the SectionRenderer for logic and hashes to pass to render calls in
# the partial.
#
#
# == Section Options
#
# Section options are typically configured in hashes in the application
# config variable resolve_sections, which is expected to be a list of hashes.
# That hash is suitable to be passed to a SectionRenderer.new() as configuration
# options for the section. The various ways these options can be used
# is documented below.
#
# === Simplest Case, Defaults
#
# As is common in ruby, SectionRenderer will make a lot of conventional
# assumptions, allowing you to be very concise for the basic simple case:
#
# { :div_id => "fulltext", :html_area => :main }
#
# This means that:
# * this section is assumed to be contained within a <div id="fulltext">. The
# div won't be automatically rendered, it's the containing pages
# responsibility to put in a div with this id.
#
# * this section is assumed to contain responses of type
# ServiceTypeValue["fulltext"]
#
# * The section will be displayed with stock heading block including a title
# constructed from the display_name of ServiceTypeValue["fulltext"], or
# in general the display_name of the first ServiceTypeValue included
# in this section.
#
# * The section will include a stock 'spinner' if there are potential background
# results being gathered for the ServiceTypeValue(s) contained.
#
# * The actual ServiceResponses collected for the ServiceTypeValue included
# will be rendered with a _standard_response_item
# partial, using render :collection.
#
# * The section will be displayed whether or not there are any actual
# responses included. If there are no responses, a message will be displayed
# to that effect.
#
# The display of a section can be customized via configuration parameters to
# a large degree, including supplying your own partial to take over almost
# all display of the section.
#
# === Customizing ServiceTypeValues
#
# You can specifically supply the ServiceTypeValues contained in this
# section, to a different type than would be guessed from the div_id:
#
# {:div_id => "my_area", :service_type_values => ["fulltext"]}
#
# Or specify multiple types included in one section:
#
# {:div_id => "related_items", :service_type_values => ['cited_by', 'similar_items']}
#
# Or a section that isn't used for displaying service responses at all,
# and has no service type:
#
# {:div_id => "coins", :partial => "coins", :service_type_values => []}
#
# Note that a custom partial needs to be supplied if there are no service_type_values supplied.
#
# === Customizing heading display
#
# You can supply a title for the section that's different than what would
# be guessed from it's ServiceTypeValues. You can also supply a prompt.
#
# {:div_id =>"excerpts", :section_title=>"Lots of good stuff", :section_prompt => "Limited previews and excerpts."}
#
# You can also suppress display of the stock section heading at all:
# {:show_heading => false, ...}
#
# This may be because you don't want a heading, or because you are supplying
# a custom partial that will take care of the heading in a custom way.
#
# === Customizing spinner display
#
# You can also suppress display of the stock spinner, because you don't
# want a spinner, or because your custom partial will be taking care of it.
# {:show_spinner => false, ...}
#
# By default, the spinner displays what type of thing it's waiting on, guessing
# from the ServiceTypeValue configured. If you want to specify this item name:
# {:item_name_plural => "Related Items", ...}
#
# === Customizing visibility of section
#
# By default, a section will simply be displayed regardless of whether
# there are any actual responses to display. However, the 'visibility'
# argument can be used to customize this in many ways.
# visibilty:
# [*true*]
# Default, always show section.
# [*false*]
# Never show section. (Not sure why you'd want this).
# [<b>:any_services</b>]
# Show section if and only if there are any configured
# services that generate the ServiceTypeValues included
# in this section, regardless of whether in this case
# they have or not.
# [<b>:in_progress</b>]
# Show the section if responses exist, OR if any services
# are currently in progress that are capable of generating
# responses of the right type for this section.
# [<b>:responses_exist</b>]
# Show the section if and only if some responses
# have actually been generated of the types contained
# in this section.
# [<b>:complete_with_responses</b>]
# Show the section only if there are responses
# generated, AND all services supplying
# responses of the type contained in section
# have completed, no more responses are possible.
# [<b>(lambda object)</b>]
# Most flexibly of all, you can supply your own lambda
# supplying custom logic to determine whether to show
# the section, based on current context. The lambda
# will be passed the SectionRenderer object as an argument,
# providing access to the Umlaut Request with context.
# eg:
# :visibility => lambda do |renderer|
# renderer.request.something == something
# end
#
# === List with limit
#
# You can have the section automatically use the ResolveHelper#list_with_limit
# helper to limit the number of items initially displayed, with the rest behind
# a 'more' expand/contract widget.
#
# { :div_id => "highlighted_link",
# :list_visible_limit => 1,
# :visibility => :in_progress, ... }
#
# === Custom partial display
#
# By default, the SectionRenderer assumes that all the ServiceResponses included
# are capable of being displayed by the standard_item_response, and displays
# them simply by render standard_item_response with a \:collection. Sometimes
# this assumption isn't true, or you want custom display for other reasons.
# You can supply your own partial that the renderer will use to display
# the content.
#
# { :div_id => "my_div", :partial => "my_partial", ... }
#
# The partial so supplied should live in resolve/_my_partial.html.erb
#
# When this partial is called, it will have local variables set
# to give it the data it needs in order to create a display:
#
# [*responses_by_type*]
# a hash keyed by ServiceTypeValue name, with the
# the value being an array of the respective ServiceType
# objects.
# [*responses*] a flattened list of all ServiceTypes included in
# this section, of varying ServiceTypeValues. Most
# useful when the section only includes one
# ServiceTypeValue
# [*renderer*] The SectionRenderer object itself, from which
# the current umlaut request can be obtained,
# among other things.
#
# You can supply additional static local arguments to the partial
# in the SectionRenderer setup:
#
# {:div_id=> "foo", :partial=>"my_partial", :partial_locals => {:mode => "big"}, ... }
#
# the :partial_locals argument can be used with the standard_response_item
# too:
# {:div_id => "highlighted_link", :partial_locals => {:show_source => true}}
#
# Note that your custom partial will still be displayed with stock
# header and possibly spinner surrounding it. You can suppress these elements:
#
# {:div_id => "cover_image", :partial => "cover_image", :show_heading => false, :show_spinner => false}
#
# But even so, some 'wrapping' html is rendered surrounding your partial.
# If you want to disable even this, because your partial will take care of it
# itself, you can do so with \:show_partial_only => true
# {:div_id => "search_inside", :partial => "search_inside", :show_partial_only => true}
class SectionRenderer
  include ActionView::Helpers::TagHelper

  @@bg_update_sections = @@partial_update_sections = nil

  # First argument is the current umlaut Request object.
  # Second argument is a session description hash. See class overview
  # for an overview. Recognized keys of session description hash:
  # * [id] SessionRenderer will look up session description hash in
  #   resolve_views finding one with :div_id == id
  # * [div_id] The id of the <div> the section lives in. Also used
  #   generally as unique ID for the section.
  # * [service_type_values] ServiceTypeValue's that this section contains.
  #   defaults to [ServiceTypeValue[div_id]]
  # * [section_title] Title for the section. Defaults to
  #   service_type_values.first.display_name
  # * [section_prompt] Prompt. Default nil.
  # * [show_heading] Show the heading section at all. Default true.
  # * [show_spinner] Show a stock spinner for bg action for service_type_values.
  #   default true.
  # * [item_name_plural] Pluralized name of the objects included, used in
  #   spinner message. Default service_type_values.first.display_name_pluralize
  # * [visibility] What logic to use to decide whether to show the section at
  #   all. true|false|:any_services|:in_progress|:responses_exist|:complete_with_responses|(lambda object)
  # * [list_visible_limit] Use list_with_limit to limit initially displayed
  #   items to value. Default nil, meaning don't use list_with_limit.
  # * [partial] Use a custom partial to display this section, instead of
  #   using render("standard_response_item", :collection => [all responses]) as default.
  # * [show_partial_only] Display custom partial without any of the usual
  #   standardized wrapping HTML. Custom partial will take care of it itself.
  def initialize(a_umlaut_request, section_def = {})
    @umlaut_request = a_umlaut_request
    @section_id = section_def[:id] || section_def[:div_id]
    raise Exception.new("SectionRenderer needs an :id passed in arguments hash") unless @section_id
    # Merge in default arguments for this section from config.
    construct_options(section_def)
  end

  # Returns all ServiceTypeValue objects contained in this section, as
  # configured. Lazy caches result for performance.
  def service_type_values
    @service_type_values ||=
      @options[:service_type_values].collect do |s|
        s.kind_of?(ServiceTypeValue)? s : ServiceTypeValue[s]
      end
  end

  # Whether any services that generate #service_type_values are
  # currently in progress. Lazy caches result for efficiency.
  def services_in_progress?
    # cache for efficiency
    @services_in_progress ||=
      @umlaut_request.service_types_in_progress?(service_type_values)
  end

  # Hash of ServiceType objects (join obj representing individual response
  # data) included in this section. Keyed by string ServiceTypeValue id,
  # value is array of ServiceTypes. Note: the hash always has one key per
  # configured type, even when its value array is empty.
  def responses
    unless (@responses)
      @responses = {}
      service_type_values.each do |st|
        @responses[st.name] = @umlaut_request.get_service_type(st)
      end
    end
    @responses
  end

  # All the values from #responses, flattened into a simple Array.
  def responses_list
    responses.values.flatten
  end

  # True when no responses at all have been generated for this section's types.
  def responses_empty?
    responses_list.empty?
  end

  def request
    return @umlaut_request
  end

  def div_id
    return @section_id
  end

  def show_heading?
    (! show_partial_only?) && @options[:show_heading]
  end

  # Builds the stock heading <div> with optional title and prompt,
  # HTML-escaped. (A stray no-op content_tag call was removed here.)
  def render_heading
    output = ''
    output << '<div class="section_heading">'
    (output << '<h3>' << CGI::escapeHTML(section_title) << '</h3>') if section_title
    (output << '<p class="section_prompt">' << CGI::escapeHTML(section_prompt) << '</p>') if section_prompt
    output << '</div>'
    output.html_safe
  end

  def show_spinner?
    (! show_partial_only?) && @options[:show_spinner] &&
      @umlaut_request.service_types_in_progress?(service_type_values)
  end

  # A hash suitable to be passed to Rails render(), to render
  # a spinner for this section. Called by section_display partial,
  # nobody else should need to call it.
  def spinner_render_hash
    { :partial => "background_progress",
      :locals =>{ :svc_types => service_type_values,
                  :div_id => "progress_#{@section_id}",
                  :current_set_empty => responses_empty?,
                  :item_name => @options[:item_name_plural]}
    }
  end

  def show_partial_only?
    @options[:show_partial_only]
  end

  def custom_partial?
    ! @options[:partial].nil?
  end

  # A hash suitable to be passed to Rails render() to render the
  # inner content portion of the section. Called by the section_display
  # partial, nobody else should need to call this. You may be looking
  # for ResolveHelper#render_section instead.
  def content_render_hash
    if custom_partial?
      {:partial => @options[:partial].to_s,
       :object => responses_list,
       :locals => @options[:partial_locals].merge(
          {:responses_by_type => responses,
           :responses => responses_list,
           :umlaut_request => request,
           :renderer => self})}
    else
      {:partial => @options[:item_partial].to_s,
       :collection => responses_list,
       :locals => @options[:partial_locals].clone}
    end
  end

  # used only with with list_with_limit functionality in section_display
  # partial.
  def item_render_hash(item)
    # need to clone @options[:partial_locals], because
    # Rails will modify it to add the 'object' to it. Bah!
    {:partial => @options[:item_partial],
     :object => item,
     :locals => @options[:partial_locals].clone}
  end

  # Is the section visible according to its settings calculated in current
  # context?
  def visible?
    case @options[:visibility]
    when true, false
      @options[:visibility]
    when :any_services
      # do any services exist which even potentially generate our types, even
      # if they've completed without doing so?.
      nil != @umlaut_request.dispatched_services.to_a.find do |ds|
        ! (service_type_values & ds.service.service_types_generated ).empty?
      end
    when :in_progress
      # Do we have any of our types generated, or any services in progress
      # that might generate them?
      (! responses_empty?) || services_in_progress?
    when :responses_exist
      # Have any responses of our type actually been generated?
      ! responses_empty?
    when :complete_with_responses
      # BUG FIX: previously checked `responses.empty?`, but #responses is a
      # hash keyed by type name and is non-empty even with zero responses;
      # use responses_empty? like the :responses_exist branch does.
      (! responses_empty?) && ! (services_in_progress?)
    when Proc
      # It's a lambda, which takes the renderer (with @umlaut_request) as arg
      @options[:visibility].call(self)
    else true
    end
  end

  def list_visible_limit
    @options[:list_visible_limit]
  end

  def section_title
    @options[:section_title]
  end

  def section_prompt
    @options[:section_prompt]
  end

  protected

  # Merges caller-supplied section options over static defaults, fills in
  # calculable defaults (title, partials), and sanity-checks the result.
  def construct_options(arguments)
    # Fill in static defaults
    @options = {:show_spinner => true,
                :show_heading => true,
                :visibility => true,
                :show_partial_only => false,
                :partial_locals => {}}.merge!(arguments)

    # service type value default to same name as section_id
    @options[:service_type_values] ||= [@section_id]

    # Fill in calculatable-defaults
    if (service_type_values.length > 0)
      @options = {:section_title =>
                    service_type_values.first.display_name
                 }.merge(@options)
    end

    # Partials to display. Default to _standard_response_item item partial.
    if ( @options[:partial] == true)
      @options[:partial] = @section_id
    end
    if (@options[:partial].blank?)
      @options[:item_partial] =
        case @options[:item_partial]
        when true then @section_id + "_item"
        # BUG FIX: was bare `options[:item_partial]`, a NameError whenever
        # :item_partial was supplied as a String.
        when String then @options[:item_partial]
        else "standard_response_item"
        end
    end

    # sanity check
    if ( @options[:show_partial_only] && ! @options[:partial])
      raise Exception.new("SectionRenderer: You must supply a :partial argument if :show_partial_only is set true")
    end

    return @options
  end
end
factor out any_services? so it can be called seperately
# = The Section Architecture
#
# Umlaut has what could be considered a 'domain specific language' for
# describing the display individual sections of content on the resolve menu
# page. These sections often correspond to a ServiceTypeValue, like "fulltext".
# But sometimes may include multiple ServiceTypeValues (eg related_items section
# includes cited_by and similar_items), or no ServiceTypeValue at all (eg
# section to display a COinS).
#
# A description of a section is simply a hash with certain conventional
# keys describing various aspects of the contents and display of that section.
# These hashes are listed in the resolve_sections application configuration
# variable, initialized in the resolve_views.rb initializer, and customized
# or over-ridden in the local resolve_views.rb initializer.
#
# One benefit of describing a section through configuration is that section
# display can often by changed at configure time without requiring a code
# time. Another is that the description of the section can be used not
# only to generate the initial HTML page; but also by the javascript that
# update the sections with new background content as available; and by the
# partial_html_sections api that delivers HTML fragments for sections in an
# XML or JSON container.
#
# A description of a section is simply a hash, suitable for passing to
# SectionRenderer.new, detailed below. Plus some additional variables
# specifying _where_ to display the section, documented in the resolve_views.rb
# initializer.
#
# = The SectionRenderer
# A SectionRenderer object provides logic for displaying a specific section
# on the Umlaut resolve menu page. It is initialized with a hash describing
# the details -- or significantly, with simply a pointer to such a hash
# already existing in the resolve_sections config variable.
#
# A SectionRenderer is typically created by the ResolveHelper#render_section
# method, which then passes the SectionRender object to the
# _section_display.erb.html that does the actual rendering, using
# the SectionRenderer for logic and hashes to pass to render calls in
# the partial.
#
#
# == Section Options
#
# Section options are typically configured in hashes in the application
# config variable resolve_sections, which is expected to be a list of hashes.
# That hash is suitable to be passed to a SectionRenderer.new() as configuration
# options for the section. The various ways these options can be used
# is documented below.
#
# === Simplest Case, Defaults
#
# As is common in ruby, SectionRenderer will make a lot of conventional
# assumptions, allowing you to be very concise for the basic simple case:
#
# { :div_id => "fulltext", :html_area => :main }
#
# This means that:
# * this section is assumed to be contained within a <div id="fulltext">. The
# div won't be automatically rendered, it's the containing pages
# responsibility to put in a div with this id.
#
# * this section is assumed to contain responses of type
# ServiceTypeValue["fulltext"]
#
# * The section will be displayed with stock heading block including a title
# constructed from the display_name of ServiceTypeValue["fulltext"], or
# in general the display_name of the first ServiceTypeValue included
# in this section.
#
# * The section will include a stock 'spinner' if there are potential background
# results being gathered for the ServiceTypeValue(s) contained.
#
# * The actual ServiceResponses collected for the ServiceTypeValue included
# will be rendered with a _standard_response_item
# partial, using render :collection.
#
# * The section will be displayed whether or not there are any actual
# responses included. If there are no responses, a message will be displayed
# to that effect.
#
# The display of a section can be customized via configuration parameters to
# a large degree, including supplying your own partial to take over almost
# all display of the section.
#
# === Customizing ServiceTypeValues
#
# You can specifically supply the ServiceTypeValues contained in this
# section, to a different type than would be guessed from the div_id:
#
# {:div_id => "my_area", :service_type_values => ["fulltext"]}
#
# Or specify multiple types included in one section:
#
# {:div_id => "related_items", :service_type_values => ['cited_by', 'similar]}
#
# Or a section that isn't used for displaying service responses at all,
# and has no service type:
#
# {:div_id => "coins", :partial => "coins", :service_type_values => []}
#
# Note that a custom partial needs to be supplied if there are no service_type_values supplied.
#
# === Customizing heading display
#
# You can supply a title for the section that's different than what would
# be guessed from it's ServiceTypeValues. You can also supply a prompt.
#
# {:div_id =>"excerpts", :section_title=>"Lots of good stuff", :section_prompt => "Limited previes and excerpts."}
#
# You can also suppress display of the stock section heading at all:
# {:show_heading => false, ...}
#
# This may be becuase you don't want a heading, or because you are supplying
# a custom partial that will take care of the heading in a custom way.
#
# === Customizing spinner display
#
# You can also suppress display of the stock spinner, because you don't
# want a spinner, or because your custom partial will be taking care of it.
# {:show_spinner => false, ...}
#
# By default, the spinner displays what type of thing it's waiting on, guessing
# from the ServiceTypeValue configured. If you want to specify this item name:
# {:item_name_plural => "Related Items", ...}
#
# === Customizing visibility of section
#
# By default, a section will simply be displayed regardless of whether
# there are any actual responses to display. However, the 'visibility'
# argument can be used to customize this in many ways.
# visibilty:
# [*true*]
# Default, always show section.
# [*false*]
# Never show section. (Not sure why you'd want this).
# [<b>:any_services</b>]
# Show section if and only if there are any configured
# services that generate the ServiceTypeValues included
# in this section, regardless of whether in this case
# they have or not.
# [<b>:in_progress</b>]
# Show the section if responses exist, OR if any services
# are currently in progress that are capable of generating
# responses of the right type for this section.
# [<b>:responses_exist</b>]
# Show the section if and only if some responses
# have actually been generated of the types contained
# in this section.
# [<b>:complete_with_responses</b>]
# Show the section only if there are responses
# generated, AND all services supplying
# responses of the type contained in section
# have completed, no more responses are possible.
# [<b>(lambda object)</b>]
# Most flexibly of all, you can supply your own lambda
# supplying custom logic to determine whether to show
# the section, based on current context. The lambda
# will be passed the SectionRenderer object as an argument,
# providing access to the Umlaut Request with context.
# eg:
# :visibility => lambda do |renderer|
# renderer.request.something == something
# end
#
# === List with limit
#
# You can have the section automatically use the ResolveHelper#list_with_limit
# helper to limit the number of items initially displayed, with the rest behind
# a 'more' expand/contract widget.
#
# { :div_id => "highlighted_link",
# :list_visible_limit => 1,
# :visibility => :in_progress, ... }
#
# === Custom partial display
#
# By default, the SectionRenderer assumes that all the ServiceResposnes included
# are capable of being displayed by the standard_item_response, and displays
# them simply by render standard_item_response with a \:colection. Sometimes
# this assumption isn't true, or you want custom display for other reasons.
# You can supply your own partial that the renderer will use to display
# the content.
#
# { :div_id => "my_div", :partial => "my_partial", ... }
#
# The partial so supplied should live in resolve/_my_partial.html.erb
#
# When this partial is called, it will have local variables set
# to give it the data it needs in order to create a display:
#
# [*responses_by_type*]
# a hash keyed by ServiceTypeValue name, with the
# the value being an array of the respective ServiceType
# objects.
# [*responses*] a flattened list of all ServiceTypes included in
# this section, of varying ServiceTypeValues. Most
# useful when the section only includes one
# ServiceTypeValue
# [*renderer*] The SectionRenderer object itself, from which
# the current umlaut request can be obtained,
# among other things.
#
# You can supply additional static local arguments to the partial
# in the SectionRenderer setup:
#
# {:div_id=> "foo", :partial=>"my_partial", :partial_locals => {:mode => "big"}, ... }
#
# the :partial_locals argument can be used with the standard_response_item
# too:
# {:div_id => "highlighted_link", :partial_locals => {:show_source => true}}
#
# Note that your custom partial will still be displayed with stock
# header and possibly spinner surrounding it. You can suppress these elements:
#
# {:div_id => "cover_image", :partial => "cover_image", :show_heading => false, :show_spinner => false}
#
# But even so, some 'wrapping' html is rendered surrounding your partial.
# If you want to disable even this, becuase your partial will take care of it
# itself, you can do so with \:show_partial_only => true
# {:div_id => "search_inside", :partial => "search_inside", :show_partial_only => true}
# Renders one section of the Umlaut resolve menu: an optional heading, an
# optional background-update spinner, and the responses belonging to a
# configured set of ServiceTypeValues. See the comments above this class
# for the full description of the section definition hash.
class SectionRenderer
  include ActionView::Helpers::TagHelper

  # NOTE(review): these class variables are never read inside this class;
  # retained in case code elsewhere reopens the class and relies on them.
  @@bg_update_sections = @@partial_update_sections = nil

  # First argument is the current umlaut Request object.
  # Second argument is a section description hash. See class overview
  # for an overview. Recognized keys of the section description hash:
  # * [id] SectionRenderer will look up the section description hash in
  #        resolve_views, finding one with :div_id == id
  # * [div_id] The id of the <div> the section lives in. Also used
  #            generally as unique ID for the section.
  # * [service_type_values] ServiceTypeValue's that this section contains.
  #            defaults to [ServiceTypeValue[div_id]]
  # * [section_title] Title for the section. Defaults to
  #            service_type_values.first.display_name
  # * [section_prompt] Prompt. Default nil.
  # * [show_heading] Show the heading section at all. Default true.
  # * [show_spinner] Show a stock spinner for bg action for service_type_values.
  #            default true.
  # * [item_name_plural] Pluralized name of the objects included, used in
  #            spinner message. Default
  #            service_type_values.first.display_name_pluralize
  # * [visibility] What logic to use to decide whether to show the section at
  #            all. true|false|:any_services|:in_progress|:responses_exist|:complete_with_responses|(lambda object)
  # * [list_visible_limit] Use list_with_limit to limit initially displayed
  #            items to value. Default nil, meaning don't use
  #            list_with_limit.
  # * [partial] Use a custom partial to display this section, instead of
  #            using render("standard_response_item", :collection => [all responses]) as default.
  # * [show_partial_only] Display custom partial without any of the usual
  #            standardized wrapping HTML. Custom partial will
  #            take care of it itself.
  def initialize(a_umlaut_request, section_def = {})
    @umlaut_request = a_umlaut_request
    @section_id = section_def[:id] || section_def[:div_id]
    # ArgumentError (was bare Exception) -- still caught by callers rescuing Exception.
    raise ArgumentError.new("SectionRenderer needs an :id passed in arguments hash") unless @section_id
    # Merge in default arguments for this section from config.
    construct_options(section_def)
  end

  # Returns all ServiceTypeValue objects contained in this section, as
  # configured. Lazily caches the result for performance.
  def service_type_values
    @service_type_values ||=
      @options[:service_type_values].collect do |s|
        s.kind_of?(ServiceTypeValue) ? s : ServiceTypeValue[s]
      end
  end

  # Whether any services that generate #service_type_values are
  # currently in progress. Caches the result -- including a false result;
  # the previous `||=` memoization re-queried on every call when the
  # answer was false.
  def services_in_progress?
    return @services_in_progress if defined?(@services_in_progress)
    @services_in_progress = @umlaut_request.service_types_in_progress?(service_type_values)
  end

  # Hash of ServiceType objects (join obj representing individual response
  # data) included in this section. Keyed by string ServiceTypeValue name,
  # value is an array of ServiceTypes. Lazily built and cached.
  def responses
    unless (@responses)
      @responses = {}
      service_type_values.each do |st|
        @responses[st.name] = @umlaut_request.get_service_type(st)
      end
    end
    @responses
  end

  # All the values from #responses, flattened into a simple Array.
  def responses_list
    responses.values.flatten
  end

  # True when no responses of this section's types exist yet.
  def responses_empty?
    responses_list.empty?
  end

  # The current umlaut Request object.
  def request
    return @umlaut_request
  end

  # Unique id for this section; also the id of its surrounding <div>.
  def div_id
    return @section_id
  end

  # Heading is suppressed when :show_heading is false or in partial-only mode.
  def show_heading?
    (! show_partial_only?) && @options[:show_heading]
  end

  # Renders the section heading <div>, with optional escaped <h3> title and
  # prompt paragraph. (A discarded `content_tag` call that produced no
  # output has been removed.)
  def render_heading
    output = ''
    output << '<div class="section_heading">'
    (output << '<h3>' << CGI::escapeHTML(section_title) << '</h3>') if section_title
    (output << '<p class="section_prompt">' << CGI::escapeHTML(section_prompt) << '</p>') if section_prompt
    output << '</div>'
    output.html_safe
  end

  # Spinner shows only while relevant services are still running, and never
  # in partial-only mode.
  def show_spinner?
    (! show_partial_only?) && @options[:show_spinner] &&
      services_in_progress?
  end

  # A hash suitable to be passed to Rails render(), to render
  # a spinner for this section. Called by section_display partial,
  # nobody else should need to call it.
  def spinner_render_hash
    { :partial => "background_progress",
      :locals => { :svc_types => service_type_values,
                   :div_id => "progress_#{@section_id}",
                   :current_set_empty => responses_empty?,
                   :item_name => @options[:item_name_plural] }
    }
  end

  # True when the custom partial is rendered without the standard wrapper.
  def show_partial_only?
    @options[:show_partial_only]
  end

  # True when a custom :partial was configured for this section.
  def custom_partial?
    ! @options[:partial].nil?
  end

  # A hash suitable to be passed to Rails render() to render the
  # inner content portion of the section. Called by the section_display
  # partial, nobody else should need to call this. You may be looking
  # for ResolveHelper#render_section instead.
  def content_render_hash
    if custom_partial?
      {:partial => @options[:partial].to_s,
       :object => responses_list,
       :locals => @options[:partial_locals].merge(
         {:responses_by_type => responses,
          :responses => responses_list,
          :umlaut_request => request,
          :renderer => self})}
    else
      {:partial => @options[:item_partial].to_s,
       :collection => responses_list,
       :locals => @options[:partial_locals].clone}
    end
  end

  # Used only with the list_with_limit functionality in the section_display
  # partial.
  def item_render_hash(item)
    # need to clone @options[:partial_locals], because
    # Rails will modify it to add the 'object' to it. Bah!
    {:partial => @options[:item_partial],
     :object => item,
     :locals => @options[:partial_locals].clone}
  end

  # Is the section visible according to its settings, calculated in the
  # current context?
  def visible?
    case @options[:visibility]
    when true, false
      @options[:visibility]
    when :any_services
      any_services?
    when :in_progress
      # Do we have any of our types generated, or any services in progress
      # that might generate them?
      (! responses_empty?) || services_in_progress?
    when :responses_exist
      # Have any responses of our type actually been generated?
      ! responses_empty?
    when :complete_with_responses
      # Fixed: previously tested `responses.empty?`, but #responses is a hash
      # keyed per configured type and is non-empty even with zero responses;
      # use responses_empty? so "with responses" actually requires responses.
      (! responses_empty?) && ! (services_in_progress?)
    when Proc
      # It's a lambda, which takes the renderer itself as an arg
      @options[:visibility].call(self)
    else true
    end
  end

  # Do any services exist which even potentially generate our types, even
  # if they've completed without doing so?
  def any_services?
    nil != @umlaut_request.dispatched_services.to_a.find do |ds|
      ! (service_type_values & ds.service.service_types_generated).empty?
    end
  end

  def list_visible_limit
    @options[:list_visible_limit]
  end

  def section_title
    @options[:section_title]
  end

  def section_prompt
    @options[:section_prompt]
  end

  protected

  # Merges static defaults, config-calculated defaults, and caller-supplied
  # arguments into @options. Raises ArgumentError when :show_partial_only is
  # requested without a :partial.
  def construct_options(arguments)
    # Fill in static defaults
    @options = {:show_spinner => true,
                :show_heading => true,
                :visibility => true,
                :show_partial_only => false,
                :partial_locals => {}}.merge!(arguments)

    # service type values default to same name as section_id
    @options[:service_type_values] ||= [@section_id]

    # Fill in calculatable defaults
    if (service_type_values.length > 0)
      @options = {:section_title =>
                    service_type_values.first.display_name
                 }.merge(@options)
    end

    # Partials to display. Default to _standard_response_item item partial.
    if (@options[:partial] == true)
      @options[:partial] = @section_id
    end
    if (@options[:partial].blank?)
      @options[:item_partial] =
        case @options[:item_partial]
        # Interpolation also tolerates a Symbol @section_id (was `+ "_item"`).
        when true then "#{@section_id}_item"
        # Fixed: originally read the undefined local `options` (NameError).
        when String then @options[:item_partial]
        else "standard_response_item"
        end
    end

    # sanity check
    if (@options[:show_partial_only] && ! @options[:partial])
      raise ArgumentError.new("SectionRenderer: You must supply a :partial argument if :show_partial_only is set true")
    end

    return @options
  end
end
|
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.perform_deliveries = false
config.action_mailer.raise_delivery_errors = false
config.gem "faker"
config.gem "populator"
#config.gem "sqlite3-ruby", :version => "1.2.3"
#config.gem "net-ssh"
#config.gem "net-scp"
# NOTE: `config.gem 'somegem'` declarations in config/ have been replaced by the Bundler Gemfile.
# Settings specified here will take precedence over those in config/environment.rb
# In the development environment your application's code is reloaded on
# every request. This slows down response time but is perfect for development
# since you don't have to restart the webserver when you make code changes.
config.cache_classes = false
# Log error messages when you accidentally call methods on nil.
config.whiny_nils = true
# Show full error reports and disable caching
config.action_controller.consider_all_requests_local = true
config.action_view.debug_rjs = true
config.action_controller.perform_caching = false
# Don't care if the mailer can't send
config.action_mailer.perform_deliveries = false
config.action_mailer.raise_delivery_errors = false
|
# Mixes per-locale attribute storage into ActiveRecord models, backed by an
# hstore column named "<attr>_translations" for each translated attribute.
module HstoreTranslate
  module Translates
    # Declares +attrs+ as translated. For each attribute this defines:
    # * a reader returning the value for I18n.locale (with fallbacks),
    # * a writer storing the value under I18n.locale,
    # * a `with_<attr>_translation(value, locale)` finder scope.
    def translates(*attrs)
      include InstanceMethods
      class_attribute :translated_attrs
      self.translated_attrs = attrs
      attrs.each do |attr_name|
        # Use the hstore coder only when the adapter has no native hstore type.
        serialize "#{attr_name}_translations", ActiveRecord::Coders::Hstore unless HstoreTranslate::native_hstore?
        # Reader for the current I18n.locale (with fallback lookup).
        define_method attr_name do
          read_hstore_translation(attr_name)
        end
        # Writer storing +value+ under the current I18n.locale.
        define_method "#{attr_name}=" do |value|
          write_hstore_translation(attr_name, value)
        end
        # Scope matching records whose translation store contains the given
        # locale/value pair (hstore containment operator @>).
        define_singleton_method "with_#{attr_name}_translation" do |value, locale = I18n.locale|
          quoted_translation_store = connection.quote_column_name("#{attr_name}_translations")
          where("#{quoted_translation_store} @> hstore(:locale, :value)", locale: locale, value: value)
        end
      end
      # Route "<attr>_<locale>" convenience accessors (e.g. title_en) through
      # the hstore store via respond_to?/method_missing.
      alias_method_chain :respond_to?, :translates
      alias_method_chain :method_missing, :translates
    end
    module InstanceMethods
      # Turns off I18n fallback lookup for this instance.
      def disable_fallback
        @disable_fallback = true
      end
      # Re-enables I18n fallback lookup for this instance.
      def enable_fallback
        @disable_fallback = false
      end
      protected
      # Returns the fallback locale chain for +locale+, or nil when fallbacks
      # are disabled on this instance or I18n has no fallback support loaded.
      def hstore_translate_fallback_locales(locale)
        return if !!@disable_fallback || !I18n.respond_to?(:fallbacks)
        I18n.fallbacks[locale]
      end
      # Reads the translation of +attr_name+ for +locale+, walking the
      # fallback chain until a non-empty value is found.
      def read_hstore_translation(attr_name, locale = I18n.locale)
        translations = send("#{attr_name}_translations") || {}
        translation = translations[locale.to_s]
        if fallback_locales = hstore_translate_fallback_locales(locale)
          fallback_locales.each do |fallback_locale|
            t = translations[fallback_locale.to_s]
            if t && !t.empty? # differs from blank?
              translation = t
              break
            end
          end
        end
        translation
      end
      # Stores +value+ as the translation of +attr_name+ for +locale+, marking
      # the attribute dirty only when the value actually changes.
      def write_hstore_translation(attr_name, value, locale = I18n.locale)
        translation_store = "#{attr_name}_translations"
        translations = send(translation_store) || {}
        send("#{translation_store}_will_change!") unless translations[locale.to_s] == value
        translations[locale.to_s] = value
        send("#{translation_store}=", translations)
        value
      end
      # respond_to? extension: "<attr>_<locale>" accessors are supported.
      def respond_to_with_translates?(symbol, include_all = false)
        return true if parse_translated_attribute_accessor(symbol)
        respond_to_without_translates?(symbol, include_all)
      end
      # method_missing extension dispatching "<attr>_<locale>" reads/writes.
      def method_missing_with_translates(method_name, *args)
        translated_attr_name, locale, assigning = parse_translated_attribute_accessor(method_name)
        return method_missing_without_translates(method_name, *args) unless translated_attr_name
        if assigning
          write_hstore_translation(translated_attr_name, args.first, locale)
        else
          read_hstore_translation(translated_attr_name, locale)
        end
      end
      # Parses "<attr>_<xx>[=]" into [attr Symbol, locale Symbol, assigning?],
      # or nil when the name does not match a declared translated attribute.
      # Note: only two-letter locales are recognized by this regex.
      def parse_translated_attribute_accessor(method_name)
        return unless method_name =~ /\A([a-z_]+)_([a-z]{2})(=?)\z/
        translated_attr_name = $1.to_sym
        return unless translated_attrs.include?(translated_attr_name)
        locale = $2.to_sym
        assigning = $3.present?
        [translated_attr_name, locale, assigning]
      end
    end
  end
end
Document the parse method.
# Mixes per-locale attribute storage into ActiveRecord models, backed by an
# hstore column named "<attr>_translations" for each translated attribute.
module HstoreTranslate
  module Translates
    # Declares +attrs+ as translated. For each attribute this defines:
    # * a reader returning the value for I18n.locale (with fallbacks),
    # * a writer storing the value under I18n.locale,
    # * a `with_<attr>_translation(value, locale)` finder scope.
    def translates(*attrs)
      include InstanceMethods
      class_attribute :translated_attrs
      self.translated_attrs = attrs
      attrs.each do |attr_name|
        # Use the hstore coder only when the adapter has no native hstore type.
        serialize "#{attr_name}_translations", ActiveRecord::Coders::Hstore unless HstoreTranslate::native_hstore?
        # Reader for the current I18n.locale (with fallback lookup).
        define_method attr_name do
          read_hstore_translation(attr_name)
        end
        # Writer storing +value+ under the current I18n.locale.
        define_method "#{attr_name}=" do |value|
          write_hstore_translation(attr_name, value)
        end
        # Scope matching records whose translation store contains the given
        # locale/value pair (hstore containment operator @>).
        define_singleton_method "with_#{attr_name}_translation" do |value, locale = I18n.locale|
          quoted_translation_store = connection.quote_column_name("#{attr_name}_translations")
          where("#{quoted_translation_store} @> hstore(:locale, :value)", locale: locale, value: value)
        end
      end
      # Route "<attr>_<locale>" convenience accessors (e.g. title_en) through
      # the hstore store via respond_to?/method_missing.
      alias_method_chain :respond_to?, :translates
      alias_method_chain :method_missing, :translates
    end
    module InstanceMethods
      # Turns off I18n fallback lookup for this instance.
      def disable_fallback
        @disable_fallback = true
      end
      # Re-enables I18n fallback lookup for this instance.
      def enable_fallback
        @disable_fallback = false
      end
      protected
      # Returns the fallback locale chain for +locale+, or nil when fallbacks
      # are disabled on this instance or I18n has no fallback support loaded.
      def hstore_translate_fallback_locales(locale)
        return if !!@disable_fallback || !I18n.respond_to?(:fallbacks)
        I18n.fallbacks[locale]
      end
      # Reads the translation of +attr_name+ for +locale+, walking the
      # fallback chain until a non-empty value is found.
      def read_hstore_translation(attr_name, locale = I18n.locale)
        translations = send("#{attr_name}_translations") || {}
        translation = translations[locale.to_s]
        if fallback_locales = hstore_translate_fallback_locales(locale)
          fallback_locales.each do |fallback_locale|
            t = translations[fallback_locale.to_s]
            if t && !t.empty? # differs from blank?
              translation = t
              break
            end
          end
        end
        translation
      end
      # Stores +value+ as the translation of +attr_name+ for +locale+, marking
      # the attribute dirty only when the value actually changes.
      def write_hstore_translation(attr_name, value, locale = I18n.locale)
        translation_store = "#{attr_name}_translations"
        translations = send(translation_store) || {}
        send("#{translation_store}_will_change!") unless translations[locale.to_s] == value
        translations[locale.to_s] = value
        send("#{translation_store}=", translations)
        value
      end
      # respond_to? extension: "<attr>_<locale>" accessors are supported.
      def respond_to_with_translates?(symbol, include_all = false)
        return true if parse_translated_attribute_accessor(symbol)
        respond_to_without_translates?(symbol, include_all)
      end
      # method_missing extension dispatching "<attr>_<locale>" reads/writes.
      def method_missing_with_translates(method_name, *args)
        translated_attr_name, locale, assigning = parse_translated_attribute_accessor(method_name)
        return method_missing_without_translates(method_name, *args) unless translated_attr_name
        if assigning
          write_hstore_translation(translated_attr_name, args.first, locale)
        else
          read_hstore_translation(translated_attr_name, locale)
        end
      end
      # Internal: Parse a translated convenience accessor name.
      #
      # method_name - The accessor name.
      #
      # Examples
      #
      #   parse_translated_attribute_accessor("title_en=")
      #   # => [:title, :en, true]
      #
      #   parse_translated_attribute_accessor("title_fr")
      #   # => [:title, :fr, false]
      #
      # Returns the attribute name Symbol, locale Symbol, and a Boolean
      # indicating whether or not the caller is attempting to assign a value.
      def parse_translated_attribute_accessor(method_name)
        return unless method_name =~ /\A([a-z_]+)_([a-z]{2})(=?)\z/
        translated_attr_name = $1.to_sym
        return unless translated_attrs.include?(translated_attr_name)
        locale = $2.to_sym
        assigning = $3.present?
        [translated_attr_name, locale, assigning]
      end
    end
  end
end
|
#
# If your target environment is configured to use an Apache Cassandra cluster,
# please keep in mind that you will need to perform some configuration changes prior
# to running this tool:
#
# 1. Edit your cassandra.yml configuration file and set a high timeout value for each keyspace, e.g.:
# timeout: 100000
#
# 2. Create the following indexes in your Cassandra cluster:
# CREATE INDEX page_views_context_id_idx ON page_views.page_views (context_id);
# CREATE INDEX grade_changes_context_id_idx ON auditors.grade_changes (context_id);
#
# 3. Update timeout settings in your server's cassandra.yaml configuration files to large values, i.e.:
#
# read_request_timeout_in_ms: 60000
# range_request_timeout_in_ms: 60000
# request_timeout_in_ms: 60000
#
# Index creation can be a long-running process, so you should verify that the indexes have
# been successfully created by querying the page_views.page_views and auditors.grade_changes tables
# using a WHERE condition for the context_id column.
#
class SectionSplitter
# Entry point. Splits the course given by opts[:course_id] and/or every
# course under opts[:account_id], acting as the user in opts[:user_id].
# Returns the Array of newly created single-section courses.
def self.run(opts)
  user = opts[:user_id] && User.find(opts[:user_id])
  raise "User ID not provided or user not found" unless user

  split_courses = []

  if opts[:course_id]
    course = Course.find(opts[:course_id])
    raise "Course not found: #{opts[:course_id]}" unless course.present?
    split_courses.concat(process_course(user, course, opts))
  end

  if opts[:account_id]
    account = Account.find(opts[:account_id])
    raise "Account not found: #{opts[:account_id]}" unless account.present?
    account.courses.each do |account_course|
      split_courses.concat(process_course(user, account_course, opts))
    end
  end

  split_courses
end
# Splits a single multi-section course into one new course per active
# section. Returns an Array of the newly created courses ([] when the
# course is skipped). When opts[:delete] is set, the original course is
# destroyed after a successful split. Delayed jobs created during the run
# are cleaned up even when the split raises.
def self.process_course(user, course, opts)
  # Sanity check. Always return an Array: callers concatenate the result,
  # so the previous bare `return` (nil) would have raised a TypeError.
  return [] unless course
  unless course.active_course_sections.length > 1
    Rails.logger.info "[SECTION-SPLITTER] Skipping course #{course.id}: not a multi-section course"
    return []
  end
  unless (course.active_course_sections.select {|s| s.student_enrollments.length > 0}.length) > 1
    Rails.logger.info "[SECTION-SPLITTER] Skipping course #{course.id}: does not contain multiple sections with enrollments"
    return []
  end
  Rails.logger.info "[SECTION-SPLITTER] Splitting course #{course.id} [#{course.name}]..."
  result = []
  start_ts = Time.now
  begin
    real_time = Benchmark.realtime do
      # Attributes shared by every per-section course created below.
      args = {
        :enrollment_term => course.enrollment_term,
        :abstract_course => course.abstract_course,
        :account => course.account,
        :start_at => course.start_at,
        :conclude_at => course.conclude_at,
        :time_zone => course.time_zone
      }
      course.active_course_sections.each do |source_section|
        target_course = self.perform_course_copy(user, course, source_section, args)
        self.perform_section_migration(target_course, source_section)
        Rails.logger.info "[SECTION-SPLITTER] Converted section #{source_section.id} [#{source_section.name}] into course #{target_course.id} [#{target_course.name}]"
        result << target_course
      end
    end
    Rails.logger.info "[SECTION-SPLITTER] Finished splitting course #{course.id} [#{course.name}] in #{real_time} seconds."
    if opts[:delete]
      Rails.logger.info "[SECTION-SPLITTER] Deleting course #{course.id} [#{course.name}]..."
      course.destroy
    end
  ensure
    # Jobs enqueued by the copy/migration workers are redundant (the work
    # was performed inline); remove any created since we started.
    clean_delayed_jobs(course, start_ts)
  end
  result
end
# Best-effort removal of delayed jobs created at or after +timestamp+
# (jobs enqueued by the course-copy/migration workers are redundant since
# the work was performed inline). Failures are logged, never raised.
def self.clean_delayed_jobs(course, timestamp)
  begin
    Delayed::Job.where("created_at >= ?", timestamp).each(&:destroy)
  rescue StandardError => e
    # Narrowed from `rescue Exception` so signals/SystemExit are not swallowed.
    Rails.logger.error "[SECTION-SPLITTER] Unable to clean up delayed jobs for course ID=#{course.id}: #{e.inspect}"
  end
end
# Creates the target course (named after +source_section+) as a full
# course-copy of +source_course+, running CourseCopyWorker inline rather
# than via a delayed job. Returns the reloaded target course; re-raises
# any copy failure after capturing/logging it.
def self.perform_course_copy(user, source_course, source_section, args)
  args[:name] = source_course.name
  args[:course_code] = source_section.name
  target_course = source_course.account.courses.new
  target_course.attributes = args
  target_course.workflow_state = source_course.workflow_state
  target_course.save!
  content_migration = target_course.content_migrations.build(:user => nil, :source_course => source_course, :context => target_course, :migration_type => 'course_copy_importer', :initiated_source => :manual)
  content_migration.migration_settings[:source_course_id] = source_course.id
  content_migration.workflow_state = 'created'
  # Import inline instead of queueing the import as a separate job.
  content_migration.migration_settings[:import_immediately] = true
  content_migration.copy_options = {:everything => true}
  content_migration.migration_settings[:migration_ids_to_import] = {:copy => {:everything => true}}
  content_migration.user = user
  content_migration.save
  worker = Canvas::Migration::Worker::CourseCopyWorker.new
  begin
    worker.perform(content_migration)
  rescue StandardError => e
    # Narrowed from `rescue Exception`: we log and re-raise, but signals
    # and SystemExit should never be intercepted here.
    Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
    Rails.logger.error "[SECTION-SPLITTER] Unable to perform course copy (content migration ID=#{content_migration.id}) for course ID=#{source_course.id} [#{source_course.name}]: #{e.inspect}"
    raise e
  end
  target_course.reload
  target_course
end
# Runs SectionMigrationWorker inline to move +source_section+'s content and
# user data into +target_course+. Errors are captured and logged but not
# re-raised, so a failed migration leaves the copied course in place.
def self.perform_section_migration(target_course, source_section)
  worker = SectionMigrationWorker.new(target_course.id, source_section.id)
  begin
    worker.perform
  rescue StandardError => e
    # Narrowed from `rescue Exception` so signals/SystemExit are not swallowed.
    Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
    Rails.logger.error "[SECTION-SPLITTER] Unable to migrate source section ID=#{source_section.id} to target course ID=#{target_course.id}: #{e.inspect}"
  end
end
SectionMigrationWorker = Struct.new(:target_course_id, :source_section_id) do
# Migrates @source_section and all of its user data out of the source
# course into the (already course-copied) target course. The step order
# appears deliberate: section-restricted content is pruned before user
# data is moved, so override checks still see the original section.
def perform
  @target_course = Course.find(target_course_id)
  @source_section = CourseSection.find(source_section_id)
  @source_course = @source_section.course
  # Maps source-course asset strings to their target-course counterparts;
  # populated as #source_model resolves items.
  @source_asset_strings = {}
  # Remove course content that is not available to the source section
  clean_assignments
  clean_quizzes
  clean_discussion_topics
  clean_announcements
  clean_calendar_events
  # Migrate user data
  migrate_section
  migrate_enrollments
  migrate_overrides
  migrate_groups
  migrate_submissions
  migrate_quiz_submissions
  migrate_discussion_entries
  migrate_messages
  migrate_page_views_and_audit_logs
  migrate_asset_user_accesses
  migrate_content_participation_counts
  @target_course.save!
end
# Removes copied announcements whose source counterparts are restricted,
# via section overrides, to sections other than ours (see #clean_overridables).
def clean_announcements
  clean_overridables(@target_course.announcements)
  @target_course.reload
end
# Removes copied assignments whose source counterparts are restricted,
# via section overrides, to sections other than ours (see #clean_overridables).
def clean_assignments
  clean_overridables(@target_course.assignments)
  @target_course.reload
end
# Removes copied discussion topics whose source counterparts are restricted,
# via section overrides, to sections other than ours (see #clean_overridables).
def clean_discussion_topics
  clean_overridables(@target_course.discussion_topics)
  @target_course.reload
end
# Removes copied quizzes whose source counterparts are restricted,
# via section overrides, to sections other than ours (see #clean_overridables).
def clean_quizzes
  clean_overridables(@target_course.quizzes)
  @target_course.reload
end
# Removes from +collection+ (target-course copies) every item whose
# source-course counterpart is limited by CourseSection overrides to other
# sections. Linked records (quiz <-> assignment <-> topic, their overrides,
# and topic materialized views) are hard-deleted alongside each item.
def clean_overridables(collection)
  to_remove = collection.map {|a| {:target => a, :source => source_model(@source_course, a)}}.select {|h| remove_based_on_overrides?(h[:source])}
  to_remove.each do |h|
    model = h[:target]
    if model.is_a?(DiscussionTopic)
      # Drop the cached materialized view before deleting the topic.
      DiscussionTopic::MaterializedView.for(model).destroy
    elsif model.is_a?(Quizzes::Quiz) && model.assignment.present?
      # A graded quiz owns a shadow assignment; delete it too.
      @target_course.assignments.delete(model.assignment)
      model.assignment.assignment_overrides.each {|o| o.destroy_permanently!}
      model.assignment.destroy_permanently!
    elsif model.is_a?(Assignment)
      # An assignment may be backed by a quiz or a discussion topic.
      if model.quiz.present?
        @target_course.quizzes.delete(model.quiz)
        model.quiz.assignment_overrides.each {|o| o.destroy_permanently!}
        model.quiz.destroy_permanently!
      end
      if model.discussion_topic.present?
        DiscussionTopic::MaterializedView.for(model.discussion_topic).destroy
        @target_course.discussion_topics.delete(model.discussion_topic)
        model.discussion_topic.assignment_overrides.each {|o| o.destroy_permanently!}
        model.discussion_topic.destroy_permanently!
      end
    end
    # Finally remove the item itself and its own overrides.
    collection.delete(model)
    model.assignment_overrides.each {|o| o.destroy_permanently!}
    model.destroy_permanently!
  end
end
# Hard-deletes target-course copies of section-scoped calendar events that
# belong to sections other than the one being migrated.
def clean_calendar_events
  @source_course.calendar_events.active.where("course_section_id IS NOT NULL AND course_section_id <> ?", @source_section.id).each do |source_event|
    # Note: here source_model resolves against the *target* course.
    target_event = source_model(@target_course, source_event)
    target_event.destroy_permanently!
  end
  @source_course.reload
  @target_course.reload
end
# True when +model+ is limited, via CourseSection overrides, to sections
# other than @source_section -- i.e. the copied item should be removed from
# the target course. Items with no section overrides are kept.
def remove_based_on_overrides?(model)
  section_overrides = model.active_assignment_overrides.select { |override| override.set_type == 'CourseSection' }
  return false if section_overrides.empty?
  section_overrides.none? { |override| override.set_id == @source_section.id }
end
# Uses heuristics to locate the corresponding source content item in the source course for the given item in the target course.
# Matching is by workflow_state and title (plus points/dates/counts where
# available); raises when no match is found. Side effect: records the
# source->target asset-string mapping in @source_asset_strings when
# resolving against the migration's source course.
def source_model(source_course, model)
  source_model =
    case model
    when Announcement
      source_course.announcements.find {|a| a.workflow_state == model.workflow_state && a.title == model.title}
    when Assignment
      source_course.assignments.find {|a| a.workflow_state == model.workflow_state && a.title == model.title && a.points_possible == model.points_possible}
    when DiscussionTopic
      source_course.discussion_topics.find {|d| d.workflow_state == model.workflow_state && d.title == model.title}
    when Quizzes::Quiz
      source_course.quizzes.find {|q| q.workflow_state == model.workflow_state && q.title == model.title && q.points_possible == model.points_possible && q.question_count == model.question_count}
    when CalendarEvent
      source_course.calendar_events.find {|e| e.workflow_state == model.workflow_state && e.title == model.title && e.start_at == model.start_at && e.end_at == model.end_at}
    when GroupCategory
      source_course.group_categories.find {|g| g.name == model.name && g.role == model.role && g.deleted_at == model.deleted_at && g.group_limit == model.group_limit}
    else
      nil
    end
  raise "Unable to find source item for [#{model.inspect}] in course ID=#{source_course.id}" unless source_model
  @source_asset_strings[source_model.asset_string] = model.asset_string if source_course == @source_course
  source_model
end
# Re-parents the migrated section itself onto the target course.
def migrate_section
  @source_section.course = @target_course
  @source_section.save!
end
# Moves every enrollment of the migrated section onto the target course,
# one record at a time so enrollment callbacks still run.
def migrate_enrollments
  @source_section.enrollments.each do |e|
    e.course = @target_course
    e.save!
  end
  @source_section.reload
  @target_course.reload
end
# Copies applicable source-course overrides onto every active overridable
# item (assignments, quizzes, discussion topics) in the target course.
def migrate_overrides
  [:assignments, :quizzes, :discussion_topics].each do |association|
    @target_course.send(association).active.each { |overridable| process_overrides(overridable) }
  end
end
# Clones onto +model+ (a target-course item) the source overrides that
# apply to this migration: CourseSection overrides for @source_section,
# and ADHOC overrides whose students are all enrolled in the target course.
def process_overrides(model)
  source_model = source_model(@source_course, model)
  student_ids = @target_course.student_enrollments.map(&:user_id)
  source_model.active_assignment_overrides.each do |override|
    if (override.set_type == 'CourseSection' && override.set_id == @source_section.id) ||
      (override.set_type == 'ADHOC' && (override.assignment_override_students.map(&:user_id) - student_ids).empty?)
      clone_override(override, model)
    end
  end
end
# Copies +override+ (from the source course) onto +new_model+ in the target
# course, including ADHOC per-student override records. No-op when an
# equivalent override already exists on +new_model+. Raises for unsupported
# model types.
def clone_override(override, new_model)
  return unless new_model.assignment_overrides.where(:set_type => override.set_type, :set_id => override.set_id).empty?
  # The three overridable types differ only in the name of the parent
  # association; resolve the setter once instead of triplicating the
  # clone/save logic per branch (the original repeated it three times).
  parent_setter =
    case new_model
    when Assignment then :assignment=
    when Quizzes::Quiz then :quiz=
    when DiscussionTopic then :discussion_topic=
    else
      raise "Unexpected model type in update_override: #{new_model.inspect}"
    end
  new_override = override.clone
  new_override.send(parent_setter, new_model)
  new_override.save
  if new_override.set_type == 'ADHOC'
    # ADHOC overrides carry per-student join records; clone those too.
    override.assignment_override_students.each do |aos|
      new_aos = aos.clone
      new_aos.send(parent_setter, new_model)
      new_override.assignment_override_students << new_aos
      new_override.save
    end
  end
  new_override.reload
end
# Clones the source course's group categories into the target course, then
# re-points this section's groups at the cloned categories and bulk-moves
# the groups onto the target course.
def migrate_groups
  # Map each source category to its clone so groups can be re-linked below.
  group_category_map = {}
  @source_course.group_categories.each do |gc|
    new_category = gc.clone
    new_category.context = @target_course
    @target_course.group_categories << new_category
    @target_course.save
    group_category_map[gc] = new_category
  end
  groups = @source_course.groups.where(:course_section_id => @source_section.id)
  groups.each do |group|
    next unless group.group_category
    new_category = group_category_map[group.group_category]
    group.group_category = new_category
    group.save
  end
  # Bulk re-home; note update_all bypasses model callbacks/validations.
  groups.update_all(:context_id => @target_course.id)
  @source_course.reload
  @target_course.reload
end
# Replaces the course-copy's placeholder submissions with the section
# students' real submissions from the source course, re-homing submission
# attachments and comments along the way.
def migrate_submissions
  student_ids = @target_course.student_enrollments.map(&:user_id)
  @target_course.assignments.each do |a|
    # Drop any submissions the copy created for these students first.
    Submission.where(:assignment => a, :user_id => student_ids).delete_all
    source_assignment = source_model(@source_course, a)
    submissions = Submission.where(:assignment => source_assignment, :user_id => student_ids)
    # attachment_ids is a comma-separated string column on Submission.
    submissions.select {|s| s.attachment_ids.present? }.each do |s|
      attachment_ids = s.attachment_ids.split(",")
      Attachment.where(:id => attachment_ids, :context_type => 'Assignment', :context_id => source_assignment.id).update_all(:context_id => a.id)
    end
    submission_comments = SubmissionComment.where(:submission_id => submissions.map(&:id), :context => @source_course)
    submission_comments.update_all(:context_id => @target_course.id)
    # Re-point the submissions last, after dependent records are moved.
    submissions.update_all(:assignment_id => a.id)
  end
  @source_course.reload
  @target_course.reload
end
# Bulk-moves the section students' quiz submissions from each source quiz
# to its copied counterpart in the target course.
def migrate_quiz_submissions
  student_ids = @target_course.student_enrollments.map(&:user_id)
  @target_course.quizzes.each do |q|
    source_quiz = source_model(@source_course, q)
    submissions = Quizzes::QuizSubmission.where(:quiz => source_quiz, :user_id => student_ids)
    submissions.update_all(:quiz_id => q.id)
  end
  @source_course.reload
  @target_course.reload
end
# Moves root discussion entries (with their whole subtrees) onto the target
# course's topics when every participant belongs to this section, then
# refreshes participants, last_reply_at, and materialized views.
def migrate_discussion_entries
  section_user_ids = @target_course.enrollments.map(&:user_id).uniq
  @target_course.discussion_topics.each do |topic|
    source_topic = source_model(@source_course, topic)
    source_topic.root_discussion_entries.each do |entry|
      entry_ids = ([entry.id] + entry.flattened_discussion_subentries.pluck(:id)).uniq
      participant_ids = ([entry.user_id] + entry.flattened_discussion_subentries.pluck(:user_id)).uniq
      non_section_participant_ids = participant_ids - section_user_ids
      # Only move threads wholly owned by users of this section.
      if non_section_participant_ids.empty?
        DiscussionEntry.where(:id => entry_ids).update_all(:discussion_topic_id => topic.id)
        DiscussionEntryParticipant.where("discussion_entry_id IN (?) AND user_id NOT IN (?)", entry_ids, section_user_ids).delete_all
      end
    end
    source_topic.discussion_topic_participants.where(:user_id => section_user_ids).update_all(:discussion_topic_id => topic.id)
    source_topic.reload
    topic.last_reply_at = topic.discussion_entries.last.try(:created_at) || topic.posted_at
    topic.save
    DiscussionTopic.where(:id => topic.id).update_all(:user_id => source_topic.user_id) # ugly hack, but user_id is a readonly attribute
  end
  @source_course.reload
  @target_course.reload
  @target_course.discussion_topics.each do |topic|
    begin
      DiscussionTopic::MaterializedView.for(topic).update_materialized_view_without_send_later
    rescue StandardError => e
      # Narrowed from `rescue Exception` so signals/SystemExit propagate.
      Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
      Rails.logger.error "Unable to regenerate DiscussionTopic::MaterializedView for ID=#{topic.id}: #{e.inspect}"
    end
  end
end
# Re-homes messages belonging to users enrolled in the target course.
def migrate_messages
  enrolled_user_ids = @target_course.enrollments.map(&:user_id)
  @source_course.messages.where(:user_id => enrolled_user_ids).update_all(:context_id => @target_course.id)
  @source_course.reload
  @target_course.reload
end
# Re-homes page views and audit logs for the migrated users. Uses the
# Cassandra event stream when configured (`cassandra?` is defined outside
# this excerpt); otherwise updates relational page_views rows one by one.
def migrate_page_views_and_audit_logs
  user_ids = @target_course.enrollments.map(&:user_id)
  if cassandra?
    migrate_page_views_cassandra(user_ids)
    migrate_page_views_counters_by_context_and_hour(user_ids)
    migrate_page_views_counters_by_context_and_user(user_ids)
    migrate_participations_by_context(user_ids)
    migrate_grade_changes(user_ids)
  else
    @source_course.page_views.where(:user_id => user_ids).each do |p|
      p.context = @target_course
      p.save
    end
  end
end
# Re-points Cassandra page_views rows for this section's users at the
# target course. Requires the context_id index described in the header
# comments; context_type/user filtering is done client-side since only
# context_id is indexed.
def migrate_page_views_cassandra(user_ids)
  page_views = []
  PageView::EventStream.database.execute("SELECT request_id, context_type, user_id FROM page_views WHERE context_id = ?", @source_course.id).fetch {|row| page_views << row.to_hash}
  request_ids = page_views
    .select {|row| row["context_type"] == "Course" && user_ids.include?(row["user_id"].to_i)}
    .map {|row| row["request_id"]}
    .uniq
  PageView::EventStream.database.update("UPDATE page_views SET context_id = ? WHERE request_id IN (?)", @target_course.id, request_ids)
end
# Moves per-hour page-view/participation counters from source-course
# context rows to target-course context rows, one user at a time: the
# target row is created, the counts are added onto it (counter-style
# `x = x + ?` updates), and the source row is deleted.
def migrate_page_views_counters_by_context_and_hour(user_ids)
  source_course_global_id = @source_course.global_id
  target_course_global_id = @target_course.global_id
  user_ids.each do |user_id|
    user_global_id = User.find(user_id).global_id
    # Row keys have the form "course_<course_gid>/user_<user_gid>".
    source_context_user_global_id = "course_#{source_course_global_id}/user_#{user_global_id}"
    target_context_user_global_id = "course_#{target_course_global_id}/user_#{user_global_id}"
    page_views_counters_by_context_and_hour = []
    query = "SELECT context, hour_bucket, page_view_count, participation_count FROM page_views_counters_by_context_and_hour WHERE context = ?"
    PageView::EventStream.database.execute(query, source_context_user_global_id).fetch {|row| page_views_counters_by_context_and_hour << row.to_hash}
    page_views_counters_by_context_and_hour.each do |row|
      primary_key = {
        "context" => target_context_user_global_id,
        "hour_bucket" => row["hour_bucket"]
      }
      # Ensure the target row exists before incrementing its counters.
      PageView::EventStream.database.insert_record("page_views_counters_by_context_and_hour", primary_key, {})
      if row["page_view_count"]
        PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_hour SET page_view_count = page_view_count + ? WHERE context = ? AND hour_bucket = ?", row["page_view_count"], primary_key["context"], primary_key["hour_bucket"])
      end
      if row["participation_count"]
        PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_hour SET participation_count = participation_count + ? WHERE context = ? AND hour_bucket = ?", row["participation_count"], primary_key["context"], primary_key["hour_bucket"])
      end
      # Remove the source row now that its counts have been transferred.
      PageView::EventStream.database.update("DELETE FROM page_views_counters_by_context_and_hour WHERE context = ? AND hour_bucket = ?", source_context_user_global_id, row["hour_bucket"])
    end
  end
end
# Moves per-(context, user) page view counters for the given users from the
# source course's Cassandra rows onto the target course's rows.
#
# The row key here is "course_<gid>" plus a separate user_id column (unlike
# the per-hour table, whose key embeds the user). At most one source row
# exists per user; its counts are added onto the target row, then the source
# row is deleted.
def migrate_page_views_counters_by_context_and_user(user_ids)
  source_course_global_id = @source_course.global_id
  target_course_global_id = @target_course.global_id
  user_ids.each do |user_id|
    user_global_id = User.find(user_id).global_id.to_s
    source_context_global_id = "course_#{source_course_global_id}"
    target_context_global_id = "course_#{target_course_global_id}"
    page_views_counters_by_context_and_user = []
    query = "SELECT context, user_id, page_view_count, participation_count FROM page_views_counters_by_context_and_user WHERE context = ? AND user_id = ?"
    PageView::EventStream.database.execute(query, source_context_global_id, user_global_id).fetch {|row| page_views_counters_by_context_and_user << row.to_hash}
    page_views_counters_by_context_and_user.each do |row|
      primary_key = {
        "context" => target_context_global_id,
        "user_id" => user_global_id
      }
      # Ensure the target row exists before incrementing its counter columns.
      PageView::EventStream.database.insert_record("page_views_counters_by_context_and_user", primary_key, {})
      if row["page_view_count"]
        PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_user SET page_view_count = page_view_count + ? WHERE context = ? AND user_id = ?", row["page_view_count"], primary_key["context"], primary_key["user_id"])
      end
      if row["participation_count"]
        PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_user SET participation_count = participation_count + ? WHERE context = ? AND user_id = ?", row["participation_count"], primary_key["context"], primary_key["user_id"])
      end
      # Drop the migrated source row.
      PageView::EventStream.database.update("DELETE FROM page_views_counters_by_context_and_user WHERE context = ? AND user_id = ?", source_context_global_id, user_global_id)
    end
  end
end
# Moves participation audit rows for the given users from the source
# course's Cassandra context onto the target course's context.
#
# asset_code is translated through @source_asset_strings (populated as a
# side effect of source_model lookups during the content migration). Rows
# whose asset_code has no mapping are intentionally left in place.
def migrate_participations_by_context(user_ids)
  source_course_global_id = @source_course.global_id
  target_course_global_id = @target_course.global_id
  user_ids.each do |user_id|
    user_global_id = User.find(user_id).global_id
    source_context_user_global_id = "course_#{source_course_global_id}/user_#{user_global_id}"
    target_context_user_global_id = "course_#{target_course_global_id}/user_#{user_global_id}"
    participations_by_context = []
    query = "SELECT context, created_at, request_id, asset_category, asset_code, asset_user_access_id, url FROM participations_by_context WHERE context = ?"
    PageView::EventStream.database.execute(query, source_context_user_global_id).fetch {|row| participations_by_context << row.to_hash}
    participations_by_context.each do |row|
      values = {
        "asset_category" => row["asset_category"],
        # Map the source asset string to its target-course equivalent.
        "asset_code" => @source_asset_strings[row["asset_code"]],
        "asset_user_access_id" => row["asset_user_access_id"],
        "url" => row["url"]
      }
      primary_key = {
        "context" => target_context_user_global_id,
        "created_at" => row["created_at"],
        "request_id" => row["request_id"]
      }
      # Only migrate (insert target + delete source) when the asset mapping
      # is known; otherwise keep the original row untouched.
      if values["asset_code"].present?
        PageView::EventStream.database.insert_record("participations_by_context", primary_key, values)
        PageView::EventStream.database.update("DELETE FROM participations_by_context WHERE context = ? AND created_at = ? AND request_id = ?", source_context_user_global_id, row["created_at"], row["request_id"])
      end
    end
  end
end
# Repoints grade-change audit records for the given users from the source
# course to the target course, rewriting each record's assignment_id to the
# copied assignment in the target course.
#
# Only records with context_type "Course" belonging to migrated students are
# touched; records whose assignment no longer exists are skipped.
def migrate_grade_changes(user_ids)
  source_course_global_id = @source_course.global_id
  target_course_global_id = @target_course.global_id
  user_global_ids = user_ids.map {|user_id| User.find(user_id).global_id}
  grade_changes = []
  query = "SELECT id, assignment_id, context_id, context_type, student_id FROM grade_changes WHERE context_id = ?"
  Auditors::GradeChange::Stream.database.execute(query, source_course_global_id).fetch {|row| grade_changes << row.to_hash}
  grade_changes.select! {|row| row["context_type"] == "Course" && user_global_ids.include?(row["student_id"])}
  grade_changes.each do |row|
    assignment_id = Shard::local_id_for(row["assignment_id"])[0]
    # Use a nil-returning lookup: Assignment.find would raise
    # ActiveRecord::RecordNotFound for a stale id, aborting the whole
    # migration and making the `next unless assignment` guard unreachable.
    assignment = Assignment.where(:id => assignment_id).first
    next unless assignment
    new_assignment = source_model(@target_course, assignment)
    values = {
      "assignment_id" => new_assignment.id,
      "context_id" => target_course_global_id
    }
    primary_key = {
      "id" => row["id"]
    }
    Auditors::GradeChange::Stream.database.update_record("grade_changes", primary_key, values)
  end
end
# Repoints asset-user-access rows belonging to the migrated users at the
# target course.
def migrate_asset_user_accesses
  enrolled_user_ids = @target_course.enrollments.map(&:user_id)
  accesses = @source_course.asset_user_accesses.where(:user_id => enrolled_user_ids)
  accesses.update_all(:context_id => @target_course.id)
end
# Repoints content-participation-count rows belonging to the migrated users
# at the target course.
def migrate_content_participation_counts
  enrolled_user_ids = @target_course.enrollments.map(&:user_id)
  counts = @source_course.content_participation_counts.where(:user_id => enrolled_user_ids)
  counts.update_all(:context_id => @target_course.id)
end
# True when page views are backed by Cassandra rather than the relational DB.
def cassandra?
  'cassandra' == Setting.get('enable_page_views', 'db')
end
end
end
LMS-2031: Section split tool error in course 11694
#
# If your target environment is configured to use an Apache Cassandra cluster,
# please keep in mind that you will need to perform some configuration changes prior
# to running this tool:
#
# 1. Edit your cassandra.yml configuration file and set a high timeout value for each keyspace, e.g.:
# timeout: 100000
#
# 2. Create the following indexes in your Cassandra cluster:
# CREATE INDEX page_views_context_id_idx ON page_views.page_views (context_id);
# CREATE INDEX grade_changes_context_id_idx ON auditors.grade_changes (context_id);
#
# 3. Update timeout settings in your server's cassandra.yaml configuration files to large values, e.g.:
#
# read_request_timeout_in_ms: 60000
# range_request_timeout_in_ms: 60000
# request_timeout_in_ms: 60000
#
# Index creation can be a long-running process, so you should verify that the indexes have
# been successfully created by querying the page_views.page_views and auditors.grade_changes tables
# using a WHERE condition for the context_id column.
#
# Splits each section of a multi-section course into its own course.
#
# For every active section of the source course this tool (1) performs a
# full course copy into a brand-new course and (2) migrates the section's
# user data (enrollments, overrides, groups, submissions, discussions,
# messages, page views and audit logs) into that copy.
class SectionSplitter
  # Entry point. opts may contain :user_id (required), :course_id and/or
  # :account_id, and :delete to destroy each source course after a
  # successful split. Returns the array of newly created courses.
  def self.run(opts)
    result = []
    user = opts[:user_id] && User.find(opts[:user_id])
    raise "User ID not provided or user not found" unless user
    if opts[:course_id]
      course = Course.find(opts[:course_id])
      raise "Course not found: #{opts[:course_id]}" unless course.present?
      result += self.process_course(user, course, opts)
    end
    if opts[:account_id]
      account = Account.find(opts[:account_id])
      raise "Account not found: #{opts[:account_id]}" unless account.present?
      account.courses.each do |course|
        result += self.process_course(user, course, opts)
      end
    end
    result
  end

  # Splits one course. Returns [] (after logging) unless the course has more
  # than one active section and more than one section with student
  # enrollments.
  def self.process_course(user, course, opts)
    # Sanity check
    return unless course
    unless course.active_course_sections.length > 1
      Rails.logger.info "[SECTION-SPLITTER] Skipping course #{course.id}: not a multi-section course"
      return []
    end
    unless (course.active_course_sections.select {|s| s.student_enrollments.length > 0}.length) > 1
      Rails.logger.info "[SECTION-SPLITTER] Skipping course #{course.id}: does not contain multiple sections with enrollments"
      return []
    end
    Rails.logger.info "[SECTION-SPLITTER] Splitting course #{course.id} [#{course.name}]..."
    result = []
    start_ts = Time.now
    begin
      real_time = Benchmark.realtime do
        # Attributes carried over verbatim onto each target course.
        args = {
          :enrollment_term => course.enrollment_term,
          :abstract_course => course.abstract_course,
          :account => course.account,
          :start_at => course.start_at,
          :conclude_at => course.conclude_at,
          :time_zone => course.time_zone
        }
        course.active_course_sections.each do |source_section|
          target_course = self.perform_course_copy(user, course, source_section, args)
          self.perform_section_migration(target_course, source_section)
          Rails.logger.info "[SECTION-SPLITTER] Converted section #{source_section.id} [#{source_section.name}] into course #{target_course.id} [#{target_course.name}]"
          result << target_course
        end
      end
      Rails.logger.info "[SECTION-SPLITTER] Finished splitting course #{course.id} [#{course.name}] in #{real_time} seconds."
      if opts[:delete]
        Rails.logger.info "[SECTION-SPLITTER] Deleting course #{course.id} [#{course.name}]..."
        course.destroy
      end
    ensure
      # Jobs enqueued during the split would otherwise run against the
      # now-split course; drop everything created since start_ts.
      clean_delayed_jobs(course, start_ts)
    end
    result
  end

  # Best-effort removal of delayed jobs created at or after +timestamp+.
  def self.clean_delayed_jobs(course, timestamp)
    begin
      Delayed::Job.where("created_at >= ?", timestamp).each(&:destroy)
    rescue Exception => e
      Rails.logger.error "[SECTION-SPLITTER] Unable to clean up delayed jobs for course ID=#{course.id}: #{e.inspect}"
    end
  end

  # Creates the target course (named after the source course, with the
  # section name as course code) and synchronously runs a full course-copy
  # content migration into it. Re-raises on copy failure.
  def self.perform_course_copy(user, source_course, source_section, args)
    args[:name] = source_course.name
    args[:course_code] = source_section.name
    target_course = source_course.account.courses.new
    target_course.attributes = args
    target_course.workflow_state = source_course.workflow_state
    target_course.save!
    content_migration = target_course.content_migrations.build(:user => nil, :source_course => source_course, :context => target_course, :migration_type => 'course_copy_importer', :initiated_source => :manual)
    content_migration.migration_settings[:source_course_id] = source_course.id
    content_migration.workflow_state = 'created'
    content_migration.migration_settings[:import_immediately] = true
    content_migration.copy_options = {:everything => true}
    content_migration.migration_settings[:migration_ids_to_import] = {:copy => {:everything => true}}
    content_migration.user = user
    content_migration.save
    # Run the copy inline (not via the job queue) so the split is synchronous.
    worker = Canvas::Migration::Worker::CourseCopyWorker.new
    begin
      worker.perform(content_migration)
    rescue Exception => e
      Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
      Rails.logger.error "[SECTION-SPLITTER] Unable to perform course copy (content migration ID=#{content_migration.id}) for course ID=#{source_course.id} [#{source_course.name}]: #{e.inspect}"
      raise e
    end
    target_course.reload
    target_course
  end

  # Runs the per-section data migration inline. Failures are logged and
  # reported but intentionally not re-raised, so remaining sections still
  # get processed.
  def self.perform_section_migration(target_course, source_section)
    worker = SectionMigrationWorker.new(target_course.id, source_section.id)
    begin
      worker.perform
    rescue Exception => e
      Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
      Rails.logger.error "[SECTION-SPLITTER] Unable to migrate source section ID=#{source_section.id} to target course ID=#{target_course.id}: #{e.inspect}"
    end
  end

  # Worker that moves one section's user data from the source course into
  # the freshly copied target course.
  SectionMigrationWorker = Struct.new(:target_course_id, :source_section_id) do
    # Orchestrates the migration: first prunes copied content the section
    # cannot see, then moves the section and all of its user data.
    def perform
      @target_course = Course.find(target_course_id)
      @source_section = CourseSection.find(source_section_id)
      @source_course = @source_section.course
      # Maps source asset strings to target asset strings; populated as a
      # side effect of source_model lookups, consumed by the audit migration.
      @source_asset_strings = {}
      # Remove course content that is not available to the source section
      clean_assignments
      clean_quizzes
      clean_discussion_topics
      clean_announcements
      clean_calendar_events
      # Migrate user data
      migrate_section
      migrate_enrollments
      migrate_overrides
      migrate_groups
      migrate_submissions
      migrate_quiz_submissions
      migrate_discussion_entries
      migrate_messages
      migrate_page_views_and_audit_logs
      migrate_asset_user_accesses
      migrate_content_participation_counts
      @target_course.save!
    end

    # Removes copied announcements that were section-restricted elsewhere.
    def clean_announcements
      clean_overridables(@target_course.announcements)
      @target_course.reload
    end

    # Removes copied assignments that were section-restricted elsewhere.
    def clean_assignments
      clean_overridables(@target_course.assignments)
      @target_course.reload
    end

    # Removes copied discussion topics that were section-restricted elsewhere.
    def clean_discussion_topics
      clean_overridables(@target_course.discussion_topics)
      @target_course.reload
    end

    # Removes copied quizzes that were section-restricted elsewhere.
    def clean_quizzes
      clean_overridables(@target_course.quizzes)
      @target_course.reload
    end

    # Permanently deletes every item in +collection+ whose source-course
    # counterpart has section overrides that exclude @source_section,
    # including any associated quiz/assignment/topic and their overrides.
    def clean_overridables(collection)
      to_remove = collection.map {|a| {:target => a, :source => source_model(@source_course, a)}}.select {|h| remove_based_on_overrides?(h[:source])}
      to_remove.each do |h|
        model = h[:target]
        if model.is_a?(DiscussionTopic)
          DiscussionTopic::MaterializedView.for(model).destroy
        elsif model.is_a?(Quizzes::Quiz) && model.assignment.present?
          # A quiz's backing assignment must be removed along with it.
          @target_course.assignments.delete(model.assignment)
          model.assignment.assignment_overrides.each {|o| o.destroy_permanently!}
          model.assignment.destroy_permanently!
        elsif model.is_a?(Assignment)
          # An assignment may back a quiz or a graded discussion topic.
          if model.quiz.present?
            @target_course.quizzes.delete(model.quiz)
            model.quiz.assignment_overrides.each {|o| o.destroy_permanently!}
            model.quiz.destroy_permanently!
          end
          if model.discussion_topic.present?
            DiscussionTopic::MaterializedView.for(model.discussion_topic).destroy
            @target_course.discussion_topics.delete(model.discussion_topic)
            model.discussion_topic.assignment_overrides.each {|o| o.destroy_permanently!}
            model.discussion_topic.destroy_permanently!
          end
        end
        collection.delete(model)
        model.assignment_overrides.each {|o| o.destroy_permanently!}
        model.destroy_permanently!
      end
    end

    # Deletes copied calendar events that belong to sections other than
    # @source_section.
    def clean_calendar_events
      @source_course.calendar_events.active.where("course_section_id IS NOT NULL AND course_section_id <> ?", @source_section.id).each do |source_event|
        target_event = source_model(@target_course, source_event)
        target_event.destroy_permanently!
      end
      @source_course.reload
      @target_course.reload
    end

    # True when the model has section-type overrides and none of them target
    # @source_section (i.e. the section cannot see the item).
    def remove_based_on_overrides?(model)
      overrides = model.active_assignment_overrides.select {|ao| ao.set_type == 'CourseSection'}
      !overrides.empty? && !overrides.any? {|ao| ao.set_id == @source_section.id}
    end

    # Uses heuristics to locate the corresponding source content item in the source course for the given item in the target course.
    # Matching is by workflow_state/title (plus per-type attributes) and, for
    # module-attached items, by the sorted list of containing module names.
    # Raises when no counterpart is found. Also records the asset-string
    # mapping in @source_asset_strings when looking up from @source_course.
    def source_model(source_course, model)
      source_model =
        case model
        when Announcement
          source_course.announcements.where(:workflow_state => model.workflow_state, :title => model.title).first
        when Assignment
          target_modules = model.context_module_tags.where(:context => model.context).map {|t| t.context_module.name}.sort
          models = source_course.assignments.where(:workflow_state => model.workflow_state, :title => model.title)
          models.select! {|m| m.context_module_tags.where(:context => source_course).map {|t| t.context_module.name}.sort == target_modules}
          models.first
        when DiscussionTopic
          target_modules = model.context_module_tags.where(:context => model.context).map {|t| t.context_module.name}.sort
          models = source_course.discussion_topics.where(:workflow_state => model.workflow_state, :title => model.title)
          models.select! {|m| m.context_module_tags.where(:context => source_course).map {|t| t.context_module.name}.sort == target_modules}
          models.first
        when Quizzes::Quiz
          target_modules = model.context_module_tags.where(:context => model.context).map {|t| t.context_module.name}.sort
          models = source_course.quizzes.where(:workflow_state => model.workflow_state, :title => model.title, :question_count => model.question_count)
          models.select! {|m| m.context_module_tags.where(:context => source_course).map {|t| t.context_module.name}.sort == target_modules}
          models.first
        when CalendarEvent
          source_course.calendar_events.where(:workflow_state => model.workflow_state, :title => model.title, :start_at => model.start_at, :end_at => model.end_at).first
        when GroupCategory
          source_course.group_categories.where(:name => model.name, :role => model.role, :deleted_at => model.deleted_at, :group_limit => model.group_limit).first
        else
          nil
        end
      raise "Unable to find source item for [#{model.inspect}] in course ID=#{source_course.id}" unless source_model
      @source_asset_strings[source_model.asset_string] = model.asset_string if source_course == @source_course
      source_model
    end

    # Moves the section record itself into the target course.
    def migrate_section
      @source_section.course = @target_course
      @source_section.save!
    end

    # Repoints every enrollment of the section at the target course.
    def migrate_enrollments
      @source_section.enrollments.each do |e|
        e.course = @target_course
        e.save!
      end
      @source_section.reload
      @target_course.reload
    end

    # Clones applicable assignment/quiz/topic overrides onto the copies.
    def migrate_overrides
      @target_course.assignments.active.each {|a| process_overrides(a)}
      @target_course.quizzes.active.each {|q| process_overrides(q)}
      @target_course.discussion_topics.active.each {|d| process_overrides(d)}
    end

    # Copies a source item's overrides onto its target copy when the
    # override targets @source_section, or is an ADHOC override whose
    # students are all enrolled in the target course.
    def process_overrides(model)
      source_model = source_model(@source_course, model)
      student_ids = @target_course.student_enrollments.map(&:user_id)
      source_model.active_assignment_overrides.each do |override|
        if (override.set_type == 'CourseSection' && override.set_id == @source_section.id) ||
           (override.set_type == 'ADHOC' && (override.assignment_override_students.map(&:user_id) - student_ids).empty?)
          clone_override(override, model)
        end
      end
    end

    # Clones +override+ (and, for ADHOC, its student records) onto
    # +new_model+; no-op when an equivalent override already exists.
    def clone_override(override, new_model)
      return unless new_model.assignment_overrides.where(:set_type => override.set_type, :set_id => override.set_id).empty?
      new_override = override.clone
      case new_model
      when Assignment
        new_override.assignment = new_model
        new_override.save
        if new_override.set_type == 'ADHOC'
          override.assignment_override_students.each do |aos|
            new_aos = aos.clone
            new_aos.assignment = new_model
            new_override.assignment_override_students << new_aos
            new_override.save
          end
        end
      when Quizzes::Quiz
        new_override.quiz = new_model
        new_override.save
        if new_override.set_type == 'ADHOC'
          override.assignment_override_students.each do |aos|
            new_aos = aos.clone
            new_aos.quiz = new_model
            new_override.assignment_override_students << new_aos
            new_override.save
          end
        end
      when DiscussionTopic
        new_override.discussion_topic = new_model
        new_override.save
        if new_override.set_type == 'ADHOC'
          override.assignment_override_students.each do |aos|
            new_aos = aos.clone
            new_aos.discussion_topic = new_model
            new_override.assignment_override_students << new_aos
            new_override.save
          end
        end
      else
        raise "Unexpected model type in update_override: #{new_model.inspect}"
      end
      new_override.reload
    end

    # Clones every group category into the target course and moves this
    # section's groups over, re-parenting each group to the cloned category.
    def migrate_groups
      group_category_map = {}
      @source_course.group_categories.each do |gc|
        new_category = gc.clone
        new_category.context = @target_course
        @target_course.group_categories << new_category
        @target_course.save
        group_category_map[gc] = new_category
      end
      groups = @source_course.groups.where(:course_section_id => @source_section.id)
      groups.each do |group|
        next unless group.group_category
        new_category = group_category_map[group.group_category]
        group.group_category = new_category
        group.save
      end
      groups.update_all(:context_id => @target_course.id)
      @source_course.reload
      @target_course.reload
    end

    # Replaces the copied (empty) submissions with the students' real
    # submissions from the source course, carrying attachments and
    # submission comments along.
    def migrate_submissions
      student_ids = @target_course.student_enrollments.map(&:user_id)
      @target_course.assignments.each do |a|
        # Drop the placeholder submissions created by the course copy.
        Submission.where(:assignment => a, :user_id => student_ids).delete_all
        source_assignment = source_model(@source_course, a)
        submissions = Submission.where(:assignment => source_assignment, :user_id => student_ids)
        submissions.select {|s| s.attachment_ids.present? }.each do |s|
          attachment_ids = s.attachment_ids.split(",")
          Attachment.where(:id => attachment_ids, :context_type => 'Assignment', :context_id => source_assignment.id).update_all(:context_id => a.id)
        end
        submission_comments = SubmissionComment.where(:submission_id => submissions.map(&:id), :context => @source_course)
        submission_comments.update_all(:context_id => @target_course.id)
        submissions.update_all(:assignment_id => a.id)
      end
      @source_course.reload
      @target_course.reload
    end

    # Repoints the students' quiz submissions at the copied quizzes.
    def migrate_quiz_submissions
      student_ids = @target_course.student_enrollments.map(&:user_id)
      @target_course.quizzes.each do |q|
        source_quiz = source_model(@source_course, q)
        submissions = Quizzes::QuizSubmission.where(:quiz => source_quiz, :user_id => student_ids)
        submissions.update_all(:quiz_id => q.id)
      end
      @source_course.reload
      @target_course.reload
    end

    # Moves discussion entry threads whose participants are all members of
    # the section, then rebuilds each topic's materialized view.
    def migrate_discussion_entries
      section_user_ids = @target_course.enrollments.map(&:user_id).uniq
      @target_course.discussion_topics.each do |topic|
        source_topic = source_model(@source_course, topic)
        source_topic.root_discussion_entries.each do |entry|
          entry_ids = ([entry.id] + entry.flattened_discussion_subentries.pluck(:id)).uniq
          participant_ids = ([entry.user_id] + entry.flattened_discussion_subentries.pluck(:user_id)).uniq
          non_section_participant_ids = participant_ids - section_user_ids
          # Only move threads authored entirely by section members.
          if non_section_participant_ids.empty?
            DiscussionEntry.where(:id => entry_ids).update_all(:discussion_topic_id => topic.id)
            DiscussionEntryParticipant.where("discussion_entry_id IN (?) AND user_id NOT IN (?)", entry_ids, section_user_ids).delete_all
          end
        end
        source_topic.discussion_topic_participants.where(:user_id => section_user_ids).update_all(:discussion_topic_id => topic.id)
        source_topic.reload
        topic.last_reply_at = topic.discussion_entries.last.try(:created_at) || topic.posted_at
        topic.save
        DiscussionTopic.where(:id => topic.id).update_all(:user_id => source_topic.user_id) # ugly hack, but user_id is a readonly attribute
      end
      @source_course.reload
      @target_course.reload
      @target_course.discussion_topics.each do |topic|
        begin
          DiscussionTopic::MaterializedView.for(topic).update_materialized_view_without_send_later
        rescue Exception => e
          Canvas::Errors.capture_exception(:section_splitter, $ERROR_INFO)
          Rails.logger.error "Unable to regenerate DiscussionTopic::MaterializedView for ID=#{topic.id}: #{e.inspect}"
        end
      end
    end

    # Repoints the migrated users' messages at the target course.
    def migrate_messages
      user_ids = @target_course.enrollments.map(&:user_id)
      @source_course.messages.where(:user_id => user_ids).update_all(:context_id => @target_course.id)
      @source_course.reload
      @target_course.reload
    end

    # Migrates page views and audit logs, dispatching to the Cassandra
    # event-stream tables or the relational page_views table as configured.
    def migrate_page_views_and_audit_logs
      user_ids = @target_course.enrollments.map(&:user_id)
      if cassandra?
        migrate_page_views_cassandra(user_ids)
        migrate_page_views_counters_by_context_and_hour(user_ids)
        migrate_page_views_counters_by_context_and_user(user_ids)
        migrate_participations_by_context(user_ids)
        migrate_grade_changes(user_ids)
      else
        @source_course.page_views.where(:user_id => user_ids).each do |p|
          p.context = @target_course
          p.save
        end
      end
    end

    # Repoints the migrated users' Cassandra page view rows at the target
    # course (course-context rows only).
    def migrate_page_views_cassandra(user_ids)
      page_views = []
      PageView::EventStream.database.execute("SELECT request_id, context_type, user_id FROM page_views WHERE context_id = ?", @source_course.id).fetch {|row| page_views << row.to_hash}
      request_ids = page_views
        .select {|row| row["context_type"] == "Course" && user_ids.include?(row["user_id"].to_i)}
        .map {|row| row["request_id"]}
        .uniq
      PageView::EventStream.database.update("UPDATE page_views SET context_id = ? WHERE request_id IN (?)", @target_course.id, request_ids)
    end

    # Moves per-hour page view counters for the given users from the source
    # course's Cassandra rows onto the target course's rows, deleting each
    # source row after its counts have been added to the target.
    def migrate_page_views_counters_by_context_and_hour(user_ids)
      source_course_global_id = @source_course.global_id
      target_course_global_id = @target_course.global_id
      user_ids.each do |user_id|
        user_global_id = User.find(user_id).global_id
        source_context_user_global_id = "course_#{source_course_global_id}/user_#{user_global_id}"
        target_context_user_global_id = "course_#{target_course_global_id}/user_#{user_global_id}"
        page_views_counters_by_context_and_hour = []
        query = "SELECT context, hour_bucket, page_view_count, participation_count FROM page_views_counters_by_context_and_hour WHERE context = ?"
        PageView::EventStream.database.execute(query, source_context_user_global_id).fetch {|row| page_views_counters_by_context_and_hour << row.to_hash}
        page_views_counters_by_context_and_hour.each do |row|
          primary_key = {
            "context" => target_context_user_global_id,
            "hour_bucket" => row["hour_bucket"]
          }
          # Ensure the target row exists before incrementing its counters.
          PageView::EventStream.database.insert_record("page_views_counters_by_context_and_hour", primary_key, {})
          if row["page_view_count"]
            PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_hour SET page_view_count = page_view_count + ? WHERE context = ? AND hour_bucket = ?", row["page_view_count"], primary_key["context"], primary_key["hour_bucket"])
          end
          if row["participation_count"]
            PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_hour SET participation_count = participation_count + ? WHERE context = ? AND hour_bucket = ?", row["participation_count"], primary_key["context"], primary_key["hour_bucket"])
          end
          PageView::EventStream.database.update("DELETE FROM page_views_counters_by_context_and_hour WHERE context = ? AND hour_bucket = ?", source_context_user_global_id, row["hour_bucket"])
        end
      end
    end

    # Moves per-(context, user) page view counters for the given users from
    # the source course's Cassandra rows onto the target course's rows.
    def migrate_page_views_counters_by_context_and_user(user_ids)
      source_course_global_id = @source_course.global_id
      target_course_global_id = @target_course.global_id
      user_ids.each do |user_id|
        user_global_id = User.find(user_id).global_id.to_s
        source_context_global_id = "course_#{source_course_global_id}"
        target_context_global_id = "course_#{target_course_global_id}"
        page_views_counters_by_context_and_user = []
        query = "SELECT context, user_id, page_view_count, participation_count FROM page_views_counters_by_context_and_user WHERE context = ? AND user_id = ?"
        PageView::EventStream.database.execute(query, source_context_global_id, user_global_id).fetch {|row| page_views_counters_by_context_and_user << row.to_hash}
        page_views_counters_by_context_and_user.each do |row|
          primary_key = {
            "context" => target_context_global_id,
            "user_id" => user_global_id
          }
          # Ensure the target row exists before incrementing its counters.
          PageView::EventStream.database.insert_record("page_views_counters_by_context_and_user", primary_key, {})
          if row["page_view_count"]
            PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_user SET page_view_count = page_view_count + ? WHERE context = ? AND user_id = ?", row["page_view_count"], primary_key["context"], primary_key["user_id"])
          end
          if row["participation_count"]
            PageView::EventStream.database.update("UPDATE page_views_counters_by_context_and_user SET participation_count = participation_count + ? WHERE context = ? AND user_id = ?", row["participation_count"], primary_key["context"], primary_key["user_id"])
          end
          PageView::EventStream.database.update("DELETE FROM page_views_counters_by_context_and_user WHERE context = ? AND user_id = ?", source_context_global_id, user_global_id)
        end
      end
    end

    # Moves participation audit rows for the given users onto the target
    # course's context, translating asset codes through @source_asset_strings.
    # Rows with no asset mapping are left in place.
    def migrate_participations_by_context(user_ids)
      source_course_global_id = @source_course.global_id
      target_course_global_id = @target_course.global_id
      user_ids.each do |user_id|
        user_global_id = User.find(user_id).global_id
        source_context_user_global_id = "course_#{source_course_global_id}/user_#{user_global_id}"
        target_context_user_global_id = "course_#{target_course_global_id}/user_#{user_global_id}"
        participations_by_context = []
        query = "SELECT context, created_at, request_id, asset_category, asset_code, asset_user_access_id, url FROM participations_by_context WHERE context = ?"
        PageView::EventStream.database.execute(query, source_context_user_global_id).fetch {|row| participations_by_context << row.to_hash}
        participations_by_context.each do |row|
          values = {
            "asset_category" => row["asset_category"],
            "asset_code" => @source_asset_strings[row["asset_code"]],
            "asset_user_access_id" => row["asset_user_access_id"],
            "url" => row["url"]
          }
          primary_key = {
            "context" => target_context_user_global_id,
            "created_at" => row["created_at"],
            "request_id" => row["request_id"]
          }
          if values["asset_code"].present?
            PageView::EventStream.database.insert_record("participations_by_context", primary_key, values)
            PageView::EventStream.database.update("DELETE FROM participations_by_context WHERE context = ? AND created_at = ? AND request_id = ?", source_context_user_global_id, row["created_at"], row["request_id"])
          end
        end
      end
    end

    # Repoints grade-change audit records for the given users at the target
    # course, rewriting assignment ids to the copied assignments.
    def migrate_grade_changes(user_ids)
      source_course_global_id = @source_course.global_id
      target_course_global_id = @target_course.global_id
      user_global_ids = user_ids.map {|user_id| User.find(user_id).global_id}
      grade_changes = []
      query = "SELECT id, assignment_id, context_id, context_type, student_id FROM grade_changes WHERE context_id = ?"
      Auditors::GradeChange::Stream.database.execute(query, source_course_global_id).fetch {|row| grade_changes << row.to_hash}
      grade_changes.select! {|row| row["context_type"] == "Course" && user_global_ids.include?(row["student_id"])}
      grade_changes.each do |row|
        assignment_id = Shard::local_id_for(row["assignment_id"])[0]
        # NOTE(review): Assignment.find raises ActiveRecord::RecordNotFound
        # for a missing id, which would abort the migration and make the
        # `next unless assignment` guard unreachable — confirm and consider
        # a nil-returning lookup instead.
        assignment = Assignment.find(assignment_id)
        next unless assignment
        new_assignment = source_model(@target_course, assignment)
        values = {
          "assignment_id" => new_assignment.id,
          "context_id" => target_course_global_id
        }
        primary_key = {
          "id" => row["id"]
        }
        Auditors::GradeChange::Stream.database.update_record("grade_changes", primary_key, values)
      end
    end

    # Repoints asset-user-access rows for the migrated users at the target
    # course.
    def migrate_asset_user_accesses
      user_ids = @target_course.enrollments.map(&:user_id)
      @source_course.asset_user_accesses.where(:user_id => user_ids).update_all(:context_id => @target_course.id)
    end

    # Repoints content-participation-count rows for the migrated users at
    # the target course.
    def migrate_content_participation_counts
      user_ids = @target_course.enrollments.map(&:user_id)
      @source_course.content_participation_counts.where(:user_id => user_ids).update_all(:context_id => @target_course.id)
    end

    # True when page views are backed by Cassandra.
    def cassandra?
      Setting.get('enable_page_views', 'db') == 'cassandra'
    end
  end
end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # i18n-js: provides support for localization/translations on the front end
  # utilizing Rails localization (same translation files under config/).
  # Registered exactly once — it was previously added twice, which ran the
  # middleware twice on every request.
  config.middleware.use I18n::JS::Middleware

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Full error reports are disabled and caching is turned off.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = false

  # Outgoing mail is delivered via Mailgun; delivery errors are raised.
  config.action_mailer.default_url_options = { host: ENV['dashboard_url'] }
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_deliveries = true
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.delivery_method = :mailgun
  config.action_mailer.mailgun_settings = {
    api_key: ENV['mailgun_key'],
    domain: ENV['mailgun_domain'],
  }

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log
  config.log_level = :debug

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = false

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  config.action_view.raise_on_missing_translations = true
end
Enable verbose query logging
This new feature of Rails 5.2 is super useful.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # i18n-js: provides support for localization/translations on the front end
  # utilizing Rails localization (same translation files under config/).
  # Registered exactly once — it was previously added twice, which ran the
  # middleware twice on every request.
  config.middleware.use I18n::JS::Middleware

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Full error reports are disabled and caching is turned off.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = false

  # Outgoing mail is delivered via Mailgun; delivery errors are raised.
  config.action_mailer.default_url_options = { host: ENV['dashboard_url'] }
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_deliveries = true
  config.action_mailer.default :charset => "utf-8"
  config.action_mailer.delivery_method = :mailgun
  config.action_mailer.mailgun_settings = {
    api_key: ENV['mailgun_key'],
    domain: ENV['mailgun_domain'],
  }

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log
  config.log_level = :debug

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Include the Ruby call site for database queries
  config.active_record.verbose_query_logs = true

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = false

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  config.action_view.raise_on_missing_translations = true
end
|
# encoding: utf-8

module IceNine
  class Freezer

    # A freezer class for handling Enumerable objects
    class Enumerable < Object

      # Deep Freeze an Enumerable
      #
      # @example
      #   enumerable = IceNine::Freezer::Enumerable.deep_freeze(%w[a b c])
      #   enumerable.select(&:frozen?) # => ['a', 'b', 'c']
      #
      # @param [Enumerable] enumerable
      #
      # @return [Enumerable]
      #
      # @todo use super on #each when Struct#each returns self in Rubinius
      #
      # @api public
      def self.deep_freeze(enumerable)
        # Collect yielded values with a splat so enumerables whose #each
        # yields multiple arguments have every argument deep frozen, instead
        # of freezing only the single packed first argument.
        enumerable.each do |*args|
          args.each { |arg| IceNine.deep_freeze(arg) }
        end
        super enumerable
      end

    end # class Enumerable
  end # class Freezer
end # module IceNine
Change Enumerable freezer to handle #each yielding multiple args
# encoding: utf-8

module IceNine
  class Freezer

    # A freezer class for handling Enumerable objects
    class Enumerable < Object

      # Deep Freeze an Enumerable
      #
      # @example
      #   enumerable = IceNine::Freezer::Enumerable.deep_freeze(%w[a b c])
      #   enumerable.select(&:frozen?) # => ['a', 'b', 'c']
      #
      # @param [Enumerable] enumerable
      #
      # @return [Enumerable]
      #
      # @todo use super on #each when Struct#each returns self in Rubinius
      #
      # @api public
      def self.deep_freeze(enumerable)
        # #each may yield several objects per iteration (Hash yields
        # key/value pairs); the splat collects them so each one is frozen.
        enumerable.each do |*args|
          args.each { |arg| IceNine.deep_freeze(arg) }
        end
        super enumerable
      end
    end # class Enumerable
  end # class Freezer
end # module IceNine
|
# encoding: UTF-8
require "httparty"
# The top level module contains the different data sources
# as sub-modules. Currently there are the following modules
# available:
#
# * Freebase
# * Factbook
# * LinkedGeoData
# * Gdacs
#
# The existing modules are extended stepwise and additional
# sources are added in the future.
require 'logger'
# Top module that contains the whole library. Each sub-module
# wraps one source.
module SemanticCrawler
  # Library-wide logger writing to log/logfile.log, rotated daily.
  # NOTE(review): $log is a global variable shared by all sub-modules;
  # a module-level accessor (e.g. SemanticCrawler.log) would avoid the global.
  $log = Logger.new(File.expand_path('../../log/logfile.log', __FILE__), 'daily')
end
# CIA Factbook RDF Dump - module: Factbook
require "semantic_crawler/factbook"
require "semantic_crawler/factbook/country"
# GDACS.org - module: Gdacs
require "semantic_crawler/gdacs"
require "semantic_crawler/gdacs/feed.rb"
require "semantic_crawler/gdacs/feed_item.rb"
require "semantic_crawler/gdacs/resource.rb"
require "semantic_crawler/gdacs/emergency_feed.rb"
require "semantic_crawler/gdacs/emergency_feed_item.rb"
# DBPedia - module: Dbpedia
require "semantic_crawler/dbpedia"
# GeoNames.org - module: GeoNames
require "semantic_crawler/geo_names"
# LinkedGeoData.org - module: LinkedGeoData
require "semantic_crawler/linked_geo_data"
require "semantic_crawler/linked_geo_data/relevant_node"
# Fao.org - module: Fao
require "semantic_crawler/fao"
require "semantic_crawler/fao/country"
# Freebase.com - module: Freebase
require "semantic_crawler/freebase/country"
Changing the logfile name from logfile.log to semantic-crawler.log
# encoding: UTF-8
require "httparty"
# The top level module contains the different data sources
# as sub-modules. Currently there are the following modules
# available:
#
# * Freebase
# * Factbook
# * LinkedGeoData
# * Gdacs
#
# The existing modules are extended stepwise and additional
# sources are added in the future.
require 'logger'
# Top module that contains the whole library. Each sub-module
# wraps one source.
module SemanticCrawler
  # Library-wide logger writing to log/semantic-crawler.log, rotated daily.
  # NOTE(review): $log is a global variable shared by all sub-modules;
  # a module-level accessor (e.g. SemanticCrawler.log) would avoid the global.
  $log = Logger.new(File.expand_path('../../log/semantic-crawler.log', __FILE__), 'daily')
end
# CIA Factbook RDF Dump - module: Factbook
require "semantic_crawler/factbook"
require "semantic_crawler/factbook/country"
# GDACS.org - module: Gdacs
require "semantic_crawler/gdacs"
require "semantic_crawler/gdacs/feed.rb"
require "semantic_crawler/gdacs/feed_item.rb"
require "semantic_crawler/gdacs/resource.rb"
require "semantic_crawler/gdacs/emergency_feed.rb"
require "semantic_crawler/gdacs/emergency_feed_item.rb"
# DBPedia - module: Dbpedia
require "semantic_crawler/dbpedia"
# GeoNames.org - module: GeoNames
require "semantic_crawler/geo_names"
# LinkedGeoData.org - module: LinkedGeoData
require "semantic_crawler/linked_geo_data"
require "semantic_crawler/linked_geo_data/relevant_node"
# Fao.org - module: Fao
require "semantic_crawler/fao"
require "semantic_crawler/fao/country"
# Freebase.com - module: Freebase
require "semantic_crawler/freebase/country"
|
# encoding: utf-8
# Settings specified here will take precedence over those in config/environment.rb
Markus::Application.configure do
  # Other Precompiled Assets
  config.assets.precompile += %w(pdfjs.js)

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the webserver when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true

  # FIXME: The following lines can be commented
  # out when jQuery is fully implemented
  # config.action_controller.perform_caching = false
  # config.action_controller.allow_forgery_protection = true

  # Load any local configuration that is kept out of source control
  # (File.exist? — the File.exists? alias is deprecated and removed in Ruby 3.2)
  if File.exist?(File.join(File.dirname(__FILE__), 'local_environment_override.rb'))
    instance_eval File.read(File.join(File.dirname(__FILE__), 'local_environment_override.rb'))
  end

  # Show Deprecated Warnings (to :log or to :stderr)
  config.active_support.deprecation = :stderr

  # set log-level (:debug, :info, :warn, :error, :fatal)
  config.log_level = :debug

  # Don't care if the mailer can't send
  config.action_mailer.raise_delivery_errors = false

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  config.active_record.auto_explain_threshold_in_seconds = 1.0

  ###################################################################
  # MarkUs SPECIFIC CONFIGURATION
  #   - use "/" as path separator no matter what OS server is running
  ###################################################################

  ###################################################################
  # Set the course name here
  COURSE_NAME = 'CSC108 Fall 2009: Introduction to Computer Programming'

  ###################################################################
  # MarkUs relies on external user authentication: An external script
  # (ideally a small C program) is called with username and password
  # piped to stdin of that program (first line is username, second line
  # is password).
  #
  # If and only if it exits with a return code of 0, the username/password
  # combination is considered valid and the user is authenticated. Moreover,
  # the user is authorized, if it exists as a user in MarkUs.
  #
  # That is why MarkUs does not allow usernames/passwords which contain
  # \n or \0. These are the only restrictions.
  VALIDATE_FILE = "#{::Rails.root.to_s}/config/dummy_validate.sh"

  ###################################################################
  # Authentication Settings
  ###################################################################
  # Set this to true/false if you want to use an external authentication scheme
  # that sets the REMOTE_USER variable.
  REMOTE_USER_AUTH = false

  ###################################################################
  # This is where the logout button will redirect to when clicked.
  # Set this to one of the three following options:
  #
  # "DEFAULT" - MarkUs will use its default logout routine.
  # A logout link will be provided.
  #
  # The DEFAULT option should not be used if REMOTE_USER_AUTH is set to true,
  # as it will not result in a successful logout.
  #
  # -----------------------------------------------------------------------------
  #
  # "http://address.of.choice" - Logout will redirect to the specified URI.
  #
  # If REMOTE_USER_AUTH is set to true, it would be possible
  # to specify a custom address which would log the user out of the authentication
  # scheme.
  # Choosing this option with REMOTE_USER_AUTH is set to false will still properly
  # log the user out of MarkUs.
  #
  # -----------------------------------------------------------------------------
  #
  # "NONE" - Logout link will be hidden.
  #
  # It only recommended that you use this if REMOTE_USER_AUTH is set to true
  # and do not have a custom logout page.
  #
  # If you are using HTTP's basic authentication, you probably want to use this
  # option.
  LOGOUT_REDIRECT = 'DEFAULT'

  ###################################################################
  # File storage (Repository) settings
  ###################################################################
  # Options for REPOSITORY_TYPE are 'svn', 'git' and 'memory' for now
  # 'memory' is by design not persistent and only used for testing MarkUs
  REPOSITORY_TYPE = 'git' # use Git as storage backend

  ###################################################################
  # Directory where Repositories will be created. Make sure MarkUs is allowed
  # to write to this directory
  REPOSITORY_STORAGE = "#{::Rails.root.to_s}/data/dev/repos"

  ###################################################################
  # Directory where Repositories will be created. Make sure MarkUs is allowed
  # to write to this directory
  REPOSITORY_STORAGE_GITOLITE = "#{::Rails.root.to_s}/data/dev/repos/gitoliteRepos"

  ###################################################################
  # Directory where authentication keys will be uploaded. Make sure MarkUs is
  # allowed to write to this directory
  KEY_STORAGE = "#{::Rails.root.to_s}/data/dev/keys"

  ###################################################################
  # Directory where converted PDF files will be stored as JPEGs. Make sure MarkUs
  # is allowed to write to this directory
  PDF_STORAGE = "#{::Rails.root.to_s}/data/dev/pdfs"

  ###################################################################
  # Location of the public and private key for the git user on the system
  GITOLITE_SETTINGS = { public_key: "/home/git/git.pub",
                        private_key: "/home/git/.ssh/id_rsa" }

  ###################################################################
  # Directory where the Automated Testing Repositories will be created.
  # make sure markus is allowed to write to this directory
  AUTOMATED_TESTS_REPOSITORY = "#{::Rails.root.to_s}/data/dev/automated_tests"

  ###################################################################
  # Set this to true or false if you want to be able to display and annotate
  # PDF documents within the browser.
  PDF_SUPPORT = false

  ###################################################################
  # Change this to 'REPOSITORY_EXTERNAL_SUBMITS_ONLY = true' if you
  # are using Subversion as a storage backend and the instructor wants his/her
  # students to submit to the repositories Subversion clients only. Set this
  # to true if you intend to force students to submit via Subversion
  # clients only. The MarkUs Web interface for submissions will be read-only.
  REPOSITORY_EXTERNAL_SUBMITS_ONLY = false

  ###################################################################
  # This config setting only makes sense, if you are using
  # 'REPOSITORY_EXTERNAL_SUBMITS_ONLY = true'. If you have Apache httpd
  # configured so that the repositories created by MarkUs will be available to
  # the outside world, this is the URL which internally "points" to the
  # REPOSITORY_STORAGE directory configured earlier. Hence, Subversion
  # repositories will be available to students for example via URL
  # http://www.example.com/markus/svn/Repository_Name. Make sure the path
  # after the hostname matches your <Location> directive in your Apache
  # httpd configuration
  REPOSITORY_EXTERNAL_BASE_URL = 'http://www.example.com/markus/svn'

  ###################################################################
  # This setting is important for two scenarios:
  # First, if MarkUs should use Subversion repositories created by a
  # third party, point it to the place where it will find the Subversion
  # authz file. In that case, MarkUs would need at least read access to
  # that file.
  # Second, if MarkUs is configured with REPOSITORY_EXTERNAL_SUBMITS_ONLY
  # set to 'true', you can configure as to where MarkUs should write the
  # Subversion authz file.
  REPOSITORY_PERMISSION_FILE = REPOSITORY_STORAGE + '/conf'

  ###################################################################
  # This setting configures if MarkUs is reading Subversion
  # repositories' permissions only OR is admin of the Subversion
  # repositories. In the latter case, it will write to
  # $REPOSITORY_SVN_AUTHZ_FILE, otherwise it doesn't. Change this to
  # 'false' if repositories are created by a third party.
  IS_REPOSITORY_ADMIN = true

  ###################################################################
  # Set this to the desired default language MarkUs should load if
  # nothing else tells it otherwise. At the moment valid values are
  # 'en', 'fr'. Please make sure that proper locale files are present
  # in config/locales.
  MARKUS_DEFAULT_LANGUAGE = 'en'

  ###################################################################
  # Session Timeouts
  ###################################################################
  USER_STUDENT_SESSION_TIMEOUT = 1800 # Timeout for student users
  USER_TA_SESSION_TIMEOUT = 1800 # Timeout for grader users
  USER_ADMIN_SESSION_TIMEOUT = 1800 # Timeout for admin users

  ###################################################################
  # CSV upload order of fields (usually you don't want to change this)
  ###################################################################
  # Order of student CSV uploads
  USER_STUDENT_CSV_UPLOAD_ORDER = [:user_name, :last_name, :first_name, :section_name]
  # Order of graders CSV uploads
  USER_TA_CSV_UPLOAD_ORDER = [:user_name, :last_name, :first_name]

  ###################################################################
  # Logging Options
  ###################################################################
  # If set to true then the MarkusLogger will be enabled
  MARKUS_LOGGING_ENABLED = true
  # If set to true then the rotation of the logfiles will be defined
  # by MARKUS_LOGGING_ROTATE_INTERVAL instead of the size of the file
  MARKUS_LOGGING_ROTATE_BY_INTERVAL = false
  # Set the maximum size file that the logfiles will have before rotating
  MARKUS_LOGGING_SIZE_THRESHOLD = 1024000000
  # Sets the interval which rotations will occur if
  # MARKUS_LOGGING_ROTATE_BY_INTERVAL is set to true,
  # possible values are: 'daily', 'weekly', 'monthly'
  MARKUS_LOGGING_ROTATE_INTERVAL = 'daily'
  # Name of the logfile that will carry information, debugging and
  # warning messages
  MARKUS_LOGGING_LOGFILE = "log/info_#{::Rails.env}.log"
  # Name of the logfile that will carry error and fatal messages
  MARKUS_LOGGING_ERRORLOGFILE = "log/error_#{::Rails.env}.log"
  # This variable sets the number of old log files that will be kept
  MARKUS_LOGGING_OLDFILES = 10

  #####################################################################
  # Markus Session Store configuration
  # see config/initializers/session_store.rb
  #####################################################################
  SESSION_COOKIE_NAME = '_markus_session'
  # NOTE(review): a hard-coded session secret is acceptable only for local
  # development; any shared deployment must load it from the environment.
  SESSION_COOKIE_SECRET = '650d281667d8011a3a6ad6dd4b5d4f9ddbce14a7d78b107812dbb40b24e234256ab2c5572c8196cf6cde6b85942688b6bfd337ffa0daee648d04e1674cf1fdf6'
  SESSION_COOKIE_EXPIRE_AFTER = 3.weeks
  SESSION_COOKIE_HTTP_ONLY = true
  SESSION_COOKIE_SECURE = false

  ###################################################################
  # END OF MarkUs SPECIFIC CONFIGURATION
  ###################################################################
end
Added additional gitolite config parameter for Vagrant, removed unused parameter
# encoding: utf-8
# Settings specified here will take precedence over those in config/environment.rb
Markus::Application.configure do
  # Other Precompiled Assets
  config.assets.precompile += %w(pdfjs.js)

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the webserver when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true

  # FIXME: The following lines can be commented
  # out when jQuery is fully implemented
  # config.action_controller.perform_caching = false
  # config.action_controller.allow_forgery_protection = true

  # Load any local configuration that is kept out of source control
  # (File.exist? — the File.exists? alias is deprecated and removed in Ruby 3.2)
  if File.exist?(File.join(File.dirname(__FILE__), 'local_environment_override.rb'))
    instance_eval File.read(File.join(File.dirname(__FILE__), 'local_environment_override.rb'))
  end

  # Show Deprecated Warnings (to :log or to :stderr)
  config.active_support.deprecation = :stderr

  # set log-level (:debug, :info, :warn, :error, :fatal)
  config.log_level = :debug

  # Don't care if the mailer can't send
  config.action_mailer.raise_delivery_errors = false

  # Log the query plan for queries taking more than this (works
  # with SQLite, MySQL, and PostgreSQL)
  config.active_record.auto_explain_threshold_in_seconds = 1.0

  ###################################################################
  # MarkUs SPECIFIC CONFIGURATION
  #   - use "/" as path separator no matter what OS server is running
  ###################################################################

  ###################################################################
  # Set the course name here
  COURSE_NAME = 'CSC108 Fall 2009: Introduction to Computer Programming'

  ###################################################################
  # MarkUs relies on external user authentication: An external script
  # (ideally a small C program) is called with username and password
  # piped to stdin of that program (first line is username, second line
  # is password).
  #
  # If and only if it exits with a return code of 0, the username/password
  # combination is considered valid and the user is authenticated. Moreover,
  # the user is authorized, if it exists as a user in MarkUs.
  #
  # That is why MarkUs does not allow usernames/passwords which contain
  # \n or \0. These are the only restrictions.
  VALIDATE_FILE = "#{::Rails.root.to_s}/config/dummy_validate.sh"

  ###################################################################
  # Authentication Settings
  ###################################################################
  # Set this to true/false if you want to use an external authentication scheme
  # that sets the REMOTE_USER variable.
  REMOTE_USER_AUTH = false

  ###################################################################
  # This is where the logout button will redirect to when clicked.
  # Set this to one of the three following options:
  #
  # "DEFAULT" - MarkUs will use its default logout routine.
  # A logout link will be provided.
  #
  # The DEFAULT option should not be used if REMOTE_USER_AUTH is set to true,
  # as it will not result in a successful logout.
  #
  # -----------------------------------------------------------------------------
  #
  # "http://address.of.choice" - Logout will redirect to the specified URI.
  #
  # If REMOTE_USER_AUTH is set to true, it would be possible
  # to specify a custom address which would log the user out of the authentication
  # scheme.
  # Choosing this option with REMOTE_USER_AUTH is set to false will still properly
  # log the user out of MarkUs.
  #
  # -----------------------------------------------------------------------------
  #
  # "NONE" - Logout link will be hidden.
  #
  # It only recommended that you use this if REMOTE_USER_AUTH is set to true
  # and do not have a custom logout page.
  #
  # If you are using HTTP's basic authentication, you probably want to use this
  # option.
  LOGOUT_REDIRECT = 'DEFAULT'

  ###################################################################
  # File storage (Repository) settings
  ###################################################################
  # Options for REPOSITORY_TYPE are 'svn', 'git' and 'memory' for now
  # 'memory' is by design not persistent and only used for testing MarkUs
  REPOSITORY_TYPE = 'git' # use Git as storage backend

  ###################################################################
  # Directory where Repositories will be created. Make sure MarkUs is allowed
  # to write to this directory
  REPOSITORY_STORAGE = "#{::Rails.root.to_s}/data/dev/repos"

  ###################################################################
  # Directory where authentication keys will be uploaded. Make sure MarkUs is
  # allowed to write to this directory
  KEY_STORAGE = "#{::Rails.root.to_s}/data/dev/keys"

  ###################################################################
  # Directory where converted PDF files will be stored as JPEGs. Make sure MarkUs
  # is allowed to write to this directory
  PDF_STORAGE = "#{::Rails.root.to_s}/data/dev/pdfs"

  ###################################################################
  # Location of the public and private key for the git user on the system
  GITOLITE_SETTINGS = { public_key: "/home/vagrant/git.pub",
                        private_key: "/home/vagrant/.ssh/id_rsa",
                        host: "localhost" }

  ###################################################################
  # Directory where the Automated Testing Repositories will be created.
  # make sure markus is allowed to write to this directory
  AUTOMATED_TESTS_REPOSITORY = "#{::Rails.root.to_s}/data/dev/automated_tests"

  ###################################################################
  # Set this to true or false if you want to be able to display and annotate
  # PDF documents within the browser.
  PDF_SUPPORT = false

  ###################################################################
  # Change this to 'REPOSITORY_EXTERNAL_SUBMITS_ONLY = true' if you
  # are using Subversion as a storage backend and the instructor wants his/her
  # students to submit to the repositories Subversion clients only. Set this
  # to true if you intend to force students to submit via Subversion
  # clients only. The MarkUs Web interface for submissions will be read-only.
  REPOSITORY_EXTERNAL_SUBMITS_ONLY = false

  ###################################################################
  # This config setting only makes sense, if you are using
  # 'REPOSITORY_EXTERNAL_SUBMITS_ONLY = true'. If you have Apache httpd
  # configured so that the repositories created by MarkUs will be available to
  # the outside world, this is the URL which internally "points" to the
  # REPOSITORY_STORAGE directory configured earlier. Hence, Subversion
  # repositories will be available to students for example via URL
  # http://www.example.com/markus/svn/Repository_Name. Make sure the path
  # after the hostname matches your <Location> directive in your Apache
  # httpd configuration
  REPOSITORY_EXTERNAL_BASE_URL = 'http://www.example.com/markus/svn'

  ###################################################################
  # This setting is important for two scenarios:
  # First, if MarkUs should use Subversion repositories created by a
  # third party, point it to the place where it will find the Subversion
  # authz file. In that case, MarkUs would need at least read access to
  # that file.
  # Second, if MarkUs is configured with REPOSITORY_EXTERNAL_SUBMITS_ONLY
  # set to 'true', you can configure as to where MarkUs should write the
  # Subversion authz file.
  REPOSITORY_PERMISSION_FILE = REPOSITORY_STORAGE + '/conf'

  ###################################################################
  # This setting configures if MarkUs is reading Subversion
  # repositories' permissions only OR is admin of the Subversion
  # repositories. In the latter case, it will write to
  # $REPOSITORY_SVN_AUTHZ_FILE, otherwise it doesn't. Change this to
  # 'false' if repositories are created by a third party.
  IS_REPOSITORY_ADMIN = true

  ###################################################################
  # Set this to the desired default language MarkUs should load if
  # nothing else tells it otherwise. At the moment valid values are
  # 'en', 'fr'. Please make sure that proper locale files are present
  # in config/locales.
  MARKUS_DEFAULT_LANGUAGE = 'en'

  ###################################################################
  # Session Timeouts
  ###################################################################
  USER_STUDENT_SESSION_TIMEOUT = 1800 # Timeout for student users
  USER_TA_SESSION_TIMEOUT = 1800 # Timeout for grader users
  USER_ADMIN_SESSION_TIMEOUT = 1800 # Timeout for admin users

  ###################################################################
  # CSV upload order of fields (usually you don't want to change this)
  ###################################################################
  # Order of student CSV uploads
  USER_STUDENT_CSV_UPLOAD_ORDER = [:user_name, :last_name, :first_name, :section_name]
  # Order of graders CSV uploads
  USER_TA_CSV_UPLOAD_ORDER = [:user_name, :last_name, :first_name]

  ###################################################################
  # Logging Options
  ###################################################################
  # If set to true then the MarkusLogger will be enabled
  MARKUS_LOGGING_ENABLED = true
  # If set to true then the rotation of the logfiles will be defined
  # by MARKUS_LOGGING_ROTATE_INTERVAL instead of the size of the file
  MARKUS_LOGGING_ROTATE_BY_INTERVAL = false
  # Set the maximum size file that the logfiles will have before rotating
  MARKUS_LOGGING_SIZE_THRESHOLD = 1024000000
  # Sets the interval which rotations will occur if
  # MARKUS_LOGGING_ROTATE_BY_INTERVAL is set to true,
  # possible values are: 'daily', 'weekly', 'monthly'
  MARKUS_LOGGING_ROTATE_INTERVAL = 'daily'
  # Name of the logfile that will carry information, debugging and
  # warning messages
  MARKUS_LOGGING_LOGFILE = "log/info_#{::Rails.env}.log"
  # Name of the logfile that will carry error and fatal messages
  MARKUS_LOGGING_ERRORLOGFILE = "log/error_#{::Rails.env}.log"
  # This variable sets the number of old log files that will be kept
  MARKUS_LOGGING_OLDFILES = 10

  #####################################################################
  # Markus Session Store configuration
  # see config/initializers/session_store.rb
  #####################################################################
  SESSION_COOKIE_NAME = '_markus_session'
  # NOTE(review): a hard-coded session secret is acceptable only for local
  # development; any shared deployment must load it from the environment.
  SESSION_COOKIE_SECRET = '650d281667d8011a3a6ad6dd4b5d4f9ddbce14a7d78b107812dbb40b24e234256ab2c5572c8196cf6cde6b85942688b6bfd337ffa0daee648d04e1674cf1fdf6'
  SESSION_COOKIE_EXPIRE_AFTER = 3.weeks
  SESSION_COOKIE_HTTP_ONLY = true
  SESSION_COOKIE_SECURE = false

  ###################################################################
  # END OF MarkUs SPECIFIC CONFIGURATION
  ###################################################################
end
|
# Fallback install root for system-wide installs.
# NOTE(review): the value is an anyenv root even though the constant name
# says RBENV — consider renaming to DEFAULT_ANYENV_ROOT.
DEFAULT_RBENV_ROOT = '/usr/local/anyenv'.freeze
# Entry point: set up per-run state, clone anyenv plus its update plugin,
# then install the requested env plugins and versions.
#
# attributes - Hash; :install_versions holds an Array of {env => [versions]}.
# username   - optional user to install for; nil means a system-wide install.
def run(attributes, username = nil)
  init(username)
  clone_anyenv
  clone_anyenv_update
  install_envs(attributes)
end

private

# Memoize per-run state. Order matters: @init_cmd is derived from
# @anyenv_root_path, which in turn depends on the username.
def init(username)
  @username = username
  @anyenv_root_path = anyenv_root(username)
  @init_cmd = anyenv_init(@anyenv_root_path)
end
# Protocol used for the GitHub clones; node attribute wins, "git" otherwise.
def scheme
  return @scheme if @scheme
  @scheme = node[:anyenv][:scheme] || 'git'
end
# Resolve the anyenv root: system-wide when no username is given,
# per-user otherwise.
def anyenv_root(username)
  username.nil? ? anyenv_system_root : anyenv_user_root(username)
end
# Root for a system-wide install. Precedence: explicit node attribute,
# then the ANYENV_ROOT environment variable, then the built-in default.
# (Previously the ANYENV_ROOT environment variable was ignored.)
def anyenv_system_root
  attr_root = node[:anyenv] && node[:anyenv][:anyenv_root]
  return attr_root if attr_root
  ENV['ANYENV_ROOT'] || DEFAULT_RBENV_ROOT
end
# Root for a per-user install: the user's node attribute wins, otherwise
# ~/.anyenv under the platform's home-directory layout.
def anyenv_user_root(username)
  user_attrs = node[:anyenv][:users][username]
  return user_attrs[:anyenv_root] if user_attrs.key?(:anyenv_root)

  home = node[:platform] == 'darwin' ? "/Users/#{username}" : "/home/#{username}"
  "#{home}/.anyenv"
end
# Shell prefix that makes anyenv usable inside a single command string:
# exports ANYENV_ROOT, prepends its bin dir to PATH and evals `anyenv init`.
def anyenv_init(root_path)
  [
    %(export ANYENV_ROOT="#{root_path}"; ),
    %(export PATH="#{root_path}/bin:${PATH}"; ),
    %(eval "$(anyenv init -)"; )
  ].join
end
# Clone repo_path into install_path via Chef's git resource; the not_if
# guard makes it idempotent when the target directory already exists.
def clone_repository(install_path, repo_path)
  git install_path do
    user @username if @username
    repository repo_path if repo_path
    not_if "test -d #{install_path}"
  end
end
# Clone the anyenv repository itself into the resolved root.
def clone_anyenv
  clone_repository(@anyenv_root_path, "#{scheme}://github.com/riywo/anyenv.git")
end
# Clone the anyenv-update plugin into the plugins directory.
def clone_anyenv_update
  clone_repository("#{@anyenv_root_path}/plugins/anyenv-update",
                   "#{scheme}://github.com/znz/anyenv-update.git")
end
# Install every requested env plugin and its versions; the first listed
# version of each env becomes the global default.
def install_envs(attributes)
  attributes[:install_versions].each do |env_map|
    env_map.each do |env_name, versions|
      install_env(env_name)
      versions.each { |version| install_env_version(env_name, version) }
      global_version(env_name, versions.first)
    end
  end
end
# Install an env plugin (e.g. rbenv, ndenv) through anyenv. `yes |`
# auto-confirms the installer prompt; the guard skips the run when the
# env command already resolves via `type`.
def install_env(envname)
  execute "install #{envname}" do
    user @username if @username
    command "#{@init_cmd} yes | anyenv install #{envname}"
    not_if "#{@init_cmd} type #{envname}"
  end
end
# Install one version via the env plugin (e.g. `rbenv install 2.1.0`).
# NOTE(review): the grep guard matches substrings, so "2.1" would match
# an installed "2.1.1" — confirm anchored matching is not needed.
def install_env_version(envname, version)
  execute "#{envname} install #{version}" do
    user @username if @username
    command "#{@init_cmd} yes | #{envname} install #{version}"
    not_if "#{@init_cmd} #{envname} versions | grep #{version}"
  end
end
# Set the env plugin's global default version, then rehash the shims.
# NOTE(review): the grep guard matches substrings — verify that is
# acceptable for the version strings used here.
def global_version(envname, version)
  execute "#{envname} global #{version}" do
    user @username if @username
    command "#{@init_cmd} #{envname} global #{version}; " \
      "#{@init_cmd} #{envname} rehash"
    not_if "#{@init_cmd} #{envname} global | grep #{version}"
  end
end
Add install dir from ANYENV_ROOT
# Fallback install root for system-wide installs when neither the node
# attribute nor the ANYENV_ROOT environment variable is set.
DEFAULT_ANYENV_ROOT = '/usr/local/anyenv'.freeze
# Entry point: set up per-run state, clone anyenv plus its update plugin,
# then install the requested env plugins and versions.
#
# attributes - Hash; :install_versions holds an Array of {env => [versions]}.
# username   - optional user to install for; nil means a system-wide install.
def run(attributes, username = nil)
  init(username)
  clone_anyenv
  clone_anyenv_update
  install_envs(attributes)
end

private

# Memoize per-run state. Order matters: @init_cmd is derived from
# @anyenv_root_path, which in turn depends on the username.
def init(username)
  @username = username
  @anyenv_root_path = anyenv_root(username)
  @init_cmd = anyenv_init(@anyenv_root_path)
end
# Protocol used for the GitHub clones; node attribute wins, "git" otherwise.
def scheme
  return @scheme if @scheme
  @scheme = node[:anyenv][:scheme] || 'git'
end
# Resolve the anyenv root: system-wide when no username is given,
# per-user otherwise.
def anyenv_root(username)
  username.nil? ? anyenv_system_root : anyenv_user_root(username)
end
# Root for a system-wide install. Precedence: explicit node attribute,
# then the ANYENV_ROOT environment variable, then the built-in default.
def anyenv_system_root
  configured = node[:anyenv] && node[:anyenv][:anyenv_root]
  configured || ENV['ANYENV_ROOT'] || DEFAULT_ANYENV_ROOT
end
# Root for a per-user install: the user's node attribute wins, otherwise
# ~/.anyenv under the platform's home-directory layout.
def anyenv_user_root(username)
  user_attrs = node[:anyenv][:users][username]
  return user_attrs[:anyenv_root] if user_attrs.key?(:anyenv_root)

  home = node[:platform] == 'darwin' ? "/Users/#{username}" : "/home/#{username}"
  "#{home}/.anyenv"
end
# Shell prefix that makes anyenv usable inside a single command string:
# exports ANYENV_ROOT, prepends its bin dir to PATH and evals `anyenv init`.
def anyenv_init(root_path)
  [
    %(export ANYENV_ROOT="#{root_path}"; ),
    %(export PATH="#{root_path}/bin:${PATH}"; ),
    %(eval "$(anyenv init -)"; )
  ].join
end
# Clone repo_path into install_path via Chef's git resource; the not_if
# guard makes it idempotent when the target directory already exists.
def clone_repository(install_path, repo_path)
  git install_path do
    user @username if @username
    repository repo_path if repo_path
    not_if "test -d #{install_path}"
  end
end
# Clone the anyenv repository itself into the resolved root.
def clone_anyenv
  clone_repository(@anyenv_root_path, "#{scheme}://github.com/riywo/anyenv.git")
end
# Clone the anyenv-update plugin into the plugins directory.
def clone_anyenv_update
  clone_repository("#{@anyenv_root_path}/plugins/anyenv-update",
                   "#{scheme}://github.com/znz/anyenv-update.git")
end
# Install every requested env plugin and its versions; the first listed
# version of each env becomes the global default.
def install_envs(attributes)
  attributes[:install_versions].each do |env_map|
    env_map.each do |env_name, versions|
      install_env(env_name)
      versions.each { |version| install_env_version(env_name, version) }
      global_version(env_name, versions.first)
    end
  end
end
# Install an env plugin (e.g. rbenv, ndenv) through anyenv. `yes |`
# auto-confirms the installer prompt; the guard skips the run when the
# env command already resolves via `type`.
def install_env(envname)
  execute "install #{envname}" do
    user @username if @username
    command "#{@init_cmd} yes | anyenv install #{envname}"
    not_if "#{@init_cmd} type #{envname}"
  end
end
# Install one version via the env plugin (e.g. `rbenv install 2.1.0`).
# NOTE(review): the grep guard matches substrings, so "2.1" would match
# an installed "2.1.1" — confirm anchored matching is not needed.
def install_env_version(envname, version)
  execute "#{envname} install #{version}" do
    user @username if @username
    command "#{@init_cmd} yes | #{envname} install #{version}"
    not_if "#{@init_cmd} #{envname} versions | grep #{version}"
  end
end
# Set the env plugin's global default version, then rehash the shims.
# NOTE(review): the grep guard matches substrings — verify that is
# acceptable for the version strings used here.
def global_version(envname, version)
  execute "#{envname} global #{version}" do
    user @username if @username
    command "#{@init_cmd} #{envname} global #{version}; " \
      "#{@init_cmd} #{envname} rehash"
    not_if "#{@init_cmd} #{envname} global | grep #{version}"
  end
end
|
module Serializr
module Thrift
extend ActiveSupport::Concern
# Make the thrift-generated Ruby sources loadable, if they have been
# generated (see .gen_idl). Uses File.exist? — the File.exists? alias is
# deprecated and was removed in Ruby 3.2.
def self.require_generated_source
  if File.exist?("#{Rails.root}/lib/serializr/gen-rb")
    $: << "#{Rails.root}/lib/serializr/gen-rb"
    require "#{Rails.root}/lib/serializr/gen-rb/serializr_app"
  end
end
# Generate the Thrift IDL (lib/serializr/serializr_app.thrift) from all
# serialized models, then shell out to `thrift` to produce the Ruby stubs.
def self.gen_idl
  FileUtils.mkdir_p Rails.root + "lib/serializr/"
  services = []
  # File.open block form guarantees the handle is closed even if a model
  # raises mid-write (the previous open/close pair leaked on error).
  File.open(Rails.root + "lib/serializr/serializr_app.thrift", "w") do |schema_file|
    schema_file.write("namespace rb SerializrModel\n")
    # each, not map: we iterate for side effects only.
    self.serialized_models.each do |klass|
      schema_file.write "\n\n#{klass.to_thrift_struct}"
      services << klass.to_thrift_service
    end
    schema_file.write "\n\nservice SerializrApp { \n\t#{services.flatten.join("\n\t")} \n}"
  end
  system "cd #{Rails.root}/lib/serializr; thrift --gen rb serializr_app.thrift"
end
# All model classes that opted into thrift serialization (respond to
# .to_thrift_service). The trailing .compact is essential: the map yields
# nil for non-thrift models, and callers (gen_idl, handler) invoke methods
# on every element, so nils would raise NoMethodError.
def self.serialized_models
  Dir.glob( Rails.root + 'app/models/*' ).map do |f|
    klass = File.basename( f ).gsub( /^(.+).rb/, '\1').camelize.constantize
    klass if klass.respond_to? :to_thrift_service
  end.compact
end
def self.handler
sm = self.serialized_models
Class.new do |k|
k.class_eval do
sm.map { |m| include m.thrift_handler_module }
end
end
end
def self.processor
SerializrModel::SerializrApp::Processor.new(self.handler)
end
def self.rack_middleware hook_path="/thrift", protocol_factory=::Thrift::BinaryProtocolAcceleratedFactory
[
Serializr::Thrift::RackMiddleware,
{
:processor => self.processor,
:hook_path => hook_path,
:protocol_factory => protocol_factory.new
}
]
end
module ClassMethods
def to_thrift_struct
num = 0
types_conversion = {
:string => "string",
:integer => "i32",
:date => "i32",
:boolean => "bool",
:text => "string"
}
attrs = columns_hash.map do |k,v|
num += 1
"#{num}: #{types_conversion[v.type] || "string"} #{k}"
end
"struct #{self.name} { \n\t#{attrs.join("\n\t")} \n}"
end
def to_thrift_service
services = []
services << "list<#{self.name}> list#{self.name.pluralize}(1: i32 limit)"
services << "#{self.name} get#{self.name}(1: i32 id)"
services << "#{self.name} delete#{self.name}(1: i32 id)"
services
end
def thrift_handler_module
klass_name = self.name
klass = self
Module.new do |m|
m.send(:define_method, "get#{klass_name}".to_sym) do |id|
klass.find_by_id(id).to_thrift
end
m.send(:define_method, "list#{klass_name.pluralize}".to_sym) do |limit|
klass.all(:limit => limit).map(&:to_thrift)
end
m.send(:define_method, "delete#{klass_name}".to_sym) do |id|
klass.destroy(id)
end
end
end
end
module InstanceMethods
def to_thrift
"SerializrModel::#{self.class.name}".constantize.new(self.serializable_hash)
end
end
class RackMiddleware
attr_reader :hook_path, :processor, :protocol_factory
def initialize(app, options = {})
@app = app
@processor = options[:processor] || (raise ArgumentError, "You have to specify a processor.")
@protocol_factory = options[:protocol_factory] || BinaryProtocolFactory.new
@hook_path = options[:hook_path] || "/thrift"
end
def call(env)
request = Rack::Request.new(env)
if request.path == hook_path
output = StringIO.new
transport = ::Thrift::IOStreamTransport.new(request.body, output)
protocol = @protocol_factory.get_protocol(transport)
@processor.process(protocol, protocol)
output.rewind
response = Rack::Response.new(output)
response["Content-Type"] = "application/x-thrift"
response.finish
else
@app.call(env)
end
end
end
end
end
Still working on the Thrift implementation...
# Serializr::Thrift exposes ActiveRecord models over Apache Thrift:
# it generates a .thrift IDL (structs, CRUD service methods, NotFoundError /
# ValidationError exceptions) from the models, builds a handler serving the
# declared RPCs, and ships a Rack middleware mounting the Thrift processor.
module Serializr
  module Thrift
    extend ActiveSupport::Concern

    # Adds the thrift-generated ruby sources (lib/serializr/gen-rb) to the
    # load path and requires the generated service file, if `gen_idl` has
    # been run already; otherwise does nothing.
    def self.require_generated_source
      # File.exists? was deprecated for years and removed in Ruby 3.2.
      if File.exist?("#{Rails.root}/lib/serializr/gen-rb")
        $: << "#{Rails.root}/lib/serializr/gen-rb"
        require "#{Rails.root}/lib/serializr/gen-rb/serializr_app"
      end
    end

    # Writes lib/serializr/serializr_app.thrift — the exception types, one
    # struct and one set of service methods per serialized model — then
    # shells out to the `thrift` compiler to generate the ruby bindings.
    def self.gen_idl
      FileUtils.mkdir_p "#{Rails.root}/lib/serializr/"
      # File.open instead of Kernel#open (which can spawn subprocesses for
      # "|..." paths), and ensure the handle is closed even on error.
      schema_file = File.open "#{Rails.root}/lib/serializr/serializr_app.thrift", "w"
      begin
        schema_file.write "namespace rb SerializrModel\n\n"
        schema_file.write "exception NotFoundError { \n\t1: string message \n}\n"
        schema_file.write "exception ValidationError { \n\t1: list<string> errors \n}\n"
        services = []
        # `each`, not `map`: only the side effects are wanted here.
        self.serialized_models.each do |klass|
          schema_file.write "\n\n#{klass.to_thrift_struct}"
          services << klass.to_thrift_service
        end
        schema_file.write "\n\nservice SerializrApp { \n\t#{services.flatten.join("\n\t")} \n}"
      ensure
        schema_file.close
      end
      system "cd #{Rails.root}/lib/serializr; thrift --gen rb serializr_app.thrift"
    end

    # All classes under app/models that opted into thrift serialization,
    # i.e. respond to .to_thrift_service. The `.compact` matters: the
    # previous version left `nil` entries for non-participating models,
    # which crashed gen_idl and handler when iterating.
    def self.serialized_models
      Dir.glob( Rails.root + 'app/models/*' ).map do |f|
        klass = File.basename( f ).gsub( /^(.+).rb/, '\1').camelize.constantize
        klass if klass.respond_to? :to_thrift_service
      end.compact
    end

    # Builds an anonymous handler class that mixes in every serialized
    # model's thrift handler module (count/get/list/create/update/delete).
    def self.handler
      sm = self.serialized_models
      Class.new do |k|
        k.class_eval do
          sm.each { |m| include m.thrift_handler_module }
        end
      end
    end

    # [middleware_class, options] pair suitable for `config.middleware.use`.
    def self.rack_middleware hook_path="/thrift", protocol_factory=::Thrift::BinaryProtocolAcceleratedFactory
      [
        Serializr::Thrift::RackMiddleware,
        {
          :processor => SerializrModel::SerializrApp::Processor.new(self.handler.new),
          :hook_path => hook_path,
          :protocol_factory => protocol_factory.new
        }
      ]
    end

    module ClassMethods
      # Thrift struct declaration for this model: one numbered field per
      # DB column; date/time columns become epoch seconds (i32), unmapped
      # column types fall back to `string`.
      def to_thrift_struct
        num = 0
        types_conversion = {
          :string => "string",
          :integer => "i32",
          :date => "i32",
          :boolean => "bool",
          :text => "string",
          :time => "i32",
          :datetime => "i32"
        }
        attrs = columns_hash.map do |k,v|
          num += 1
          "#{num}: #{types_conversion[v.type] || "string"} #{k}"
        end
        "struct #{self.name} { \n\t#{attrs.join("\n\t")} \n}"
      end

      # Service method signatures (CRUD + count) for this model. The first
      # two entries render as a blank line and a `#` comment in the IDL.
      def to_thrift_service
        services = ["", "# #{self.name} Service"]
        services << "list<#{self.name}> list#{self.name.pluralize}(1: i32 limit)"
        services << "i32 count#{self.name.pluralize}()"
        services << "#{self.name} get#{self.name}(1: i32 id) throws (1: NotFoundError err)"
        services << "list<#{self.name}> get#{self.name.pluralize}(1: list<i32> ids)"
        services << "i32 delete#{self.name}(1: i32 id) throws (1: NotFoundError err)"
        services << "i32 create#{self.name}(1: #{self.name} #{self.name.downcase}) throws (1: ValidationError err)"
        services << "#{self.name} update#{self.name}(1: #{self.name} #{self.name.downcase}) throws (1: NotFoundError not_found_error, 2: ValidationError validation_error)"
        services
      end

      # Anonymous module implementing the RPCs declared above; mixed into
      # the handler class by Serializr::Thrift.handler.
      def thrift_handler_module
        klass_name = self.name
        klass = self
        Module.new do |m|
          # countRecords
          m.send(:define_method, "count#{klass_name.pluralize}".to_sym) do
            klass.count
          end
          # getRecords
          m.send(:define_method, "get#{klass_name.pluralize}".to_sym) do |ids|
            klass.all(:conditions => { :id => ids }).map(&:to_thrift)
          end
          # listRecords
          m.send(:define_method, "list#{klass_name.pluralize}".to_sym) do |limit|
            klass.all(:limit => limit).map(&:to_thrift)
          end
          # getRecord
          m.send(:define_method, "get#{klass_name}".to_sym) do |id|
            begin
              klass.find(id).to_thrift
            rescue => e
              raise SerializrModel::NotFoundError.new "#{klass.name} ##{id} does not exist"
            end
          end
          # deleteRecord
          m.send(:define_method, "delete#{klass_name}".to_sym) do |id|
            begin
              id if klass.destroy(id)
            rescue => e
              raise SerializrModel::NotFoundError.new "#{klass.name} ##{id} does not exist"
            end
          end
          # createRecord — returns the new id, or raises ValidationError.
          m.send(:define_method, "create#{klass_name}".to_sym) do |thrift_record|
            record = klass.from_thrift(thrift_record)
            begin
              record.save!
              record.id
            rescue => e
              err = SerializrModel::ValidationError.new
              err.errors = record.errors.full_messages
              raise err
            end
          end
          # updateRecord — raises the IDL-declared NotFoundError for a
          # missing id (previously this crashed with NoMethodError on nil)
          # and ValidationError when the update fails.
          m.send(:define_method, "update#{klass_name}".to_sym) do |thrift_record|
            attrs = klass.attrs_from_thrift(thrift_record)
            record_id = attrs.delete("id")
            record = klass.find_by_id(record_id)
            unless record
              raise SerializrModel::NotFoundError.new "#{klass.name} ##{record_id} does not exist"
            end
            begin
              record.to_thrift if record.update_attributes!(attrs)
            rescue => e
              err = SerializrModel::ValidationError.new
              err.errors = record.errors.full_messages
              raise err
            end
          end
        end
      end

      # Attribute hash extracted from a thrift struct instance.
      def attrs_from_thrift rec
        rec.struct_fields.values.inject({}) do |h,f|
          h.merge(f[:name] => rec.send(f[:name]))
        end
      end

      # New (unsaved) model instance built from a thrift struct; the id
      # field is dropped so the DB assigns one.
      def from_thrift rec
        attrs = attrs_from_thrift(rec)
        attrs.delete "id"
        self.new(attrs)
      end
    end

    module InstanceMethods
      # Thrift struct instance mirroring this record's attributes;
      # Date/Time values are flattened to epoch seconds to match the IDL.
      def to_thrift
        h = self.serializable_hash.inject({}) do |hh,f|
          v = (f[1].is_a?(Date) || f[1].is_a?(Time)) ? f[1].to_time.to_i : f[1]
          hh.merge(f[0] => v)
        end
        "SerializrModel::#{self.class.name}".constantize.new(h)
      end
    end

    # Rack middleware: requests to `hook_path` are run through the Thrift
    # processor; everything else falls through to the wrapped app.
    class RackMiddleware
      attr_reader :hook_path, :processor, :protocol_factory

      def initialize(app, options = {})
        @app = app
        @processor = options[:processor] || (raise ArgumentError, "You have to specify a processor.")
        @protocol_factory = options[:protocol_factory] || BinaryProtocolFactory.new
        @hook_path = options[:hook_path] || "/thrift"
      end

      def call(env)
        request = Rack::Request.new(env)
        if request.path == hook_path
          # The request body is the thrift input stream, a StringIO
          # collects the processor's output and becomes the response body.
          output = StringIO.new
          transport = ::Thrift::IOStreamTransport.new(request.body, output)
          protocol = @protocol_factory.get_protocol(transport)
          @processor.process(protocol, protocol)
          output.rewind
          response = Rack::Response.new(output)
          response["Content-Type"] = "application/x-thrift"
          response.finish
        else
          @app.call(env)
        end
      end
    end
  end
end
# Development-environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise if the mailer can't send, so broken delivery shows up in development.
  config.action_mailer.raise_delivery_errors = true

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Deliver via Mailgun SMTP. Configure through config.action_mailer rather
  # than assigning to ActionMailer::Base directly: Action Mailer may not be
  # loaded yet while this block runs, and config.* settings are applied to
  # it after initialization.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    :port => 587,
    :address => "smtp.mailgun.org",
    :domain => ENV['domain'],
    :user_name => ENV['username'],
    :password => ENV['password'],
    :authentication => :plain,
  }
  # Gmail alternative:
  # config.action_mailer.smtp_settings = {
  #   :authentication => :plain,
  #   :address => "smtp.gmail.com",
  #   :port => 587,
  #   :user_name => ENV['gmail_username'],
  #   :password => ENV['gmail_password'],
  # }
  config.action_mailer.default_url_options = { host: "localhost:3000" }

  # Bullet: surface N+1 queries during development.
  config.after_initialize do
    Bullet.enable = true
    Bullet.alert = true
    Bullet.bullet_logger = true
    Bullet.console = true
  end
end
Taking out comments from development.rb
# Development-environment configuration.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Raise if the mailer can't send, so broken delivery shows up in development.
  config.action_mailer.raise_delivery_errors = true

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Deliver via Mailgun SMTP. Configure through config.action_mailer rather
  # than assigning to ActionMailer::Base directly: Action Mailer may not be
  # loaded yet while this block runs, and config.* settings are applied to
  # it after initialization.
  config.action_mailer.delivery_method = :smtp
  config.action_mailer.smtp_settings = {
    :port => 587,
    :address => "smtp.mailgun.org",
    :domain => ENV['domain'],
    :user_name => ENV['username'],
    :password => ENV['password'],
    :authentication => :plain,
  }
  config.action_mailer.default_url_options = { host: "localhost:3000" }

  # Bullet: surface N+1 queries during development.
  config.after_initialize do
    Bullet.enable = true
    Bullet.alert = true
    Bullet.bullet_logger = true
    Bullet.console = true
  end
end
|
# -*- coding: utf-8 -*-
# Emoji and character-code conversion handling.
# For the moment everything is kept in this single file.
# Constants
# NKF flags used when sending mail, keyed by the target charset.
Jpmobile::Emoticon::SEND_NKF_OPTIONS = {
  "shift_jis" => "-sWx --no-cp932",
  "iso-2022-jp" => "-jW",
}
# NKF flags used when receiving mail (everything is normalized to utf-8),
# keyed by the source charset.
Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS = {
  "shift_jis" => "-wSx --no-cp932",
  "iso-2022-jp" => "-wJ",
  "euc-jp" => "-wE",
  "utf-8" => "-wW",
}
# Matches an RFC 2047 B-encoded Subject header and captures
# (1) the charset token and (2) the base64 payload.
Jpmobile::Emoticon::SUBJECT_REGEXP = %r!=\?(shift[_-]jis|iso-2022-jp|euc-jp|utf-8)\?B\?(.+)\?=!i
# Patches TMail's Unquoter so that convert_to goes through NKF, letting
# platform-dependent characters and emoji survive the conversion.
module TMail
  class Unquoter
    class << self
      # http://www.kbmj.com/~shinya/rails_seminar/slides/#(30)
      # utf-8 <-> iso-2022-jp conversions are done directly via NKF; for
      # shift_jis input the stock conversion is reused but with the source
      # charset reinterpreted as cp932 (the vendor superset phones use).
      def convert_to_with_nkf(text, to, from)
        if text and to =~ /^utf-8$/i and from =~ /^iso-2022-jp$/i
          NKF.nkf("-Jw", text)
        elsif text and from =~ /^utf-8$/i and to =~ /^iso-2022-jp$/i
          NKF.nkf("-Wj", text)
        else
          if from =~ /^shift_jis$/i
            convert_to_without_nkf(text, to, "cp932")
          else
            convert_to_without_nkf(text, to, from)
          end
        end
      end
      alias_method_chain :convert_to, :nkf
    end
  end
end
# Patches ActionMailer::Base to pick a per-carrier charset and convert
# emoji on both the sending and receiving paths (jpmobile integration).
module ActionMailer
  class Base
    # U+301C WAVE DASH / U+FF5E FULLWIDTH TILDE — the classic round-trip
    # mismatch between Unicode mappings of Shift_JIS.
    WAVE_DASH = [0x301c].pack("U")
    FULLWIDTH_TILDA = [0xff5e].pack("U")
    alias :create_without_jpmobile! :create!
    alias :create_mail_without_jpmobile :create_mail
    # Decides outgoing charset / emoji handling from the recipient address,
    # then delegates to the original create_mail.
    def create_mail
      # Detect the carrier from the mail address.
      if recipients.is_a?(String)
        @mobile = Jpmobile::Email.detect(recipients).new({}) rescue nil
        # Work around the wave-dash problem.
        @subject = @subject.gsub(FULLWIDTH_TILDA, WAVE_DASH)
        @body = @body.gsub(FULLWIDTH_TILDA, WAVE_DASH)
        # Convert emoji to numeric character references.
        @subject = Jpmobile::Emoticon::utf8_to_unicodecr(@subject)
        @body = Jpmobile::Emoticon::utf8_to_unicodecr(@body)
        case @mobile
        when Jpmobile::Mobile::Docomo
          @jpm_encode = "shift_jis"
          @to_sjis = true
        when Jpmobile::Mobile::Au
          @jpm_encode = "iso-2022-jp"
          @to_sjis = false
        when Jpmobile::Mobile::Vodafone, Jpmobile::Mobile::Jphone
          @jpm_encode = "iso-2022-jp"
          @to_sjis = false
        when Jpmobile::Mobile::Softbank
          @jpm_encode = "shift_jis"
          @to_sjis = true
        else
          # Anything else is sent as iso-2022-jp.
          @charset = "iso-2022-jp"
          @mobile = nil
        end
      end
      create_mail_without_jpmobile
    end
    # Builds the mail, then hooks TMail::Mail#encoded on this one mail
    # object so charset / emoji conversion happens at serialization time.
    def create!(method_name, *parameters)
      create_without_jpmobile!(method_name, *parameters)
      return @mail unless @mobile
      # Hook TMail::Mail#encoded.
      @mail.instance_eval do
        # Stashes the conversion parameters on the mail object.
        # (body_encode is accepted but unused here — kept for the caller.)
        def emoji_convert(mail_encode, body_encode, to_sjis, mobile = nil)
          @mail_encode = mail_encode
          @emoji_sjis = to_sjis
          @nkf_opts = Jpmobile::Emoticon::SEND_NKF_OPTIONS[@mail_encode]
          @mobile = mobile
        end
        alias :encoded_without_jpmobile :encoded
        # Converts subject/body charset and emoji before serializing.
        def encoded
          if @mobile
            jpm_subject = NKF.nkf(@nkf_opts, self.subject)
            jpm_subject = Jpmobile::Emoticon.unicodecr_to_email(jpm_subject, @mobile)
            # RFC 2047 B-encode the converted subject by hand.
            jpm_subject = "=?#{@mail_encode}?B?" + [jpm_subject].pack("m").delete("\r\n") + "?="
            case @mobile
            when Jpmobile::Mobile::Au, Jpmobile::Mobile::Vodafone, Jpmobile::Mobile::Jphone
              jpm_body = self.quoted_body
              self.charset = @mail_encode
              # AU is iso-2022-jp already, so the subject is assigned as-is.
              self.subject = jpm_subject
            else
              jpm_body = self.body
              self.charset = @mail_encode
              self.header["subject"].instance_variable_set(:@body, jpm_subject)
            end
            jpm_body = NKF.nkf(@nkf_opts, jpm_body)
            jpm_body = Jpmobile::Emoticon.unicodecr_to_email(jpm_body, @mobile)
            self.body = jpm_body
          end
          encoded_without_jpmobile
        end
      end
      # Emoji / kanji-code conversion.
      @mail.emoji_convert(@jpm_encode, @jpm_encode, @to_sjis, @mobile)
      @mail
    end
    # receive
    class << self
      alias :receive_without_jpmobile :receive
      # Receives a raw mail, and for mobile senders converts carrier emoji
      # and charset in both subject and body.
      def receive(raw_mail)
        @raw_data = raw_mail
        @mail = receive_without_jpmobile(raw_mail)
        # Is the sender a mobile phone?
        if (@mobile = Jpmobile::Email.detect(@mail.from.first).new({}) rescue nil)
          # For mobile senders the subject is taken straight from @header
          # and converted here.
          header = @mail.instance_variable_get(:@header)
          subject = header["subject"].instance_variable_get(:@body)
          if subject.match(Jpmobile::Emoticon::SUBJECT_REGEXP)
            code = $1
            subject = $2
          else
            code = nil
          end
          # FIXME: kanji codes are hard-wired; switch to a generic mechanism.
          case @mobile
          when Jpmobile::Mobile::Docomo
            # Assumes the mail is shift_jis.
            # Subject: emoji / kanji-code conversion.
            subject = Jpmobile::Emoticon.external_to_unicodecr_docomo(subject.unpack('m').first)
            # NOTE(review): `code` is nil when SUBJECT_REGEXP did not match,
            # so code.downcase would raise NoMethodError — confirm upstream.
            @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
            # Body: emoji / kanji-code conversion.
            body = Jpmobile::Emoticon.external_to_unicodecr_docomo(@mail.quoted_body)
            # NOTE(review): unlike the subject, this looks up `code` without
            # .downcase; an upper-case charset token misses the table.
            @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code], body)
          when Jpmobile::Mobile::Au
            # Converts the iso-2022-jp content.
            # Subject: emoji / kanji-code conversion.
            subject = Jpmobile::Emoticon.external_to_unicodecr_au_mail(subject.unpack('m').first)
            @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
            # Body: emoji conversion only. @mail.charset is iso-2022-jp, and
            # forcing a kanji-code conversion here would make TMail convert
            # it again, so the kanji code is left untouched.
            body = Jpmobile::Emoticon.external_to_unicodecr_au_mail(@mail.quoted_body)
            @mail.body = body
          when Jpmobile::Mobile::Softbank
            case @mail.charset
            when /^shift_jis$/i
              # Subject: emoji / kanji-code conversion (Shift_JIS variant).
              # subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              subject = Jpmobile::Emoticon.external_to_unicodecr_softbank_sjis(subject.unpack('m').first)
              @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
              # Body: emoji / kanji-code conversion.
              body = Jpmobile::Emoticon.external_to_unicodecr_softbank_sjis(@mail.quoted_body)
              # NOTE(review): lookup uses @mail.charset verbatim; a non-
              # lowercase charset value would miss the options table.
              @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[@mail.charset], body)
            when /^utf-8$/i
              # Subject: emoji / kanji-code conversion (UTF-8 variant).
              # subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
              # Body: emoji / kanji-code conversion.
              body = Jpmobile::Emoticon.external_to_unicodecr_softbank(@mail.quoted_body)
              @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[@mail.charset], body)
            else
              # Do nothing.
            end
          end
        end
        @mail
      end
    end
  end
end
漢字コードを body のコードに変更
Signed-off-by: Shin-ichiro OGAWA <944c384b665d30ce41dfae275e7ba3c4ca979271@gmail.com>
# -*- coding: utf-8 -*-
# Emoji and character-code conversion handling.
# For the moment everything is kept in this single file.
# Constants
# NKF flags used when sending mail, keyed by the target charset.
Jpmobile::Emoticon::SEND_NKF_OPTIONS = {
  "shift_jis" => "-sWx --no-cp932",
  "iso-2022-jp" => "-jW",
}
# NKF flags used when receiving mail (everything is normalized to utf-8),
# keyed by the source charset.
Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS = {
  "shift_jis" => "-wSx --no-cp932",
  "iso-2022-jp" => "-wJ",
  "euc-jp" => "-wE",
  "utf-8" => "-wW",
}
# Matches an RFC 2047 B-encoded Subject header and captures
# (1) the charset token and (2) the base64 payload.
Jpmobile::Emoticon::SUBJECT_REGEXP = %r!=\?(shift[_-]jis|iso-2022-jp|euc-jp|utf-8)\?B\?(.+)\?=!i
# Patches TMail's Unquoter so that convert_to goes through NKF, letting
# platform-dependent characters and emoji survive the conversion.
module TMail
  class Unquoter
    class << self
      # http://www.kbmj.com/~shinya/rails_seminar/slides/#(30)
      # utf-8 <-> iso-2022-jp conversions are done directly via NKF; for
      # shift_jis input the stock conversion is reused but with the source
      # charset reinterpreted as cp932 (the vendor superset phones use).
      def convert_to_with_nkf(text, to, from)
        if text and to =~ /^utf-8$/i and from =~ /^iso-2022-jp$/i
          NKF.nkf("-Jw", text)
        elsif text and from =~ /^utf-8$/i and to =~ /^iso-2022-jp$/i
          NKF.nkf("-Wj", text)
        else
          if from =~ /^shift_jis$/i
            convert_to_without_nkf(text, to, "cp932")
          else
            convert_to_without_nkf(text, to, from)
          end
        end
      end
      alias_method_chain :convert_to, :nkf
    end
  end
end
# Patches ActionMailer::Base to pick a per-carrier charset and convert
# emoji on both the sending and receiving paths (jpmobile integration).
module ActionMailer
  class Base
    # U+301C WAVE DASH / U+FF5E FULLWIDTH TILDE — the classic round-trip
    # mismatch between Unicode mappings of Shift_JIS.
    WAVE_DASH = [0x301c].pack("U")
    FULLWIDTH_TILDA = [0xff5e].pack("U")
    alias :create_without_jpmobile! :create!
    alias :create_mail_without_jpmobile :create_mail
    # Decides outgoing charset / emoji handling from the recipient address,
    # then delegates to the original create_mail.
    def create_mail
      # Detect the carrier from the mail address.
      if recipients.is_a?(String)
        @mobile = Jpmobile::Email.detect(recipients).new({}) rescue nil
        # Work around the wave-dash problem.
        @subject = @subject.gsub(FULLWIDTH_TILDA, WAVE_DASH)
        @body = @body.gsub(FULLWIDTH_TILDA, WAVE_DASH)
        # Convert emoji to numeric character references.
        @subject = Jpmobile::Emoticon::utf8_to_unicodecr(@subject)
        @body = Jpmobile::Emoticon::utf8_to_unicodecr(@body)
        case @mobile
        when Jpmobile::Mobile::Docomo
          @jpm_encode = "shift_jis"
          @to_sjis = true
        when Jpmobile::Mobile::Au
          @jpm_encode = "iso-2022-jp"
          @to_sjis = false
        when Jpmobile::Mobile::Vodafone, Jpmobile::Mobile::Jphone
          @jpm_encode = "iso-2022-jp"
          @to_sjis = false
        when Jpmobile::Mobile::Softbank
          @jpm_encode = "shift_jis"
          @to_sjis = true
        else
          # Anything else is sent as iso-2022-jp.
          @charset = "iso-2022-jp"
          @mobile = nil
        end
      end
      create_mail_without_jpmobile
    end
    # Builds the mail, then hooks TMail::Mail#encoded on this one mail
    # object so charset / emoji conversion happens at serialization time.
    def create!(method_name, *parameters)
      create_without_jpmobile!(method_name, *parameters)
      return @mail unless @mobile
      # Hook TMail::Mail#encoded.
      @mail.instance_eval do
        # Stashes the conversion parameters on the mail object.
        # (body_encode is accepted but unused here — kept for the caller.)
        def emoji_convert(mail_encode, body_encode, to_sjis, mobile = nil)
          @mail_encode = mail_encode
          @emoji_sjis = to_sjis
          @nkf_opts = Jpmobile::Emoticon::SEND_NKF_OPTIONS[@mail_encode]
          @mobile = mobile
        end
        alias :encoded_without_jpmobile :encoded
        # Converts subject/body charset and emoji before serializing.
        def encoded
          if @mobile
            jpm_subject = NKF.nkf(@nkf_opts, self.subject)
            jpm_subject = Jpmobile::Emoticon.unicodecr_to_email(jpm_subject, @mobile)
            # RFC 2047 B-encode the converted subject by hand.
            jpm_subject = "=?#{@mail_encode}?B?" + [jpm_subject].pack("m").delete("\r\n") + "?="
            case @mobile
            when Jpmobile::Mobile::Au, Jpmobile::Mobile::Vodafone, Jpmobile::Mobile::Jphone
              jpm_body = self.quoted_body
              self.charset = @mail_encode
              # AU is iso-2022-jp already, so the subject is assigned as-is.
              self.subject = jpm_subject
            else
              jpm_body = self.body
              self.charset = @mail_encode
              self.header["subject"].instance_variable_set(:@body, jpm_subject)
            end
            jpm_body = NKF.nkf(@nkf_opts, jpm_body)
            jpm_body = Jpmobile::Emoticon.unicodecr_to_email(jpm_body, @mobile)
            self.body = jpm_body
          end
          encoded_without_jpmobile
        end
      end
      # Emoji / kanji-code conversion.
      @mail.emoji_convert(@jpm_encode, @jpm_encode, @to_sjis, @mobile)
      @mail
    end
    # receive
    class << self
      alias :receive_without_jpmobile :receive
      # Receives a raw mail, and for mobile senders converts carrier emoji
      # and charset in both subject and body.
      def receive(raw_mail)
        @raw_data = raw_mail
        @mail = receive_without_jpmobile(raw_mail)
        # Is the sender a mobile phone?
        if (@mobile = Jpmobile::Email.detect(@mail.from.first).new({}) rescue nil)
          # For mobile senders the subject is taken straight from @header
          # and converted here.
          header = @mail.instance_variable_get(:@header)
          subject = header["subject"].instance_variable_get(:@body)
          if subject.match(Jpmobile::Emoticon::SUBJECT_REGEXP)
            code = $1
            subject = $2
          else
            code = nil
          end
          # FIXME: kanji codes are hard-wired; switch to a generic mechanism.
          case @mobile
          when Jpmobile::Mobile::Docomo
            # Assumes the mail is shift_jis.
            # Subject: emoji / kanji-code conversion.
            subject = Jpmobile::Emoticon.external_to_unicodecr_docomo(subject.unpack('m').first)
            # NOTE(review): `code` is nil when SUBJECT_REGEXP did not match,
            # so code.downcase would raise NoMethodError — confirm upstream.
            @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
            # Body: emoji / kanji-code conversion, keyed on the mail's own
            # charset (this revision's change from the subject's `code`).
            body = Jpmobile::Emoticon.external_to_unicodecr_docomo(@mail.quoted_body)
            # NOTE(review): lookup uses @mail.charset verbatim; a non-
            # lowercase charset value would miss the options table.
            @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[@mail.charset], body)
          when Jpmobile::Mobile::Au
            # Converts the iso-2022-jp content.
            # Subject: emoji / kanji-code conversion.
            subject = Jpmobile::Emoticon.external_to_unicodecr_au_mail(subject.unpack('m').first)
            @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
            # Body: emoji conversion only. @mail.charset is iso-2022-jp, and
            # forcing a kanji-code conversion here would make TMail convert
            # it again, so the kanji code is left untouched.
            body = Jpmobile::Emoticon.external_to_unicodecr_au_mail(@mail.quoted_body)
            @mail.body = body
          when Jpmobile::Mobile::Softbank
            case @mail.charset
            when /^shift_jis$/i
              # Subject: emoji / kanji-code conversion (Shift_JIS variant).
              # subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              subject = Jpmobile::Emoticon.external_to_unicodecr_softbank_sjis(subject.unpack('m').first)
              @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
              # Body: emoji / kanji-code conversion.
              body = Jpmobile::Emoticon.external_to_unicodecr_softbank_sjis(@mail.quoted_body)
              @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[@mail.charset], body)
            when /^utf-8$/i
              # Subject: emoji / kanji-code conversion (UTF-8 variant).
              # subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              subject = Jpmobile::Emoticon.external_to_unicodecr_softbank(subject.unpack('m').first)
              @mail.subject = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[code.downcase], subject)
              # Body: emoji / kanji-code conversion.
              body = Jpmobile::Emoticon.external_to_unicodecr_softbank(@mail.quoted_body)
              @mail.body = NKF.nkf(Jpmobile::Emoticon::RECEIVE_NKF_OPTIONS[@mail.charset], body)
            else
              # Do nothing.
            end
          end
        end
        @mail
      end
    end
  end
end
|
# Development-environment configuration for cccd.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Paperclip attachment storage for development.
  PAPERCLIP_STORAGE_OPTIONS = {
    storage: :filesystem,
    path: "public/assets/dev/images/docs/:id_partition/:filename",
    url: "assets/dev/images/docs/:id_partition/:filename"
  }.freeze
  REPORTS_STORAGE_OPTIONS = {
    storage: :filesystem,
    path: "tmp/dev/reports/:filename",
    url: "tmp/dev/reports/:filename"
  }.freeze

  # logging
  jsonlogger = LogStuff.new_logger("#{Rails.root}/log/logstash_development.log", Logger::INFO)
  config.logstasher.enabled = true
  config.logstasher.suppress_app_log = false
  config.logstasher.logger = jsonlogger
  # Need to specifically set the logstasher loglevel since it will overwrite the one set earlier
  config.logstasher.log_level = Logger::DEBUG
  config.logstasher.source = 'cccd.development'
  # Reuse logstasher logger with logstuff
  LogStuff.setup(:logger => jsonlogger)
  LogStuff.source = 'cccd.development'

  # Removed to allow for remote device testing (iPad or other tablets)
  # config.action_controller.asset_host = "http://localhost:3000"

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join('tmp/caching-dev.txt').exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => 'public, max-age=172800'
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Mailer settings. NOTE: raise_delivery_errors used to be assigned twice
  # in this file (false here, true further down); the later assignment won,
  # so it is now set once to the effective value.
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_caching = false
  config.action_mailer.default_url_options = { host: ENV["GRAPE_SWAGGER_ROOT_URL"] || 'http://localhost:3000' }
  config.action_mailer.asset_host = config.action_mailer.default_url_options[:host]
  # Only deliver when a GOV.UK Notify API key is configured.
  config.action_mailer.perform_deliveries = Settings.govuk_notify.api_key.present?

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # TODO: remove as NOT AVAILABLE in rails 5.1+
  # This will be be default behaviour in next version of activerecord - early opt-in
  # config.active_record.raise_in_transactional_callbacks = true

  # Enable the ability to preview devise emails; an index of all previews
  # can be viewed at http://localhost:3000/rails/mailers (webrick defaults).
  config.action_mailer.preview_path = "#{Rails.root}/spec/mailers/previews"

  # Rack livereload for frontend development; skip quietly when the gem is
  # not bundled. (Explicit rescue replaces the one-line `rescue` modifier —
  # same StandardError scope, but the intent is visible.)
  begin
    config.middleware.use Rack::LiveReload
  rescue StandardError
    puts 'Rack::LiveReload not available'
  end

  # config for sending mails from dev
  # config.action_mailer.perform_deliveries = true
  # config.action_mailer.delivery_method = :smtp
  # config.action_mailer.smtp_settings = {
  #   address: ENV['SMTP_SERVER'],
  #   port: ENV['SMTP_PORT'],
  #   domain: ENV['SMTP_DOMAIN'],
  #   user_name: ENV['SMTP_USER'],
  #   password: ENV['SMTP_PASSWORD'],
  #   authentication: :login,
  #   enable_starttls_auto: true
  # }
end
Remove configuration no longer in use in Rails 5.2
More info here:
http://edgeguides.rubyonrails.org/upgrading_ruby_on_rails.html#error-handling-in-transaction-callbacks
# Development-environment configuration for cccd.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Paperclip attachment storage for development.
  PAPERCLIP_STORAGE_OPTIONS = {
    storage: :filesystem,
    path: "public/assets/dev/images/docs/:id_partition/:filename",
    url: "assets/dev/images/docs/:id_partition/:filename"
  }.freeze
  REPORTS_STORAGE_OPTIONS = {
    storage: :filesystem,
    path: "tmp/dev/reports/:filename",
    url: "tmp/dev/reports/:filename"
  }.freeze

  # logging
  jsonlogger = LogStuff.new_logger("#{Rails.root}/log/logstash_development.log", Logger::INFO)
  config.logstasher.enabled = true
  config.logstasher.suppress_app_log = false
  config.logstasher.logger = jsonlogger
  # Need to specifically set the logstasher loglevel since it will overwrite the one set earlier
  config.logstasher.log_level = Logger::DEBUG
  config.logstasher.source = 'cccd.development'
  # Reuse logstasher logger with logstuff
  LogStuff.setup(:logger => jsonlogger)
  LogStuff.source = 'cccd.development'

  # Removed to allow for remote device testing (iPad or other tablets)
  # config.action_controller.asset_host = "http://localhost:3000"

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join('tmp/caching-dev.txt').exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => 'public, max-age=172800'
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Mailer settings. NOTE: raise_delivery_errors used to be assigned twice
  # in this file (false here, true further down); the later assignment won,
  # so it is now set once to the effective value.
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_caching = false
  config.action_mailer.default_url_options = { host: ENV["GRAPE_SWAGGER_ROOT_URL"] || 'http://localhost:3000' }
  config.action_mailer.asset_host = config.action_mailer.default_url_options[:host]
  # Only deliver when a GOV.UK Notify API key is configured.
  config.action_mailer.perform_deliveries = Settings.govuk_notify.api_key.present?

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Enable the ability to preview devise emails; an index of all previews
  # can be viewed at http://localhost:3000/rails/mailers (webrick defaults).
  config.action_mailer.preview_path = "#{Rails.root}/spec/mailers/previews"

  # Rack livereload for frontend development; skip quietly when the gem is
  # not bundled. (Explicit rescue replaces the one-line `rescue` modifier —
  # same StandardError scope, but the intent is visible.)
  begin
    config.middleware.use Rack::LiveReload
  rescue StandardError
    puts 'Rack::LiveReload not available'
  end

  # config for sending mails from dev
  # config.action_mailer.perform_deliveries = true
  # config.action_mailer.delivery_method = :smtp
  # config.action_mailer.smtp_settings = {
  #   address: ENV['SMTP_SERVER'],
  #   port: ENV['SMTP_PORT'],
  #   domain: ENV['SMTP_DOMAIN'],
  #   user_name: ENV['SMTP_USER'],
  #   password: ENV['SMTP_PASSWORD'],
  #   authentication: :login,
  #   enable_starttls_auto: true
  # }
end
|
require "sfn"
module Sfn
  class Command
    # Plan command
    class Plan < Command
      include Sfn::CommandModule::Base
      include Sfn::CommandModule::Planning
      include Sfn::CommandModule::Stack
      include Sfn::CommandModule::Template

      # Run the stack planning command
      #
      # Generates (or loads) a change plan for the named stack, then hands
      # off to the Realize command for display/execution.
      def execute!
        name_required!
        name = name_args.first
        stack_info = "#{ui.color("Name:", :bold)} #{name}"
        begin
          stack = provider.stacks.get(name)
        rescue Miasma::Error::ApiError::RequestError
          # Stack does not exist yet; build an unpersisted model so a
          # creation plan can still be generated
          stack = provider.stacks.build(name: name)
        end
        return display_plan_lists(stack) if config[:list]
        if config[:plan_name]
          # ensure custom attribute is dirty so we can modify
          stack.custom = stack.custom.dup
          stack.custom[:plan_name] = config[:plan_name]
        end
        use_existing = false
        unless config[:print_only]
          ui.info "#{ui.color("SparkleFormation:", :bold)} #{ui.color("plan", :green)}"
          if stack && stack.plan
            ui.warn "Found existing plan for this stack"
            begin
              # --load-existing set: reuse the plan without prompting;
              # when unset, ask before destroying the existing plan
              if config[:load_existing]
                raise Bogo::Ui::ConfirmationDeclined
              end
              if config[:load_existing].nil?
                ui.confirm "Destroy existing plan"
              end
              ui.info "Destroying existing plan to generate new plan"
              stack.plan.destroy
            rescue Bogo::Ui::ConfirmationDeclined
              ui.info "Loading existing stack plan for #{ui.color(stack.name, :bold)}..."
              use_existing = true
            end
          end
        end
        unless use_existing
          config[:compile_parameters] ||= Smash.new
          if config[:file]
            s_name = [name]
            # Walk the stack and its nested stacks, seeding compile-time
            # parameters from each stack's "CompileState" output
            c_setter = lambda do |c_stack|
              if c_stack.outputs
                compile_params = c_stack.outputs.detect do |output|
                  output.key == "CompileState"
                end
              end
              if compile_params
                compile_params = MultiJson.load(compile_params.value)
                c_current = config[:compile_parameters].fetch(s_name.join("__"), Smash.new)
                # values already present in config take precedence
                config[:compile_parameters][s_name.join("__")] = compile_params.merge(c_current)
              end
              c_stack.nested_stacks(false).each do |n_stack|
                s_name.push(n_stack.data.fetch(:logical_id, n_stack.name))
                c_setter.call(n_stack)
                s_name.pop
              end
            end
            if stack && stack.persisted?
              c_setter.call(stack)
            end
            ui.debug "Compile parameters - #{config[:compile_parameters]}"
            file = load_template_file(:stack => stack)
            stack_info << " #{ui.color("Path:", :bold)} #{config[:file]}"
          else
            file = stack.template.dup
          end
          unless file
            if config[:template]
              file = config[:template]
              stack_info << " #{ui.color("(template provided)", :green)}"
            else
              stack_info << " #{ui.color("(no template update)", :yellow)}"
            end
          end
          unless config[:print_only]
            ui.info " -> #{stack_info}"
          end
          if file
            if config[:print_only]
              ui.puts format_json(parameter_scrub!(template_content(file)))
              return
            end
            original_parameters = stack.parameters
            apply_stacks!(stack)
            populate_parameters!(file, :current_parameters => stack.root_parameters)
            stack.parameters = config_root_parameters
            if config[:upload_root_template]
              upload_result = store_template(name, file, Smash.new)
              stack.template_url = upload_result[:url]
            else
              stack.template = parameter_scrub!(template_content(file, :scrub))
            end
          else
            # No template update: plan against the existing template
            apply_stacks!(stack)
            original_parameters = stack.parameters
            populate_parameters!(stack.template, :current_parameters => stack.root_parameters)
            stack.parameters = config_root_parameters
          end
          ui.info " -> Generating plan information..."
        else
          ui.info " -> Loading plan information..."
        end
        # Set options defined within config into stack instance for update request.
        # FIX: this must run outside the `unless use_existing` branch above so
        # options are applied even when a pre-generated plan is loaded —
        # previously they were silently dropped in that case.
        if config[:merge_api_options]
          config.fetch(:options, Smash.new).each_pair do |key, value|
            if stack.respond_to?("#{key}=")
              stack.send("#{key}=", value)
            end
          end
        end
        plan = stack.plan || stack.plan_generate
        namespace.const_get(:Realize).
          new(config, [name]).execute!
      end

      # Display plan list in table form
      #
      # @param [Miasma::Models::Orchestration::Stack]
      def display_plan_lists(stack)
        unless stack
          raise "Failed to locate requested stack `#{name_args.first}`"
        end
        plans = stack.plans.all
        if plans.empty?
          ui.warn "No plans found for stack `#{stack.name}`"
          return
        end
        ui.info "Plans for stack: #{ui.color(stack.name, :bold)}\n"
        # Column widths start at the header widths and grow to fit content
        n_width = "Plan Name".length
        i_width = "Plan ID".length
        s_width = "Plan State".length
        c_width = "Created".length
        plan_info = plans.map do |plan|
          plan_id = plan.id.to_s.split("/").last
          n_width = plan.name.to_s.length if plan.name.to_s.length > n_width
          i_width = plan_id.to_s.length if plan_id.length > i_width
          s_width = plan.state.to_s.length if plan.state.to_s.length > s_width
          c_width = plan.created_at.to_s.length if plan.created_at.to_s.length > c_width
          [plan.name, plan_id, plan.state, plan.created_at]
        end
        table = ui.table(self) do
          table(:border => false) do
            row(:header => true) do
              column "Plan Name", :width => n_width + 5
              column "Plan ID", :width => i_width + 5
              column "Plan State", :width => s_width + 5
              column "Created", :width => c_width + 5
            end
            plan_info.sort_by(&:first).each do |plan|
              row do
                plan.each do |item|
                  column item
                end
              end
            end
          end
        end.display
      end
    end
  end
end
Move API merging outside of conditional
When merging API options, set options prior to realization so
options are set when plan information is generated or pre-generated
plan information is loaded.
require "sfn"
module Sfn
  class Command
    # Plan command
    class Plan < Command
      include Sfn::CommandModule::Base
      include Sfn::CommandModule::Planning
      include Sfn::CommandModule::Stack
      include Sfn::CommandModule::Template

      # Run the stack planning command
      #
      # Generates (or loads) a change plan for the named stack, then hands
      # off to the Realize command for display/execution.
      def execute!
        name_required!
        name = name_args.first
        stack_info = "#{ui.color("Name:", :bold)} #{name}"
        begin
          stack = provider.stacks.get(name)
        rescue Miasma::Error::ApiError::RequestError
          # stack does not exist yet; build an unpersisted model so a
          # creation plan can still be generated
          stack = provider.stacks.build(name: name)
        end
        return display_plan_lists(stack) if config[:list]
        if config[:plan_name]
          # ensure custom attribute is dirty so we can modify
          stack.custom = stack.custom.dup
          stack.custom[:plan_name] = config[:plan_name]
        end
        use_existing = false
        unless config[:print_only]
          ui.info "#{ui.color("SparkleFormation:", :bold)} #{ui.color("plan", :green)}"
          if stack && stack.plan
            ui.warn "Found existing plan for this stack"
            begin
              # --load-existing set: reuse the plan without prompting;
              # when unset, ask before destroying the existing plan
              if config[:load_existing]
                raise Bogo::Ui::ConfirmationDeclined
              end
              if config[:load_existing].nil?
                ui.confirm "Destroy existing plan"
              end
              ui.info "Destroying existing plan to generate new plan"
              stack.plan.destroy
            rescue Bogo::Ui::ConfirmationDeclined
              ui.info "Loading existing stack plan for #{ui.color(stack.name, :bold)}..."
              use_existing = true
            end
          end
        end
        unless use_existing
          config[:compile_parameters] ||= Smash.new
          if config[:file]
            s_name = [name]
            # walk the stack and its nested stacks, seeding compile-time
            # parameters from each stack's "CompileState" output
            c_setter = lambda do |c_stack|
              if c_stack.outputs
                compile_params = c_stack.outputs.detect do |output|
                  output.key == "CompileState"
                end
              end
              if compile_params
                compile_params = MultiJson.load(compile_params.value)
                c_current = config[:compile_parameters].fetch(s_name.join("__"), Smash.new)
                # values already present in config take precedence
                config[:compile_parameters][s_name.join("__")] = compile_params.merge(c_current)
              end
              c_stack.nested_stacks(false).each do |n_stack|
                s_name.push(n_stack.data.fetch(:logical_id, n_stack.name))
                c_setter.call(n_stack)
                s_name.pop
              end
            end
            if stack && stack.persisted?
              c_setter.call(stack)
            end
            ui.debug "Compile parameters - #{config[:compile_parameters]}"
            file = load_template_file(:stack => stack)
            stack_info << " #{ui.color("Path:", :bold)} #{config[:file]}"
          else
            file = stack.template.dup
          end
          unless file
            if config[:template]
              file = config[:template]
              stack_info << " #{ui.color("(template provided)", :green)}"
            else
              stack_info << " #{ui.color("(no template update)", :yellow)}"
            end
          end
          unless config[:print_only]
            ui.info " -> #{stack_info}"
          end
          if file
            if config[:print_only]
              ui.puts format_json(parameter_scrub!(template_content(file)))
              return
            end
            original_parameters = stack.parameters
            apply_stacks!(stack)
            populate_parameters!(file, :current_parameters => stack.root_parameters)
            stack.parameters = config_root_parameters
            if config[:upload_root_template]
              upload_result = store_template(name, file, Smash.new)
              stack.template_url = upload_result[:url]
            else
              stack.template = parameter_scrub!(template_content(file, :scrub))
            end
          else
            # no template update: plan against the existing template
            apply_stacks!(stack)
            original_parameters = stack.parameters
            populate_parameters!(stack.template, :current_parameters => stack.root_parameters)
            stack.parameters = config_root_parameters
          end
          ui.info " -> Generating plan information..."
        else
          ui.info " -> Loading plan information..."
        end
        # Set options defined within config into stack instance for update request
        # (placed outside the branch above so options apply to loaded plans too)
        if config[:merge_api_options]
          config.fetch(:options, Smash.new).each_pair do |key, value|
            if stack.respond_to?("#{key}=")
              stack.send("#{key}=", value)
            end
          end
        end
        plan = stack.plan || stack.plan_generate
        namespace.const_get(:Realize).
          new(config, [name]).execute!
      end

      # Display plan list in table form
      #
      # @param [Miasma::Models::Orchestration::Stack]
      def display_plan_lists(stack)
        unless stack
          raise "Failed to locate requested stack `#{name_args.first}`"
        end
        plans = stack.plans.all
        if plans.empty?
          ui.warn "No plans found for stack `#{stack.name}`"
          return
        end
        ui.info "Plans for stack: #{ui.color(stack.name, :bold)}\n"
        # column widths start at the header widths and grow to fit content
        n_width = "Plan Name".length
        i_width = "Plan ID".length
        s_width = "Plan State".length
        c_width = "Created".length
        plan_info = plans.map do |plan|
          plan_id = plan.id.to_s.split("/").last
          n_width = plan.name.to_s.length if plan.name.to_s.length > n_width
          i_width = plan_id.to_s.length if plan_id.length > i_width
          s_width = plan.state.to_s.length if plan.state.to_s.length > s_width
          c_width = plan.created_at.to_s.length if plan.created_at.to_s.length > c_width
          [plan.name, plan_id, plan.state, plan.created_at]
        end
        table = ui.table(self) do
          table(:border => false) do
            row(:header => true) do
              column "Plan Name", :width => n_width + 5
              column "Plan ID", :width => i_width + 5
              column "Plan State", :width => s_width + 5
              column "Created", :width => c_width + 5
            end
            plan_info.sort_by(&:first).each do |plan|
              row do
                plan.each do |item|
                  column item
                end
              end
            end
          end
        end.display
      end
    end
  end
end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join('tmp/caching-dev.txt').exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => "public, max-age=#{2.days.seconds.to_i}"
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # BetterErrors and web_console only work when accessing from localhost.
  # If you are running the development server on a remote machine, set
  # TRUSTED_IP (see .env.bak) or run "TRUSTED_IP=192.168.1.1 foreman start".
  if ENV['TRUSTED_IP']
    # allow better_errors from addresses other than localhost
    BetterErrors::Middleware.allow_ip! ENV['TRUSTED_IP']
    # allow web_console from addresses other than localhost
    config.web_console.whitelisted_ips = ENV['TRUSTED_IP']
    puts "=> Trusted IP #{ENV['TRUSTED_IP']}"
  end

  # Mailer configuration for inquiries/requests
  config.action_mailer.perform_deliveries = false

  puts "=> Auth Endpoint #{ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']}" if ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']

  # Render application-styled error pages instead of the default static ones.
  config.exceptions_app = ->(env) { ErrorsController.action(:show).call(env) }
  # Tag log lines with the request UUID for correlation.
  config.log_tags = [ :uuid ]
  config.middleware.use TaggedExceptionsMiddleware
end
remove better errors for trusted_ip
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join('tmp/caching-dev.txt').exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => "public, max-age=#{2.days.seconds.to_i}"
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # web_console only works when accessing from localhost.
  # If you are running the development server on a remote machine, set
  # TRUSTED_IP (see .env.bak) or run "TRUSTED_IP=192.168.1.1 foreman start".
  if ENV['TRUSTED_IP']
    # allow web_console from addresses other than localhost
    config.web_console.whitelisted_ips = ENV['TRUSTED_IP']
    puts "=> Trusted IP #{ENV['TRUSTED_IP']}"
  end

  # Mailer configuration for inquiries/requests
  config.action_mailer.perform_deliveries = false

  puts "=> Auth Endpoint #{ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']}" if ENV['MONSOON_OPENSTACK_AUTH_API_ENDPOINT']

  # Render application-styled error pages instead of the default static ones.
  config.exceptions_app = ->(env) { ErrorsController.action(:show).call(env) }
  # Tag log lines with the request UUID for correlation.
  config.log_tags = [ :uuid ]
  config.middleware.use TaggedExceptionsMiddleware
end
|
# encoding: UTF-8
module Signore class Database
  # Returns the in-memory array of loaded signatures.
  def self.db
    @db
  end

  # Picks a random signature tagged with all of opts[:tags] and none of
  # opts[:no_tags]. Returns nil when nothing matches.
  def self.find opts = {}
    opts = {:tags => [], :no_tags => []}.merge opts
    @db
      .select { |sig| opts[:tags].all?    { |tag| sig.tagged_with? tag } }
      .reject { |sig| opts[:no_tags].any? { |tag| sig.tagged_with? tag } }
      .sample # idiomatic (and cheaper) than shuffle.first
  end

  # Loads signatures from the YAML file at path; missing file → empty db.
  def self.load path
    @path = path
    # FIX: File.exists? was deprecated and removed in Ruby 3.2 — use File.exist?
    @db = File.exist?(@path) ? YAML.load_file(@path) : []
  end

  # Serialises the db to a minimal YAML document (one struct entry per
  # signature, only the populated fields, tags as an inline sequence).
  def self.min_yaml
    [
      '---',
      @db.map do |sig|
        yaml = ['- !ruby/struct:Signore::Signature']
        [:text, :author, :subject, :source].map { |e| [e, sig[e]] }.select(&:last).each do |elem, string|
          yaml << "  :#{elem}: #{self.yamlify(string)}"
        end
        yaml << "  :tags: [#{sig.tags.join ', '}]" if sig.tags
        yaml
      end,
    ].join("\n") + "\n"
  end

  # Appends sig to the db and rewrites the YAML file on disk.
  def self.save sig
    @db << sig
    FileUtils.mkpath File.dirname @path
    File.open(@path, 'w') { |file| file << self.min_yaml }
  end

  private

  # Quotes/escapes a scalar so it round-trips through YAML:
  # multi-line → literal block, "key: value"-looking → single-quoted.
  def self.yamlify string
    case
    when string.include?("\n") then "|-\n" + string.gsub(/^/, '  ')
    when string.include?(': ') then "'#{string.gsub "'", "''"}'"
    else string
    end
  end
end end
refactor Database.yamlify to use regex matching
# encoding: UTF-8
module Signore class Database
  # Returns the in-memory array of loaded signatures.
  def self.db
    @db
  end

  # Picks a random signature tagged with all of opts[:tags] and none of
  # opts[:no_tags]. Returns nil when nothing matches.
  def self.find opts = {}
    opts = {:tags => [], :no_tags => []}.merge opts
    @db
      .select { |sig| opts[:tags].all?    { |tag| sig.tagged_with? tag } }
      .reject { |sig| opts[:no_tags].any? { |tag| sig.tagged_with? tag } }
      .sample # idiomatic (and cheaper) than shuffle.first
  end

  # Loads signatures from the YAML file at path; missing file → empty db.
  def self.load path
    @path = path
    # FIX: File.exists? was deprecated and removed in Ruby 3.2 — use File.exist?
    @db = File.exist?(@path) ? YAML.load_file(@path) : []
  end

  # Serialises the db to a minimal YAML document (one struct entry per
  # signature, only the populated fields, tags as an inline sequence).
  def self.min_yaml
    [
      '---',
      @db.map do |sig|
        yaml = ['- !ruby/struct:Signore::Signature']
        [:text, :author, :subject, :source].map { |e| [e, sig[e]] }.select(&:last).each do |elem, string|
          yaml << "  :#{elem}: #{self.yamlify(string)}"
        end
        yaml << "  :tags: [#{sig.tags.join ', '}]" if sig.tags
        yaml
      end,
    ].join("\n") + "\n"
  end

  # Appends sig to the db and rewrites the YAML file on disk.
  def self.save sig
    @db << sig
    FileUtils.mkpath File.dirname @path
    File.open(@path, 'w') { |file| file << self.min_yaml }
  end

  private

  # Quotes/escapes a scalar so it round-trips through YAML:
  # multi-line → literal block, "key: value"-looking → single-quoted.
  def self.yamlify string
    case string
    when /\n/ then "|-\n" + string.gsub(/^/, '  ')
    when /: / then "'#{string.gsub "'", "''"}'"
    else string
    end
  end
end end
|
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  # FIX: was `true`, contradicting the comment above — controller caching
  # should be disabled in development.
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
end
hahaha no
[ci skip]
# Development environment settings: code reloading on, caching off,
# verbose asset/deprecation diagnostics.
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports and disable caching.
  config.consider_all_requests_local = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Adds additional error checking when serving assets at runtime.
  # Checks for improperly declared sprockets dependencies.
  # Raises helpful error messages.
  config.assets.raise_runtime_errors = true

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true
end
|
module Simtick
  # Linear attack / sustain / release amplitude envelope driven by an
  # integer tick counter.
  class Envelope
    # velocity:     peak level reached at the end of the attack phase
    # attack_time:  ticks to ramp linearly from 0 up to velocity
    # sustain_time: ticks to hold at velocity (default 0)
    # release_time: ticks to ramp linearly back down to 0 (default 0)
    def initialize(velocity:, attack_time:, sustain_time: 0, release_time: 0)
      @velocity = velocity
      @attack_time = attack_time
      @sustain_time = sustain_time
      @release_time = release_time
    end

    # Envelope level at the given tick.
    def level(ticker)
      if ticker <= @attack_time
        # linear ramp up (to_f avoids integer division)
        @velocity * (ticker / @attack_time.to_f)
      elsif ticker <= @attack_time + @sustain_time
        @velocity
      else
        t = ticker - @attack_time - @sustain_time
        # FIX: clamp with `max`, not `min` — `min` returned negative levels
        # once the release phase had elapsed. Also use float division so an
        # integer release_time doesn't truncate the ramp to a step function.
        @release_time > 0 ? [0.0, @velocity * (1.0 - t.to_f / @release_time)].max : 0.0
      end
    end

    # True once attack + sustain + release have all elapsed.
    def finished?(ticker)
      ticker > @attack_time + @sustain_time + @release_time
    end
  end
end
Fix bug
module Simtick
  # Linear attack / sustain / release amplitude envelope driven by an
  # integer tick counter.
  class Envelope
    # velocity:     peak level reached at the end of the attack phase
    # attack_time:  ticks to ramp linearly from 0 up to velocity
    # sustain_time: ticks to hold at velocity (default 0)
    # release_time: ticks to ramp linearly back down to 0 (default 0)
    def initialize(velocity:, attack_time:, sustain_time: 0, release_time: 0)
      @velocity = velocity
      @attack_time = attack_time
      @sustain_time = sustain_time
      @release_time = release_time
    end

    # Envelope level at the given tick.
    def level(ticker)
      if ticker <= @attack_time
        # linear ramp up (to_f avoids integer division)
        @velocity * (ticker / @attack_time.to_f)
      elsif ticker <= @attack_time + @sustain_time
        @velocity
      else
        t = ticker - @attack_time - @sustain_time
        # FIX: use float division — with integer t and release_time,
        # `t / @release_time` truncated to 0 or 1, turning the linear
        # release ramp into a step function.
        @release_time > 0 ? [0.0, @velocity * (1.0 - t.to_f / @release_time)].max : 0.0
      end
    end

    # True once attack + sustain + release have all elapsed.
    def finished?(ticker)
      ticker > @attack_time + @sustain_time + @release_time
    end
  end
end
|
# Settings specified here will take precedence over those in config/environment.rb
TypoBlog::Application.configure do
  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the webserver when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_view.debug_rjs = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send
  config.action_mailer.raise_delivery_errors = false

  # Redirect ActiveRecord logging to the given stream (e.g. STDOUT from a
  # console session) and drop cached connections so the new logger takes effect.
  def log_to(stream)
    ActiveRecord::Base.logger = Logger.new(stream)
    ActiveRecord::Base.clear_active_connections!
  end

  config.log_level = :debug
end
Log deprecation warning in development
# Settings specified here will take precedence over those in config/environment.rb
TypoBlog::Application.configure do
  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the webserver when you make code changes.
  config.cache_classes = false

  # Log error messages when you accidentally call methods on nil.
  config.whiny_nils = true

  # Show full error reports and disable caching
  config.consider_all_requests_local = true
  config.action_view.debug_rjs = true
  config.action_controller.perform_caching = false

  # Don't care if the mailer can't send
  config.action_mailer.raise_delivery_errors = false

  # Log deprecation warnings to the Rails logger in development.
  config.active_support.deprecation = :log

  # Redirect ActiveRecord logging to the given stream (e.g. STDOUT from a
  # console session) and drop cached connections so the new logger takes effect.
  def log_to(stream)
    ActiveRecord::Base.logger = Logger.new(stream)
    ActiveRecord::Base.clear_active_connections!
  end

  config.log_level = :debug
end
|
require 'sinatra/json_rpc/version'
require 'sinatra/json_rpc/errors'
require 'sinatra/json_rpc/request'
require 'sinatra/json_rpc/response'
require 'sinatra/param'
require 'sinatra/base'
require 'multi_json'
# FIX: feature names are case-sensitive — `require 'JSON'` fails on
# case-sensitive filesystems (the stdlib file is json.rb).
require 'json'

module Sinatra
  module JsonRpc
    module Helpers
      # Builds a JSON-RPC error response for the given error code,
      # echoing the request id when one is known. data is optional.
      def send_error(code, data = nil)
        resp = Sinatra::JsonRpc::Response.new
        if @rpc_req
          resp.id = @rpc_req.id unless @rpc_req.id.nil?
        end
        resp.error = code
        resp.error.data = data unless data.nil?
        resp.to_json
      end

      # Builds a JSON-RPC success response carrying result.
      def send_result(result)
        halt 204 if @rpc_req.id.nil? # JSON-RPC requests without an ID are notifications, requiring
        resp = Sinatra::JsonRpc::Response.new(@rpc_req.id)
        resp.result = result
        resp.to_json
      end
    end

    def self.registered(app)
      app.helpers Sinatra::JsonRpc::Helpers, Sinatra::Param

      # Create a Sinatra::JsonRpc::Request from request body
      app.before do
        raise Sinatra::JsonRpc::ParseError unless request.media_type == 'application/json'
        @rpc_req = Sinatra::JsonRpc::Request.new.from_json(request.body.read)
        @rpc_req.valid?
        if @rpc_req.params
          if @rpc_req.params.is_a?(Array)
            @params[:splat] = *@rpc_req.params
          else
            # FIX: Sinatra's params hash is string-keyed — symbolizing the
            # keys here created duplicate entries, so store them as-is.
            @rpc_req.params.each { |k,v| params[k] = v }
          end
        end
      end

      # Test whether or not the conditional route matches the JSON-RPC method contained in the request
      app.set(:method) { |value| condition { @rpc_req.method == value } }

      app.not_found do
        status 400
        send_error -32601
      end
      [ Sinatra::JsonRpc::ParseError, MultiJson::LoadError ].each do |err|
        app.error err do
          status 400
          send_error -32700
        end
      end
      app.error Sinatra::JsonRpc::RequestError do
        status 400
        send_error -32600
      end
      app.error Sinatra::JsonRpc::ResponseError do
        status 500
        send_error -32603
      end
      app.error 400 do
        send_error -32602 if body.first.start_with?("Invalid parameter")
      end
    end
  end

  register JsonRpc
end
Fixed use of symbolized keys in params (should be string)
require 'sinatra/json_rpc/version'
require 'sinatra/json_rpc/errors'
require 'sinatra/json_rpc/request'
require 'sinatra/json_rpc/response'
require 'sinatra/param'
require 'sinatra/base'
require 'multi_json'
# FIX: feature names are case-sensitive — `require 'JSON'` fails on
# case-sensitive filesystems (the stdlib file is json.rb).
require 'json'

module Sinatra
  module JsonRpc
    module Helpers
      # Builds a JSON-RPC error response for the given error code,
      # echoing the request id when one is known. data is optional.
      def send_error(code, data = nil)
        resp = Sinatra::JsonRpc::Response.new
        if @rpc_req
          resp.id = @rpc_req.id unless @rpc_req.id.nil?
        end
        resp.error = code
        resp.error.data = data unless data.nil?
        resp.to_json
      end

      # Builds a JSON-RPC success response carrying result.
      def send_result(result)
        halt 204 if @rpc_req.id.nil? # JSON-RPC requests without an ID are notifications, requiring
        resp = Sinatra::JsonRpc::Response.new(@rpc_req.id)
        resp.result = result
        resp.to_json
      end
    end

    def self.registered(app)
      app.helpers Sinatra::JsonRpc::Helpers, Sinatra::Param

      # Create a Sinatra::JsonRpc::Request from request body
      app.before do
        raise Sinatra::JsonRpc::ParseError unless request.media_type == 'application/json'
        @rpc_req = Sinatra::JsonRpc::Request.new.from_json(request.body.read)
        @rpc_req.valid?
        if @rpc_req.params
          if @rpc_req.params.is_a?(Array)
            @params[:splat] = *@rpc_req.params
          else
            # Sinatra's params hash is string-keyed; store keys as-is
            @rpc_req.params.each { |k,v| params[k] = v }
          end
        end
      end

      # Test whether or not the conditional route matches the JSON-RPC method contained in the request
      app.set(:method) { |value| condition { @rpc_req.method == value } }

      app.not_found do
        status 400
        send_error -32601
      end
      [ Sinatra::JsonRpc::ParseError, MultiJson::LoadError ].each do |err|
        app.error err do
          status 400
          send_error -32700
        end
      end
      app.error Sinatra::JsonRpc::RequestError do
        status 400
        send_error -32600
      end
      app.error Sinatra::JsonRpc::ResponseError do
        status 500
        send_error -32603
      end
      app.error 400 do
        send_error -32602 if body.first.start_with?("Invalid parameter")
      end
    end
  end

  register JsonRpc
end
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true
  config.action_mailer.default_url_options = { host: "localhost" }

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join("tmp/caching-dev.txt").exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      "Cache-Control" => "public, max-age=172800"
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Store files locally.
  config.active_storage.service = :local

  config.react.variant = :development

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Web-console wants us to only connect from localhost for safety, but in
  # mac-docker we get separate ip addresses. So this grabs ALL our local ips
  # and adds them to the OK list
  # CONSISTENCY: double-quoted requires to match the rest of this file's
  # string style (rubocop Style/StringLiterals).
  require "socket"
  require "ipaddr"
  config.web_console.whitelisted_ips = Socket.ip_address_list.reduce([]) do |res, addrinfo|
    addrinfo.ipv4? ? res << IPAddr.new(addrinfo.ip_address).mask(24) : res
  end
end
Fix quote consistency for rubocop
I'm not entirely sure how this got into master without rubocop getting
mad beforehand ... must have been an order of operations thing.
# Development environment settings: code reloading on, optional file-based
# caching toggle, local ActiveStorage, and web-console access from all
# local IPv4 /24 networks (for docker-for-mac setups).
Rails.application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true
  config.action_mailer.default_url_options = { host: "localhost" }

  # Enable/disable caching. By default caching is disabled.
  if Rails.root.join("tmp/caching-dev.txt").exist?
    config.action_controller.perform_caching = true
    config.cache_store = :memory_store
    config.public_file_server.headers = {
      "Cache-Control" => "public, max-age=172800"
    }
  else
    config.action_controller.perform_caching = false
    config.cache_store = :null_store
  end

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false
  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Store files locally.
  config.active_storage.service = :local

  config.react.variant = :development

  # Raises error for missing translations
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Web-console wants us to only connect from localhost for safety, but in
  # mac-docker we get separate ip addresses. So this grabs ALL our local ips
  # and adds them to the OK list
  require "socket"
  require "ipaddr"
  config.web_console.whitelisted_ips = Socket.ip_address_list.reduce([]) do |res, addrinfo|
    addrinfo.ipv4? ? res << IPAddr.new(addrinfo.ip_address).mask(24) : res
  end
end
|
module Sisimai
# Sisimai::DateTime provide methods for dealing date and time.
module DateTime
# Imported from p5-Sisimail/lib/Sisimai/DateTime.pm
require 'date'
class << self
# Base quantities used to derive the TimeUnit table below.
BASE_D = 86400    # 1 day = 86400 sec
BASE_Y = 365.2425 # 1 year = 365.2425 days
BASE_L = 29.53059 # 1 lunar month = 29.53059 days
CONST_P = 4 * Math.atan2(1, 1) # PI, 3.1415926535
CONST_E = Math.exp(1)          # e, Napier's constant
TZ_OFFSET = 54000              # Max time zone offset, 54000 seconds
# Seconds per unit, keyed by the single-letter unit accepted by to_second().
TimeUnit = {
  o: (BASE_D * BASE_Y * 4), # Olympiad, 4 years
  y: (BASE_D * BASE_Y),     # Year, Gregorian Calendar
  q: (BASE_D * BASE_Y / 4), # Quarter, year/4
  l: (BASE_D * BASE_L),     # Lunar month
  f: (BASE_D * 14),         # Fortnight, 2 weeks
  w: (BASE_D * 7),          # Week, 604800 seconds
  d: BASE_D,                # Day
  h: 3600,                  # Hour
  b: 86.4,                  # Beat, Swatch internet time: 1000b = 1d
  m: 60,                    # Minute
  s: 1,                     # Second
}.freeze
# Mathematical multipliers accepted by to_second(): e, pi, and e**pi.
MathematicalConstant = {
  e: CONST_E,
  p: CONST_P,
  g: CONST_E**CONST_P,
}.freeze
MonthName = {
  full: %w[January February March April May June July August September October November December],
  abbr: %w[Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec],
}.freeze
# Note: no stray space inside the percent literals (RuboCop:
# Style/SpaceInsidePercentLiteralDelimiters).
DayOfWeek = {
  full: %w[Sunday Monday Tuesday Wednesday Thursday Friday Saturday],
  abbr: %w[Sun Mon Tue Wed Thu Fri Sat],
}.freeze
HourName = {
  full: [
    'Midnight', 1, 2, 3, 4, 5, 'Morning', 7, 8, 9, 10, 11,
    'Noon', 13, 14, 15, 16, 17, 'Evening', 19, 20, 21, 22, 23,
  ],
  # (0..23).to_a: the previous literal [0..23] was a one-element Array
  # holding a Range, not the list of hours documented by hourname().
  abbr: (0..23).to_a,
}.freeze
# Timezone abbreviation => RFC 2822 numeric UTC offset.
# Ambiguous or unused abbreviations are kept commented out for reference.
# http://en.wikipedia.org/wiki/List_of_time_zone_abbreviations
TimeZoneAbbr = {
  # ACDT: '+1030', # Australian Central Daylight Time UTC+10:30
  # ACST: '+0930', # Australian Central Standard Time UTC+09:30
  # ACT: '+0800', # ASEAN Common Time UTC+08:00
  ADT: '-0300', # Atlantic Daylight Time UTC-03:00
  # AEDT: '+1100', # Australian Eastern Daylight Time UTC+11:00
  # AEST: '+1000', # Australian Eastern Standard Time UTC+10:00
  # AFT: '+0430', # Afghanistan Time UTC+04:30
  AKDT: '-0800', # Alaska Daylight Time UTC-08:00
  AKST: '-0900', # Alaska Standard Time UTC-09:00
  # AMST '+0500', # Armenia Summer Time UTC+05:00
  # AMT '+0400', # Armenia Time UTC+04:00
  # ART '-0300', # Argentina Time UTC+03:00
  # AST '+0300', # Arab Standard Time (Kuwait, Riyadh) UTC+03:00
  # AST '+0400', # Arabian Standard Time (Abu Dhabi, Muscat) UTC+04:00
  # AST '+0300', # Arabic Standard Time (Baghdad) UTC+03:00
  AST: '-0400', # Atlantic Standard Time UTC-04:00
  # AWDT: '+0900', # Australian Western Daylight Time UTC+09:00
  # AWST: '+0800', # Australian Western Standard Time UTC+08:00
  # AZOST: '-0100', # Azores Standard Time UTC-01:00
  # AZT: '+0400', # Azerbaijan Time UTC+04:00
  # BDT: '+0800', # Brunei Time UTC+08:00
  # BIOT: '+0600', # British Indian Ocean Time UTC+06:00
  # BIT: '-1200', # Baker Island Time UTC-12:00
  # BOT: '-0400', # Bolivia Time UTC-04:00
  # BRT: '-0300', # Brasilia Time UTC-03:00
  # BST: '+0600', # Bangladesh Standard Time UTC+06:00
  # BST: '+0100', # British Summer Time (British Standard Time from Feb 1968 to Oct 1971) UTC+01:00
  # BTT: '+0600', # Bhutan Time UTC+06:00
  # CAT: '+0200', # Central Africa Time UTC+02:00
  # CCT: '+0630', # Cocos Islands Time UTC+06:30
  CDT: '-0500', # Central Daylight Time (North America) UTC-05:00
  # CEDT: '+0200', # Central European Daylight Time UTC+02:00
  # CEST: '+0200', # Central European Summer Time UTC+02:00
  # CET: '+0100', # Central European Time UTC+01:00
  # CHAST: '+1245', # Chatham Standard Time UTC+12:45
  # CIST: '-0800', # Clipperton Island Standard Time UTC-08:00
  # CKT: '-1000', # Cook Island Time UTC-10:00
  # CLST '-0300', # Chile Summer Time UTC-03:00
  # CLT '-0400', # Chile Standard Time UTC-04:00
  # COST '-0400', # Colombia Summer Time UTC-04:00
  # COT '-0500', # Colombia Time UTC-05:00
  CST: '-0600', # Central Standard Time (North America) UTC-06:00
  # CST: '+0800', # China Standard Time UTC+08:00
  # CVT: '-0100', # Cape Verde Time UTC-01:00
  # CXT: '+0700', # Christmas Island Time UTC+07:00
  # ChST: '+1000', # Chamorro Standard Time UTC+10:00
  # DST: '' # Daylight saving time Depending
  # DFT: '+0100', # AIX specific equivalent of Central European Time UTC+01:00
  # EAST: '-0600', # Easter Island Standard Time UTC-06:00
  # EAT: '+0300', # East Africa Time UTC+03:00
  # ECT: '-0400', # Eastern Caribbean Time (does not recognise DST) UTC-04:00
  # ECT: '-0500', # Ecuador Time UTC-05:00
  EDT: '-0400', # Eastern Daylight Time (North America) UTC-04:00
  # EEDT: '+0300', # Eastern European Daylight Time UTC+03:00
  # EEST: '+0300', # Eastern European Summer Time UTC+03:00
  # EET: '+0200', # Eastern European Time UTC+02:00
  # Sign fixed: EST is west of Greenwich; it was wrongly listed as '+0500'.
  EST: '-0500', # Eastern Standard Time (North America) UTC-05:00
  # FJT: '+1200', # Fiji Time UTC+12:00
  # FKST: '-0400', # Falkland Islands Standard Time UTC-04:00
  # GALT: '-0600', # Galapagos Time UTC-06:00
  # GET: '+0400', # Georgia Standard Time UTC+04:00
  # GFT: '-0300', # French Guiana Time UTC-03:00
  # GILT: '+1200', # Gilbert Island Time UTC+12:00
  # GIT: '-0900', # Gambier Island Time UTC-09:00
  GMT: '+0000', # Greenwich Mean Time UTC
  # :GST '-0200', # South Georgia and the South Sandwich Islands UTC-02:00
  # :GYT '-0400', # Guyana Time UTC-04:00
  HADT: '-0900', # Hawaii-Aleutian Daylight Time UTC-09:00
  HAST: '-1000', # Hawaii-Aleutian Standard Time UTC-10:00
  # HKT: '+0800', # Hong Kong Time UTC+08:00
  # HMT: '+0500', # Heard and McDonald Islands Time UTC+05:00
  HST: '-1000', # Hawaii Standard Time UTC-10:00
  # IRKT: '+0800', # Irkutsk Time UTC+08:00
  # IRST: '+0330', # Iran Standard Time UTC+03:30
  # IST: '+0530', # Indian Standard Time UTC+05:30
  # IST: '+0100', # Irish Summer Time UTC+01:00
  # IST: '+0200', # Israel Standard Time UTC+02:00
  JST: '+0900', # Japan Standard Time UTC+09:00
  # KRAT: '+0700', # Krasnoyarsk Time UTC+07:00
  # KST: '+0900', # Korea Standard Time UTC+09:00
  # LHST: '+1030', # Lord Howe Standard Time UTC+10:30
  # LINT: '+1400', # Line Islands Time UTC+14:00
  # MAGT: '+1100', # Magadan Time UTC+11:00
  MDT: '-0600', # Mountain Daylight Time(North America) UTC-06:00
  # MIT: '-0930', # Marquesas Islands Time UTC-09:30
  # MSD: '+0400', # Moscow Summer Time UTC+04:00
  # MSK: '+0300', # Moscow Standard Time UTC+03:00
  # MST: '+0800', # Malaysian Standard Time UTC+08:00
  MST: '-0700', # Mountain Standard Time(North America) UTC-07:00
  # MST: '+0630', # Myanmar Standard Time UTC+06:30
  # MUT: '+0400', # Mauritius Time UTC+04:00
  # NDT: '-0230', # Newfoundland Daylight Time UTC-02:30
  # NFT: '+1130', # Norfolk Time[1] UTC+11:30
  # NPT: '+0545', # Nepal Time UTC+05:45
  # NST: '-0330', # Newfoundland Standard Time UTC-03:30
  # NT: '-0330', # Newfoundland Time UTC-03:30
  # OMST: '+0600', # Omsk Time UTC+06:00
  PDT: '-0700', # Pacific Daylight Time(North America) UTC-07:00
  # PETT: '+1200', # Kamchatka Time UTC+12:00
  # PHOT: '+1300', # Phoenix Island Time UTC+13:00
  # PKT: '+0500', # Pakistan Standard Time UTC+05:00
  PST: '-0800', # Pacific Standard Time (North America) UTC-08:00
  # PST: '+0800', # Philippine Standard Time UTC+08:00
  # RET: '+0400', # Reunion Time UTC+04:00
  # SAMT: '+0400', # Samara Time UTC+04:00
  # SAST: '+0200', # South African Standard Time UTC+02:00
  # SBT: '+1100', # Solomon Islands Time UTC+11:00
  # SCT: '+0400', # Seychelles Time UTC+04:00
  # SLT: '+0530', # Sri Lanka Time UTC+05:30
  # SST: '-1100', # Samoa Standard Time UTC-11:00
  # SST: '+0800', # Singapore Standard Time UTC+08:00
  # TAHT: '-1000', # Tahiti Time UTC-10:00
  # THA: '+0700', # Thailand Standard Time UTC+07:00
  UT: '-0000', # Coordinated Universal Time UTC
  UTC: '-0000', # Coordinated Universal Time UTC
  # UYST: '-0200', # Uruguay Summer Time UTC-02:00
  # UYT: '-0300', # Uruguay Standard Time UTC-03:00
  # VET: '-0430', # Venezuelan Standard Time UTC-04:30
  # VLAT: '+1000', # Vladivostok Time UTC+10:00
  # WAT: '+0100', # West Africa Time UTC+01:00
  # WEDT: '+0100', # Western European Daylight Time UTC+01:00
  # WEST: '+0100', # Western European Summer Time UTC+01:00
  # WET: '-0000', # Western European Time UTC
  # YAKT: '+0900', # Yakutsk Time UTC+09:00
  # YEKT: '+0500', # Yekaterinburg Time UTC+05:00
}.freeze
# Convert a "digit + unit of time" string into seconds.
# @param [String] argv1 Digit and a unit of time, e.g. "1d", "2h", "1.5w";
#                       an optional mathematical-constant multiplier may
#                       precede the unit, e.g. "1pd", "pe"
# @return [Integer,Float] n: seconds
#                         0: 0 or invalid unit of time
# @example Get the value of seconds
#   to_second('1d') #=> 86400
#   to_second('2h') #=> 7200
def to_second(argv1)
  return 0 unless argv1.is_a?(::String)
  getseconds = 0
  unitoftime = TimeUnit.keys.join
  mathconsts = MathematicalConstant.keys.join
  if cr = argv1.match(/\A(\d+|\d+[.]\d+)([#{unitoftime}])?\z/)
    # Plain coefficient: 1d, 1.5w; the unit defaults to days.
    n = cr[1].to_f
    u = cr[2] || 'd'
    getseconds = n * TimeUnit[u.to_sym].to_f
  elsif cr = argv1.match(/\A(\d+|\d+[.]\d+)?([#{mathconsts}])([#{unitoftime}])?\z/)
    # Coefficient with a mathematical constant: 1pd, 1.5pw, or just "pd".
    # The coefficient is optional and defaults to 1. The previous code
    # ("n = cr[1].to_f || 1" followed by "n = 1 if n.to_i.zero?") had a
    # dead "|| 1" (to_f never returns nil) and silently rounded any
    # fractional coefficient below 1 (e.g. "0.5pd") up to 1.
    n = cr[1] ? cr[1].to_f : 1.0
    m = MathematicalConstant[cr[2].to_sym].to_f
    u = cr[3] || 'd'
    getseconds = n * m * TimeUnit[u.to_sym].to_f
  else
    getseconds = 0
  end
  return getseconds
end
# Month name list
# @param [Integer] argv1 Non-zero to get full names, 0 (default) for abbrs
# @return [Array] Month name list
# @example Get the names of each month
#   monthname()  #=> [ 'Jan', 'Feb', ... ]
#   monthname(1) #=> [ 'January', 'February', 'March', ... ]
def monthname(argv1 = 0)
  return MonthName[argv1 > 0 ? :full : :abbr]
end
# List of day of week
# @param [Integer] argv1 Non-zero to get full names, 0 (default) for abbrs
# @return [Array] List of day-of-week names
# @example Get the names of each day of week
#   dayofweek()  #=> [ 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat' ]
#   dayofweek(1) #=> [ 'Sunday', 'Monday', 'Tuesday', ... ]
def dayofweek(argv1 = 0)
  namesize = argv1 > 0 ? :full : :abbr
  DayOfWeek[namesize]
end
# Hour name list
# @param [Integer] argv1 Non-zero (default 1) to get named hours
# @return [Array] Hour name list
# @example Get the names of each hour
#   hourname(0) #=> [ 0, 1, 2, ... 23 ]
#   hourname(1) #=> [ 'Midnight', 1, 2, ... 'Morning', 7, ... 'Noon', ... 23 ]
def hourname(argv1 = 1)
  if argv1 > 0
    HourName[:full]
  else
    HourName[:abbr]
  end
end
# Convert a day offset into a date string, counted back from today.
# @param [Integer,String] argv1 Offset in days (positive values go back)
# @param [String] argv2 Delimiter character: default is '-'
# @return [String] Date string; today's date in '%Y/%m/%d' format when
#                  argv1 is not an integer-like value
# @example Get the value of n days before(today is 2015/11/04)
#   o2d(1)     #=> 2015-11-03
#   o2d(2,'/') #=> 2015/11/02
def o2d(argv1 = 0, argv2 = '-')
  piece = ::DateTime.now
  # Coerce to String before matching: the previous "argv1 =~ /.../" never
  # matched the documented Integer argument (Object#=~ returned nil, and
  # was removed entirely in Ruby 3.2), so integer offsets were ignored.
  return piece.strftime('%Y/%m/%d') unless argv1.to_s =~ /\A[-]?\d+\z/
  epoch = piece.to_time.to_i - argv1.to_i * 86400
  if epoch < 0
    # Negative value: clamp at the UNIX epoch
    epoch = 0
  elsif epoch >= 2**31
    # See http://en.wikipedia.org/wiki/Year_2038_problem
    epoch = 2**31 - 1
  end
  return Time.at(epoch).strftime('%Y' + argv2 + '%m' + argv2 + '%d')
end
# Parse date string; strptime() wrapper
# @param [String] argv1 Date string
# @return [String,nil] Converted date string, or nil when argv1 cannot be
#                      parsed as a date
# @see http://en.wikipedia.org/wiki/ISO_8601
# @see http://www.ietf.org/rfc/rfc3339.txt
# @example Parse date string and convert to generic format string
#   parse("2015-11-03T23:34:45 Tue") #=> Tue, 3 Nov 2015 23:34:45 +0900
#   parse("Tue, Nov 3 2015 2:2:2")   #=> Tue, 3 Nov 2015 02:02:02 +0900
def parse(argv1)
  return nil unless argv1.is_a?(::String)
  return nil unless argv1.size > 0
  datestring = argv1
  datestring = datestring.sub(/[,](\d+)/, ', \1') # Thu,13 -> Thu, 13
  datestring = datestring.sub(/(\d{1,2}),/, '\1') # Apr,29 -> Apr 29
  timetokens = datestring.split(' ')
  afternoon1 = 0  # (Integer) After noon flag
  altervalue = {} # (Hash) To store alternative values
  v = {
    Y: nil, # (Integer) Year
    M: nil, # (String) Month Abbr.
    d: nil, # (Integer) Day
    a: nil, # (String) Day of week, Abbr.
    T: nil, # (String) Time
    z: nil, # (Integer) Timezone offset
  }
  while p = timetokens.shift do
    # Parse each piece of time
    if p =~ /\A[A-Z][a-z]{2}[,]?\z/
      # Day of week or month name; Thu, Apr, ...
      # String#chop does not mutate its receiver, so the result must be
      # assigned back; the previous "p.chop if ..." was a no-op and tokens
      # such as "Apr," were never matched below.
      p = p.chop if p.length == 4 # Thu, -> Thu
      if DayOfWeek[:abbr].include?(p)
        # Day of week; Mon, Thu, Sun,...
        v[:a] = p
      elsif MonthName[:abbr].include?(p)
        # Month name abbr.; Apr, May, ...
        v[:M] = p
      end
    elsif p =~ /\A\d{1,4}\z/
      # Year or Day; 2005, 31, 04, 1, ...
      if p.to_i > 31
        # The piece is the value of an year
        v[:Y] = p
      else
        # The piece is the value of a day
        if v[:d]
          # 2-digit year?
          altervalue[:Y] = p unless v[:Y]
        else
          # The value is "day"
          v[:d] = p
        end
      end
    elsif cr = p.match(/\A([0-2]\d):([0-5]\d):([0-5]\d)\z/) ||
          p.match(/\A(\d{1,2})[-:](\d{1,2})[-:](\d{1,2})\z/)
      # Time; 12:34:56, 03:14:15, ...
      # Arrival-Date: 2014-03-26 00-01-19
      if cr[1].to_i < 24 && cr[2].to_i < 60 && cr[3].to_i < 60
        # Valid time format, maybe...
        v[:T] = sprintf('%02d:%02d:%02d', cr[1].to_i, cr[2].to_i, cr[3].to_i)
      end
    elsif cr = p.match(/\A([0-2]\d):([0-5]\d)\z/)
      # Time; 12:34 => 12:34:00
      # .to_i is required here: sprintf('%d', '08') raises ArgumentError
      # because a zero-padded String argument is read as an octal number.
      if cr[1].to_i < 24 && cr[2].to_i < 60
        v[:T] = sprintf('%02d:%02d:00', cr[1].to_i, cr[2].to_i)
      end
    elsif cr = p.match(/\A(\d\d?):(\d\d?)\z/)
      # Time: 1:4 => 01:04:00
      v[:T] = sprintf('%02d:%02d:00', cr[1].to_i, cr[2].to_i)
    elsif p =~ /\A[APap][Mm]\z/
      # AM or PM
      afternoon1 = 1
    else
      # Timezone offset and others
      if p =~ /\A[-+][01]\d{3}\z/
        # Timezone offset; +0000, +0900, -1000, ...
        v[:z] ||= p
      elsif p =~ /\A[(]?[A-Z]{2,5}[)]?\z/
        # Timezone abbreviation; JST, GMT, UTC, ...
        v[:z] ||= abbr2tz(p) || '+0000'
      else
        # Other date format
        if cr = p.match(%r|\A(\d{4})[-/](\d{1,2})[-/](\d{1,2})\z|)
          # Mail.app(MacOS X)'s faked Bounce, Arrival-Date: 2010-06-18 17:17:52 +0900
          v[:Y] = cr[1].to_i
          v[:M] = MonthName[:abbr][cr[2].to_i - 1]
          v[:d] = cr[3].to_i
        elsif cr = p.match(%r|\A(\d{4})[-/](\d{1,2})[-/](\d{1,2})T([0-2]\d):([0-5]\d):([0-5]\d)\z|)
          # ISO 8601; 2000-04-29T01:23:45
          v[:Y] = cr[1].to_i
          v[:M] = MonthName[:abbr][cr[2].to_i - 1]
          v[:d] = cr[3].to_i if cr[3].to_i < 32
          if cr[4].to_i < 24 && cr[5].to_i < 60 && cr[6].to_i < 60
            v[:T] = sprintf('%02d:%02d:%02d', cr[4].to_i, cr[5].to_i, cr[6].to_i)
          end
        elsif cr = p.match(%r|\A(\d{1,2})/(\d{1,2})/(\d{1,2})\z|)
          # 4/29/01 11:34:45 PM
          v[:M] = MonthName[:abbr][cr[1].to_i - 1]
          v[:d] = cr[2].to_i
          v[:Y] = cr[3].to_i + 2000
          v[:Y] -= 100 if v[:Y].to_i > ::DateTime.now.year + 1
        end
      end
    end
  end # End of while()
  if v[:T] && afternoon1 > 0
    # Shift the hour by +12 for PM, unless already in 24-hour form
    t0 = v[:T]
    t1 = v[:T].split(':')
    v[:T] = sprintf('%02d:%02d:%02d', t1[0].to_i + 12, t1[1].to_i, t1[2].to_i)
    v[:T] = t0 if t1[0].to_i > 12
  end
  v[:a] ||= 'Thu' # There is no day of week
  if !v[:Y].nil? && v[:Y].to_i < 200
    # 99 -> 1999, 102 -> 2002
    v[:Y] = v[:Y].to_i + 1900
  end
  # Fall back to the local timezone offset, e.g. "+09:00" -> "+0900"
  v[:z] ||= ::DateTime.now.zone.delete(':')
  # Adjust 2-digit Year
  if altervalue[:Y] && !v[:Y]
    # Check alternative value(Year)
    v[:Y] ||= if altervalue[:Y].to_i >= 82
                # SMTP was born in 1982
                1900 + altervalue[:Y].to_i
              else
                # 20XX
                2000 + altervalue[:Y].to_i
              end
  end
  # Check each piece
  if v.value?(nil)
    # Strange date format
    warn sprintf(' ***warning: Strange date format [%s]', datestring)
    return nil
  end
  if v[:Y].to_i < 1902 || v[:Y].to_i > 2037
    # -(2^31) ~ (2^31)
    return nil
  end
  # Build date string
  # Thu, 29 Apr 2004 10:01:11 +0900
  return sprintf('%s, %s %s %s %s %s', v[:a], v[:d], v[:M], v[:Y], v[:T], v[:z])
end
# Abbreviation -> Timezone offset string
# @param [String] argv1 Abbr. e.g.) JST, GMT, PDT
# @return [String, nil] '+0900', '+0000', '-0600', or nil when the
#                       argument is not a String or is an unsupported
#                       abbreviation
# @example Get the timezone string of "JST"
#   abbr2tz('JST') #=> '+0900'
def abbr2tz(argv1)
  return nil unless argv1.is_a?(::String)
  TimeZoneAbbr[argv1.to_sym]
end
# Convert a timezone offset string into seconds.
# @param [String] argv1 Timezone string e.g) +0900, or an abbreviation
# @return [Integer,nil] n: seconds, or nil when the argument is not a
#                       valid offset string or known abbreviation
# @see second2tz
# @example Convert '+0900' to seconds
#   tz2second('+0900') #=> 32400
def tz2second(argv1)
  return nil unless argv1.is_a?(::String)
  if cr = argv1.match(/\A([-+])(\d)(\d)(\d{2})\z/)
    # Numeric offset such as '+0900': hours * 3600 + minutes * 60
    hours   = cr[2].to_i * 10 + cr[3].to_i
    minutes = cr[4].to_i
    seconds = hours * 3600 + minutes * 60
    seconds = seconds * -1 if cr[1] == '-'
    return nil if seconds.abs > TZ_OFFSET
    seconds
  elsif argv1 =~ /\A[A-Za-z]+\z/
    # Abbreviation such as 'JST': resolve to an offset string and recurse
    tz2second(TimeZoneAbbr[argv1.to_sym])
  else
    nil
  end
end
# Convert seconds into a timezone offset string.
# @param [Integer,Float] argv1 Second to be converted
# @return [String] Timezone offset string such as '+0325'; '+0000' for a
#                  non-numeric argument, '' when out of range
# @see tz2second
# @example Get timezone offset string of specified seconds
#   second2tz(12345) #=> '+0325'
def second2tz(argv1)
  # Numeric, not "Number": Ruby has no Number class, so the previous
  # is_a?(Number) check raised NameError on every call.
  return '+0000' unless argv1.is_a?(Numeric)
  digit = { operator: '+' }
  return '' if argv1.abs > TZ_OFFSET # UTC+14 + 1(DST?)
  digit[:operator] = '-' if argv1 < 0
  digit[:hours] = (argv1.abs / 3600).to_i
  digit[:minutes] = ((argv1.abs % 3600) / 60).to_i
  timez = sprintf('%s%02d%02d', digit[:operator], digit[:hours], digit[:minutes])
  return timez
end
end
end
end
Remove the stray space character inside the %w|...| percent literal (RuboCop cop: Style/SpaceInsidePercentLiteralDelimiters).
module Sisimai
# Sisimai::DateTime provide methods for dealing date and time.
module DateTime
# Imported from p5-Sisimail/lib/Sisimai/DateTime.pm
require 'date'
class << self
# Base quantities used to derive the TimeUnit table below.
BASE_D = 86400    # 1 day = 86400 sec
BASE_Y = 365.2425 # 1 year = 365.2425 days
BASE_L = 29.53059 # 1 lunar month = 29.53059 days
CONST_P = 4 * Math.atan2(1, 1) # PI, 3.1415926535
CONST_E = Math.exp(1)          # e, Napier's constant
TZ_OFFSET = 54000              # Max time zone offset, 54000 seconds
# Seconds per unit, keyed by the single-letter unit accepted by to_second().
TimeUnit = {
  o: (BASE_D * BASE_Y * 4), # Olympiad, 4 years
  y: (BASE_D * BASE_Y),     # Year, Gregorian Calendar
  q: (BASE_D * BASE_Y / 4), # Quarter, year/4
  l: (BASE_D * BASE_L),     # Lunar month
  f: (BASE_D * 14),         # Fortnight, 2 weeks
  w: (BASE_D * 7),          # Week, 604800 seconds
  d: BASE_D,                # Day
  h: 3600,                  # Hour
  b: 86.4,                  # Beat, Swatch internet time: 1000b = 1d
  m: 60,                    # Minute
  s: 1,                     # Second
}.freeze
# Mathematical multipliers accepted by to_second(): e, pi, and e**pi.
MathematicalConstant = {
  e: CONST_E,
  p: CONST_P,
  g: CONST_E**CONST_P,
}.freeze
MonthName = {
  full: %w[January February March April May June July August September October November December],
  abbr: %w[Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec],
}.freeze
DayOfWeek = {
  full: %w[Sunday Monday Tuesday Wednesday Thursday Friday Saturday],
  abbr: %w[Sun Mon Tue Wed Thu Fri Sat],
}.freeze
HourName = {
  full: [
    'Midnight', 1, 2, 3, 4, 5, 'Morning', 7, 8, 9, 10, 11,
    'Noon', 13, 14, 15, 16, 17, 'Evening', 19, 20, 21, 22, 23,
  ],
  # (0..23).to_a: the previous literal [0..23] was a one-element Array
  # holding a Range, not the list of hours documented by hourname().
  abbr: (0..23).to_a,
}.freeze
# Timezone abbreviation => RFC 2822 numeric UTC offset.
# Ambiguous or unused abbreviations are kept commented out for reference.
# http://en.wikipedia.org/wiki/List_of_time_zone_abbreviations
TimeZoneAbbr = {
  # ACDT: '+1030', # Australian Central Daylight Time UTC+10:30
  # ACST: '+0930', # Australian Central Standard Time UTC+09:30
  # ACT: '+0800', # ASEAN Common Time UTC+08:00
  ADT: '-0300', # Atlantic Daylight Time UTC-03:00
  # AEDT: '+1100', # Australian Eastern Daylight Time UTC+11:00
  # AEST: '+1000', # Australian Eastern Standard Time UTC+10:00
  # AFT: '+0430', # Afghanistan Time UTC+04:30
  AKDT: '-0800', # Alaska Daylight Time UTC-08:00
  AKST: '-0900', # Alaska Standard Time UTC-09:00
  # AMST '+0500', # Armenia Summer Time UTC+05:00
  # AMT '+0400', # Armenia Time UTC+04:00
  # ART '-0300', # Argentina Time UTC+03:00
  # AST '+0300', # Arab Standard Time (Kuwait, Riyadh) UTC+03:00
  # AST '+0400', # Arabian Standard Time (Abu Dhabi, Muscat) UTC+04:00
  # AST '+0300', # Arabic Standard Time (Baghdad) UTC+03:00
  AST: '-0400', # Atlantic Standard Time UTC-04:00
  # AWDT: '+0900', # Australian Western Daylight Time UTC+09:00
  # AWST: '+0800', # Australian Western Standard Time UTC+08:00
  # AZOST: '-0100', # Azores Standard Time UTC-01:00
  # AZT: '+0400', # Azerbaijan Time UTC+04:00
  # BDT: '+0800', # Brunei Time UTC+08:00
  # BIOT: '+0600', # British Indian Ocean Time UTC+06:00
  # BIT: '-1200', # Baker Island Time UTC-12:00
  # BOT: '-0400', # Bolivia Time UTC-04:00
  # BRT: '-0300', # Brasilia Time UTC-03:00
  # BST: '+0600', # Bangladesh Standard Time UTC+06:00
  # BST: '+0100', # British Summer Time (British Standard Time from Feb 1968 to Oct 1971) UTC+01:00
  # BTT: '+0600', # Bhutan Time UTC+06:00
  # CAT: '+0200', # Central Africa Time UTC+02:00
  # CCT: '+0630', # Cocos Islands Time UTC+06:30
  CDT: '-0500', # Central Daylight Time (North America) UTC-05:00
  # CEDT: '+0200', # Central European Daylight Time UTC+02:00
  # CEST: '+0200', # Central European Summer Time UTC+02:00
  # CET: '+0100', # Central European Time UTC+01:00
  # CHAST: '+1245', # Chatham Standard Time UTC+12:45
  # CIST: '-0800', # Clipperton Island Standard Time UTC-08:00
  # CKT: '-1000', # Cook Island Time UTC-10:00
  # CLST '-0300', # Chile Summer Time UTC-03:00
  # CLT '-0400', # Chile Standard Time UTC-04:00
  # COST '-0400', # Colombia Summer Time UTC-04:00
  # COT '-0500', # Colombia Time UTC-05:00
  CST: '-0600', # Central Standard Time (North America) UTC-06:00
  # CST: '+0800', # China Standard Time UTC+08:00
  # CVT: '-0100', # Cape Verde Time UTC-01:00
  # CXT: '+0700', # Christmas Island Time UTC+07:00
  # ChST: '+1000', # Chamorro Standard Time UTC+10:00
  # DST: '' # Daylight saving time Depending
  # DFT: '+0100', # AIX specific equivalent of Central European Time UTC+01:00
  # EAST: '-0600', # Easter Island Standard Time UTC-06:00
  # EAT: '+0300', # East Africa Time UTC+03:00
  # ECT: '-0400', # Eastern Caribbean Time (does not recognise DST) UTC-04:00
  # ECT: '-0500', # Ecuador Time UTC-05:00
  EDT: '-0400', # Eastern Daylight Time (North America) UTC-04:00
  # EEDT: '+0300', # Eastern European Daylight Time UTC+03:00
  # EEST: '+0300', # Eastern European Summer Time UTC+03:00
  # EET: '+0200', # Eastern European Time UTC+02:00
  # Sign fixed: EST is west of Greenwich; it was wrongly listed as '+0500'.
  EST: '-0500', # Eastern Standard Time (North America) UTC-05:00
  # FJT: '+1200', # Fiji Time UTC+12:00
  # FKST: '-0400', # Falkland Islands Standard Time UTC-04:00
  # GALT: '-0600', # Galapagos Time UTC-06:00
  # GET: '+0400', # Georgia Standard Time UTC+04:00
  # GFT: '-0300', # French Guiana Time UTC-03:00
  # GILT: '+1200', # Gilbert Island Time UTC+12:00
  # GIT: '-0900', # Gambier Island Time UTC-09:00
  GMT: '+0000', # Greenwich Mean Time UTC
  # :GST '-0200', # South Georgia and the South Sandwich Islands UTC-02:00
  # :GYT '-0400', # Guyana Time UTC-04:00
  HADT: '-0900', # Hawaii-Aleutian Daylight Time UTC-09:00
  HAST: '-1000', # Hawaii-Aleutian Standard Time UTC-10:00
  # HKT: '+0800', # Hong Kong Time UTC+08:00
  # HMT: '+0500', # Heard and McDonald Islands Time UTC+05:00
  HST: '-1000', # Hawaii Standard Time UTC-10:00
  # IRKT: '+0800', # Irkutsk Time UTC+08:00
  # IRST: '+0330', # Iran Standard Time UTC+03:30
  # IST: '+0530', # Indian Standard Time UTC+05:30
  # IST: '+0100', # Irish Summer Time UTC+01:00
  # IST: '+0200', # Israel Standard Time UTC+02:00
  JST: '+0900', # Japan Standard Time UTC+09:00
  # KRAT: '+0700', # Krasnoyarsk Time UTC+07:00
  # KST: '+0900', # Korea Standard Time UTC+09:00
  # LHST: '+1030', # Lord Howe Standard Time UTC+10:30
  # LINT: '+1400', # Line Islands Time UTC+14:00
  # MAGT: '+1100', # Magadan Time UTC+11:00
  MDT: '-0600', # Mountain Daylight Time(North America) UTC-06:00
  # MIT: '-0930', # Marquesas Islands Time UTC-09:30
  # MSD: '+0400', # Moscow Summer Time UTC+04:00
  # MSK: '+0300', # Moscow Standard Time UTC+03:00
  # MST: '+0800', # Malaysian Standard Time UTC+08:00
  MST: '-0700', # Mountain Standard Time(North America) UTC-07:00
  # MST: '+0630', # Myanmar Standard Time UTC+06:30
  # MUT: '+0400', # Mauritius Time UTC+04:00
  # NDT: '-0230', # Newfoundland Daylight Time UTC-02:30
  # NFT: '+1130', # Norfolk Time[1] UTC+11:30
  # NPT: '+0545', # Nepal Time UTC+05:45
  # NST: '-0330', # Newfoundland Standard Time UTC-03:30
  # NT: '-0330', # Newfoundland Time UTC-03:30
  # OMST: '+0600', # Omsk Time UTC+06:00
  PDT: '-0700', # Pacific Daylight Time(North America) UTC-07:00
  # PETT: '+1200', # Kamchatka Time UTC+12:00
  # PHOT: '+1300', # Phoenix Island Time UTC+13:00
  # PKT: '+0500', # Pakistan Standard Time UTC+05:00
  PST: '-0800', # Pacific Standard Time (North America) UTC-08:00
  # PST: '+0800', # Philippine Standard Time UTC+08:00
  # RET: '+0400', # Reunion Time UTC+04:00
  # SAMT: '+0400', # Samara Time UTC+04:00
  # SAST: '+0200', # South African Standard Time UTC+02:00
  # SBT: '+1100', # Solomon Islands Time UTC+11:00
  # SCT: '+0400', # Seychelles Time UTC+04:00
  # SLT: '+0530', # Sri Lanka Time UTC+05:30
  # SST: '-1100', # Samoa Standard Time UTC-11:00
  # SST: '+0800', # Singapore Standard Time UTC+08:00
  # TAHT: '-1000', # Tahiti Time UTC-10:00
  # THA: '+0700', # Thailand Standard Time UTC+07:00
  UT: '-0000', # Coordinated Universal Time UTC
  UTC: '-0000', # Coordinated Universal Time UTC
  # UYST: '-0200', # Uruguay Summer Time UTC-02:00
  # UYT: '-0300', # Uruguay Standard Time UTC-03:00
  # VET: '-0430', # Venezuelan Standard Time UTC-04:30
  # VLAT: '+1000', # Vladivostok Time UTC+10:00
  # WAT: '+0100', # West Africa Time UTC+01:00
  # WEDT: '+0100', # Western European Daylight Time UTC+01:00
  # WEST: '+0100', # Western European Summer Time UTC+01:00
  # WET: '-0000', # Western European Time UTC
  # YAKT: '+0900', # Yakutsk Time UTC+09:00
  # YEKT: '+0500', # Yekaterinburg Time UTC+05:00
}.freeze
# Convert a "digit + unit of time" string into seconds.
# @param [String] argv1 Digit and a unit of time, e.g. "1d", "2h", "1.5w";
#                       an optional mathematical-constant multiplier may
#                       precede the unit, e.g. "1pd", "pe"
# @return [Integer,Float] n: seconds
#                         0: 0 or invalid unit of time
# @example Get the value of seconds
#   to_second('1d') #=> 86400
#   to_second('2h') #=> 7200
def to_second(argv1)
  return 0 unless argv1.is_a?(::String)
  getseconds = 0
  unitoftime = TimeUnit.keys.join
  mathconsts = MathematicalConstant.keys.join
  if cr = argv1.match(/\A(\d+|\d+[.]\d+)([#{unitoftime}])?\z/)
    # Plain coefficient: 1d, 1.5w; the unit defaults to days.
    n = cr[1].to_f
    u = cr[2] || 'd'
    getseconds = n * TimeUnit[u.to_sym].to_f
  elsif cr = argv1.match(/\A(\d+|\d+[.]\d+)?([#{mathconsts}])([#{unitoftime}])?\z/)
    # Coefficient with a mathematical constant: 1pd, 1.5pw, or just "pd".
    # The coefficient is optional and defaults to 1. The previous code
    # ("n = cr[1].to_f || 1" followed by "n = 1 if n.to_i.zero?") had a
    # dead "|| 1" (to_f never returns nil) and silently rounded any
    # fractional coefficient below 1 (e.g. "0.5pd") up to 1.
    n = cr[1] ? cr[1].to_f : 1.0
    m = MathematicalConstant[cr[2].to_sym].to_f
    u = cr[3] || 'd'
    getseconds = n * m * TimeUnit[u.to_sym].to_f
  else
    getseconds = 0
  end
  return getseconds
end
# Month name list
# @param [Integer] argv1 Non-zero to get full names, 0 (default) for abbrs
# @return [Array] Month name list
# @example Get the names of each month
#   monthname()  #=> [ 'Jan', 'Feb', ... ]
#   monthname(1) #=> [ 'January', 'February', 'March', ... ]
def monthname(argv1 = 0)
  return MonthName[argv1 > 0 ? :full : :abbr]
end
# List of day of week
# @param [Integer] argv1 Non-zero to get full names, 0 (default) for abbrs
# @return [Array] List of day-of-week names
# @example Get the names of each day of week
#   dayofweek()  #=> [ 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat' ]
#   dayofweek(1) #=> [ 'Sunday', 'Monday', 'Tuesday', ... ]
def dayofweek(argv1 = 0)
  namesize = argv1 > 0 ? :full : :abbr
  DayOfWeek[namesize]
end
# Hour name list
# @param [Integer] argv1 Non-zero (default 1) to get named hours
# @return [Array] Hour name list
# @example Get the names of each hour
#   hourname(0) #=> [ 0, 1, 2, ... 23 ]
#   hourname(1) #=> [ 'Midnight', 1, 2, ... 'Morning', 7, ... 'Noon', ... 23 ]
def hourname(argv1 = 1)
  if argv1 > 0
    HourName[:full]
  else
    HourName[:abbr]
  end
end
# Convert a day offset into a date string, counted back from today.
# @param [Integer,String] argv1 Offset in days (positive values go back)
# @param [String] argv2 Delimiter character: default is '-'
# @return [String] Date string; today's date in '%Y/%m/%d' format when
#                  argv1 is not an integer-like value
# @example Get the value of n days before(today is 2015/11/04)
#   o2d(1)     #=> 2015-11-03
#   o2d(2,'/') #=> 2015/11/02
def o2d(argv1 = 0, argv2 = '-')
  piece = ::DateTime.now
  # Coerce to String before matching: the previous "argv1 =~ /.../" never
  # matched the documented Integer argument (Object#=~ returned nil, and
  # was removed entirely in Ruby 3.2), so integer offsets were ignored.
  return piece.strftime('%Y/%m/%d') unless argv1.to_s =~ /\A[-]?\d+\z/
  epoch = piece.to_time.to_i - argv1.to_i * 86400
  if epoch < 0
    # Negative value: clamp at the UNIX epoch
    epoch = 0
  elsif epoch >= 2**31
    # See http://en.wikipedia.org/wiki/Year_2038_problem
    epoch = 2**31 - 1
  end
  return Time.at(epoch).strftime('%Y' + argv2 + '%m' + argv2 + '%d')
end
# Parse date string; strptime() wrapper
# @param [String] argv1 Date string
# @return [String,nil] Converted date string, or nil when argv1 cannot be
#                      parsed as a date
# @see http://en.wikipedia.org/wiki/ISO_8601
# @see http://www.ietf.org/rfc/rfc3339.txt
# @example Parse date string and convert to generic format string
#   parse("2015-11-03T23:34:45 Tue") #=> Tue, 3 Nov 2015 23:34:45 +0900
#   parse("Tue, Nov 3 2015 2:2:2")   #=> Tue, 3 Nov 2015 02:02:02 +0900
def parse(argv1)
  return nil unless argv1.is_a?(::String)
  return nil unless argv1.size > 0
  datestring = argv1
  datestring = datestring.sub(/[,](\d+)/, ', \1') # Thu,13 -> Thu, 13
  datestring = datestring.sub(/(\d{1,2}),/, '\1') # Apr,29 -> Apr 29
  timetokens = datestring.split(' ')
  afternoon1 = 0  # (Integer) After noon flag
  altervalue = {} # (Hash) To store alternative values
  v = {
    Y: nil, # (Integer) Year
    M: nil, # (String) Month Abbr.
    d: nil, # (Integer) Day
    a: nil, # (String) Day of week, Abbr.
    T: nil, # (String) Time
    z: nil, # (Integer) Timezone offset
  }
  while p = timetokens.shift do
    # Parse each piece of time
    if p =~ /\A[A-Z][a-z]{2}[,]?\z/
      # Day of week or month name; Thu, Apr, ...
      # String#chop does not mutate its receiver, so the result must be
      # assigned back; the previous "p.chop if ..." was a no-op and tokens
      # such as "Apr," were never matched below.
      p = p.chop if p.length == 4 # Thu, -> Thu
      if DayOfWeek[:abbr].include?(p)
        # Day of week; Mon, Thu, Sun,...
        v[:a] = p
      elsif MonthName[:abbr].include?(p)
        # Month name abbr.; Apr, May, ...
        v[:M] = p
      end
    elsif p =~ /\A\d{1,4}\z/
      # Year or Day; 2005, 31, 04, 1, ...
      if p.to_i > 31
        # The piece is the value of an year
        v[:Y] = p
      else
        # The piece is the value of a day
        if v[:d]
          # 2-digit year?
          altervalue[:Y] = p unless v[:Y]
        else
          # The value is "day"
          v[:d] = p
        end
      end
    elsif cr = p.match(/\A([0-2]\d):([0-5]\d):([0-5]\d)\z/) ||
          p.match(/\A(\d{1,2})[-:](\d{1,2})[-:](\d{1,2})\z/)
      # Time; 12:34:56, 03:14:15, ...
      # Arrival-Date: 2014-03-26 00-01-19
      if cr[1].to_i < 24 && cr[2].to_i < 60 && cr[3].to_i < 60
        # Valid time format, maybe...
        v[:T] = sprintf('%02d:%02d:%02d', cr[1].to_i, cr[2].to_i, cr[3].to_i)
      end
    elsif cr = p.match(/\A([0-2]\d):([0-5]\d)\z/)
      # Time; 12:34 => 12:34:00
      # .to_i is required here: sprintf('%d', '08') raises ArgumentError
      # because a zero-padded String argument is read as an octal number.
      if cr[1].to_i < 24 && cr[2].to_i < 60
        v[:T] = sprintf('%02d:%02d:00', cr[1].to_i, cr[2].to_i)
      end
    elsif cr = p.match(/\A(\d\d?):(\d\d?)\z/)
      # Time: 1:4 => 01:04:00
      v[:T] = sprintf('%02d:%02d:00', cr[1].to_i, cr[2].to_i)
    elsif p =~ /\A[APap][Mm]\z/
      # AM or PM
      afternoon1 = 1
    else
      # Timezone offset and others
      if p =~ /\A[-+][01]\d{3}\z/
        # Timezone offset; +0000, +0900, -1000, ...
        v[:z] ||= p
      elsif p =~ /\A[(]?[A-Z]{2,5}[)]?\z/
        # Timezone abbreviation; JST, GMT, UTC, ...
        v[:z] ||= abbr2tz(p) || '+0000'
      else
        # Other date format
        if cr = p.match(%r|\A(\d{4})[-/](\d{1,2})[-/](\d{1,2})\z|)
          # Mail.app(MacOS X)'s faked Bounce, Arrival-Date: 2010-06-18 17:17:52 +0900
          v[:Y] = cr[1].to_i
          v[:M] = MonthName[:abbr][cr[2].to_i - 1]
          v[:d] = cr[3].to_i
        elsif cr = p.match(%r|\A(\d{4})[-/](\d{1,2})[-/](\d{1,2})T([0-2]\d):([0-5]\d):([0-5]\d)\z|)
          # ISO 8601; 2000-04-29T01:23:45
          v[:Y] = cr[1].to_i
          v[:M] = MonthName[:abbr][cr[2].to_i - 1]
          v[:d] = cr[3].to_i if cr[3].to_i < 32
          if cr[4].to_i < 24 && cr[5].to_i < 60 && cr[6].to_i < 60
            v[:T] = sprintf('%02d:%02d:%02d', cr[4].to_i, cr[5].to_i, cr[6].to_i)
          end
        elsif cr = p.match(%r|\A(\d{1,2})/(\d{1,2})/(\d{1,2})\z|)
          # 4/29/01 11:34:45 PM
          v[:M] = MonthName[:abbr][cr[1].to_i - 1]
          v[:d] = cr[2].to_i
          v[:Y] = cr[3].to_i + 2000
          v[:Y] -= 100 if v[:Y].to_i > ::DateTime.now.year + 1
        end
      end
    end
  end # End of while()
  if v[:T] && afternoon1 > 0
    # Shift the hour by +12 for PM, unless already in 24-hour form
    t0 = v[:T]
    t1 = v[:T].split(':')
    v[:T] = sprintf('%02d:%02d:%02d', t1[0].to_i + 12, t1[1].to_i, t1[2].to_i)
    v[:T] = t0 if t1[0].to_i > 12
  end
  v[:a] ||= 'Thu' # There is no day of week
  if !v[:Y].nil? && v[:Y].to_i < 200
    # 99 -> 1999, 102 -> 2002
    v[:Y] = v[:Y].to_i + 1900
  end
  # Fall back to the local timezone offset, e.g. "+09:00" -> "+0900"
  v[:z] ||= ::DateTime.now.zone.delete(':')
  # Adjust 2-digit Year
  if altervalue[:Y] && !v[:Y]
    # Check alternative value(Year)
    v[:Y] ||= if altervalue[:Y].to_i >= 82
                # SMTP was born in 1982
                1900 + altervalue[:Y].to_i
              else
                # 20XX
                2000 + altervalue[:Y].to_i
              end
  end
  # Check each piece
  if v.value?(nil)
    # Strange date format
    warn sprintf(' ***warning: Strange date format [%s]', datestring)
    return nil
  end
  if v[:Y].to_i < 1902 || v[:Y].to_i > 2037
    # -(2^31) ~ (2^31)
    return nil
  end
  # Build date string
  # Thu, 29 Apr 2004 10:01:11 +0900
  return sprintf('%s, %s %s %s %s %s', v[:a], v[:d], v[:M], v[:Y], v[:T], v[:z])
end
# Abbreviation -> Timezone offset string
# @param [String] argv1 Abbr. e.g.) JST, GMT, PDT
# @return [String, nil] '+0900', '+0000', '-0600', or nil when the
#                       argument is not a String or is an unsupported
#                       abbreviation
# @example Get the timezone string of "JST"
#   abbr2tz('JST') #=> '+0900'
def abbr2tz(argv1)
  return nil unless argv1.is_a?(::String)
  TimeZoneAbbr[argv1.to_sym]
end
# Convert a timezone offset string into seconds.
# @param [String] argv1 Timezone string e.g) +0900, or an abbreviation
# @return [Integer,nil] n: seconds, or nil when the argument is not a
#                       valid offset string or known abbreviation
# @see second2tz
# @example Convert '+0900' to seconds
#   tz2second('+0900') #=> 32400
def tz2second(argv1)
  return nil unless argv1.is_a?(::String)
  if cr = argv1.match(/\A([-+])(\d)(\d)(\d{2})\z/)
    # Numeric offset such as '+0900': hours * 3600 + minutes * 60
    hours   = cr[2].to_i * 10 + cr[3].to_i
    minutes = cr[4].to_i
    seconds = hours * 3600 + minutes * 60
    seconds = seconds * -1 if cr[1] == '-'
    return nil if seconds.abs > TZ_OFFSET
    seconds
  elsif argv1 =~ /\A[A-Za-z]+\z/
    # Abbreviation such as 'JST': resolve to an offset string and recurse
    tz2second(TimeZoneAbbr[argv1.to_sym])
  else
    nil
  end
end
# Convert seconds into a timezone offset string.
# @param [Integer,Float] argv1 Second to be converted
# @return [String] Timezone offset string such as '+0325'; '+0000' for a
#                  non-numeric argument, '' when out of range
# @see tz2second
# @example Get timezone offset string of specified seconds
#   second2tz(12345) #=> '+0325'
def second2tz(argv1)
  # Numeric, not "Number": Ruby has no Number class, so the previous
  # is_a?(Number) check raised NameError on every call.
  return '+0000' unless argv1.is_a?(Numeric)
  digit = { operator: '+' }
  return '' if argv1.abs > TZ_OFFSET # UTC+14 + 1(DST?)
  digit[:operator] = '-' if argv1 < 0
  digit[:hours] = (argv1.abs / 3600).to_i
  digit[:minutes] = ((argv1.abs % 3600) / 60).to_i
  timez = sprintf('%s%02d%02d', digit[:operator], digit[:hours], digit[:minutes])
  return timez
end
end
end
end
|
# Rails environment configuration for the tahi-staging Heroku deployment.
Tahi::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
# config.serve_static_assets = false
# Compress JavaScripts and CSS.
# config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
# config.assets.compile = false
# Generate digests for assets URLs.
# config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
# config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = { host: 'tahi-staging.herokuapp.com' }
# Outbound mail is delivered through SendGrid; credentials come from the environment.
ActionMailer::Base.smtp_settings = {
address: 'smtp.sendgrid.net',
port: '587',
authentication: :plain,
user_name: ENV['SENDGRID_USERNAME'],
password: ENV['SENDGRID_PASSWORD'],
domain: 'heroku.com',
enable_starttls_auto: true
}
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# NOTE(review): this staging environment points at the 'tahi-performance'
# S3 bucket while the mailer host is tahi-staging — confirm that sharing the
# performance bucket is intended.
config.s3_bucket = 'tahi-performance'
config.carrierwave_storage = :fog
end
Mirror the performance environment configuration with staging.
# Rails environment configuration for the tahi-performance Heroku deployment.
Tahi::Application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# Version of your assets, change this if you want to expire all your assets.
config.assets.version = '1.0'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :info
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Precompile additional assets.
# application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
# config.assets.precompile += %w( search.js )
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
config.action_mailer.default_url_options = { host: 'tahi-performance.herokuapp.com' }
# Outbound mail is delivered through SendGrid; credentials come from the environment.
ActionMailer::Base.smtp_settings = {
address: 'smtp.sendgrid.net',
port: '587',
authentication: :plain,
user_name: ENV['SENDGRID_USERNAME'],
password: ENV['SENDGRID_PASSWORD'],
domain: 'heroku.com',
enable_starttls_auto: true
}
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation can not be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Uploads go to the performance S3 bucket via CarrierWave's fog storage backend.
config.s3_bucket = 'tahi-performance'
config.carrierwave_storage = :fog
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.