CombinedText stringlengths 4 3.42M |
|---|
Themenap::Config.configure do |c|
c.server = 'https://test.ada.edu.au'
c.use_basic_auth = true
c.snippets =
[ { :css => 'title',
:text => '<%= yield :title %>' },
{ :css => 'head',
:text => '<%= render "layouts/css_includes" %>',
:mode => :append },
{ :css => 'body',
:mode => :setattr, :key => 'class', :value => 'social_science' },
{ :css => 'html',
:text => '<%= render "layouts/js_includes" %>',
:mode => :append },
{ :css => 'article',
:text => '<%= render "layouts/body" %>' },
{ :css => 'nav.subnav',
:text => '<%= render "layouts/links" %>' },
{ :css => '.login',
:text => '<%= render "layouts/login" %>' } ]
c.layout_name = 'ada'
end
Do not insist on https for themenap.
Themenap::Config.configure do |c|
c.server = 'http://test.ada.edu.au'
c.use_basic_auth = true
c.snippets =
[ { :css => 'title',
:text => '<%= yield :title %>' },
{ :css => 'head',
:text => '<%= render "layouts/css_includes" %>',
:mode => :append },
{ :css => 'body',
:mode => :setattr, :key => 'class', :value => 'social_science' },
{ :css => 'html',
:text => '<%= render "layouts/js_includes" %>',
:mode => :append },
{ :css => 'article',
:text => '<%= render "layouts/body" %>' },
{ :css => 'nav.subnav',
:text => '<%= render "layouts/links" %>' },
{ :css => '.login',
:text => '<%= render "layouts/login" %>' } ]
c.layout_name = 'ada'
end
|
module Frank
module Cucumber
VERSION = "1.1.4.pre1"
end
end
very non-sem-ver update from 1.1.4.pre to 1.1.5
module Frank
module Cucumber
VERSION = "1.1.5"
end
end
|
module Frank
module Cucumber
VERSION = "0.8.16"
end
end
gem version bump
module Frank
module Cucumber
VERSION = "0.8.17"
end
end
|
#!/usr/bin/env ruby
# description: ManageIQ appliance console
#
# Simulate rubygems adding the top level appliance_console.rb's directory to the path.
$LOAD_PATH.push(File.dirname(__FILE__))
ROOT = [
"/var/www/miq",
File.expand_path(File.join(File.dirname(__FILE__), ".."))
].detect { |f| File.exist?(f) }
# Set up Environment
ENV['BUNDLE_GEMFILE'] ||= "#{ROOT}/vmdb/Gemfile"
require 'bundler'
Bundler.setup
require 'fileutils'
require 'highline/import'
require 'highline/system_extensions'
require 'rubygems'
require 'timeout'
require 'bcrypt'
require 'linux_admin'
require 'pathname'
require 'util/vmdb-logger'
include HighLine::SystemExtensions
require 'i18n'
LOCALES = File.join(File.expand_path(File.dirname(__FILE__)), "appliance_console/locales/*.yml")
I18n.load_path = Dir[LOCALES].sort
I18n.enforce_available_locales = true
I18n.backend.load_translations
TIMEOUT = 300
def ask_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { ask_without_timeout(*args, &block) }
end
alias ask_without_timeout ask
alias ask ask_with_timeout
def choose_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { choose_without_timeout(*args, &block) }
end
alias choose_without_timeout choose
alias choose choose_with_timeout
def agree_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { agree_without_timeout(*args, &block) }
end
alias agree_without_timeout agree
alias agree agree_with_timeout
$terminal.wrap_at = 80
$terminal.page_at = 21
require 'appliance_console/errors'
[:INT, :TERM, :ABRT, :TSTP].each { |s| trap(s) { raise MiqSignalError } }
# Disabled in order to allow rescue of timeout error
HighLine.track_eof = false
RAILS_ROOT = Pathname.new("#{ROOT}/vmdb")
EVM_PID_FILE = RAILS_ROOT.join("tmp/pids/evm.pid")
REGION_FILE = RAILS_ROOT.join("REGION")
VERSION_FILE = RAILS_ROOT.join("VERSION")
BUILD_FILE = RAILS_ROOT.join("BUILD")
LOGFILE = File.join(RAILS_ROOT, "log", "appliance_console.log")
DB_RESTORE_FILE = "/tmp/evm_db.backup"
AS_OPTIONS = I18n.t("advanced_settings.menu_order").collect do |item|
I18n.t("advanced_settings.#{item}")
end
CANCEL = "Cancel"
# Restore database choices
RESTORE_LOCAL = "Local file"
RESTORE_NFS = "Network File System (nfs)"
RESTORE_SMB = "Samba (smb)"
RESTORE_OPTIONS = [RESTORE_LOCAL, RESTORE_NFS, RESTORE_SMB, CANCEL]
# Restart choices
RE_RESTART = "Restart"
RE_DELLOGS = "Restart and Clean Logs"
RE_OPTIONS = [RE_RESTART, RE_DELLOGS, CANCEL]
# Timezone constants
$tzdata = nil
TZ_AREAS = %w{Africa America Asia Atlantic Australia Canada Europe Indian Pacific US}
TZ_AREAS_OPTIONS = ["United States", "Canada", "Africa", "America", "Asia", "Atlantic Ocean", "Australia", "Europe",
"Indian Ocean", "Pacific Ocean", CANCEL]
TZ_AREAS_MAP = Hash.new { |_h, k| k }.merge!(
"United States" => "US",
"Atlantic Ocean" => "Atlantic",
"Pacific Ocean" => "Pacific",
"Indian Ocean" => "Indian"
)
TZ_AREAS_MAP_REV = Hash.new { |_h, k| k }.merge!(TZ_AREAS_MAP.invert)
# Load appliance_console libraries
require 'appliance_console/utilities'
require 'appliance_console/logging'
require 'appliance_console/database_configuration'
require 'appliance_console/internal_database_configuration'
require 'appliance_console/external_database_configuration'
require 'appliance_console/external_httpd_authentication'
require 'appliance_console/temp_storage_configuration'
require 'appliance_console/env'
require 'appliance_console/key_configuration'
require 'appliance_console/scap'
require 'appliance_console/prompts'
include ApplianceConsole::Prompts
module ApplianceConsole
ip = Env["IP"]
# Because it takes a few seconds, get the database information once in the outside loop
configured = ApplianceConsole::DatabaseConfiguration.configured?
dbhost, dbtype, database = ApplianceConsole::Utilities.db_host_type_database if configured
clear_screen
# Calling stty to provide the equivalent line settings when the console is run via an ssh session or
# over the virtual machine console.
system("stty -echoprt ixany iexten echoe echok")
say("#{I18n.t("product.name")} Virtual Appliance\n")
say("To administer this appliance, browse to https://#{ip}\n") if configured
loop do
begin
host = Env["HOST"]
ip = Env["IP"]
mac = Env["MAC"]
mask = Env["MASK"]
gw = Env["GW"]
dns1 = Env["DNS1"]
dns2 = Env["DNS2"]
order = Env["SEARCHORDER"]
# time = Env["TIMESERVER"]
timezone = Env["TIMEZONE"]
region = File.read(REGION_FILE).chomp if File.exist?(REGION_FILE)
version = File.read(VERSION_FILE).chomp if File.exist?(VERSION_FILE)
configured = ApplianceConsole::DatabaseConfiguration.configured?
summary_attributes = [
"Hostname:", host,
"IP Address:", ip,
"Netmask:", mask,
"Gateway:", gw,
"Primary DNS:", dns1,
"Secondary DNS:", dns2,
"Search Order:", order,
"MAC Address:", mac,
"Timezone:", timezone,
"Local Database:", ApplianceConsole::Utilities.pg_status,
"#{I18n.t("product.name")} Database:", configured ? "#{dbtype} @ #{dbhost}" : "not configured",
"Database/Region:", configured ? "#{database} / #{region || 0}" : "not configured",
"External Auth:", ExternalHttpdAuthentication.config_status,
"#{I18n.t("product.name")} Version:", version,
"#{I18n.t("product.name")} Console:", configured ? "https://#{ip}" : "not configured"
]
clear_screen
say(<<-EOL)
Welcome to the #{I18n.t("product.name")} Virtual Appliance.
To modify the configuration, use a web browser to access the management page.
#{$terminal.list(summary_attributes, :columns_across, 2)}
EOL
press_any_key
clear_screen
selection = ask_with_menu("Advanced Setting", AS_OPTIONS, nil, true)
case selection
when I18n.t("advanced_settings.dhcp")
say("DHCP Network Configuration\n\n")
if agree("Apply DHCP network configuration? (Y/N): ")
say("\nApplying DHCP network configuration...")
Env['DHCP'] = true
say("\nAfter completing the appliance configuration, please restart #{I18n.t("product.name")} server processes.")
end
when I18n.t("advanced_settings.static")
say("Static Network Configuration\n\n")
say("Enter the new static network configuration settings.\n\n")
new_ip = ask_for_ip("IP Address", ip)
new_mask = ask_for_ip("Netmask", mask)
new_gw = ask_for_ip("Gateway", gw)
new_dns1 = ask_for_ip("Primary DNS", dns1)
new_dns2 = ask_for_ip_or_none("Secondary DNS (Enter 'none' for no value)")
new_search_order = ask_for_many("domain", "Domain search order", order)
clear_screen
say(<<-EOL)
Static Network Configuration
IP Address: #{new_ip}
Netmask: #{new_mask}
Gateway: #{new_gw}
Primary DNS: #{new_dns1}
Secondary DNS: #{new_dns2}
Search Order: #{new_search_order.join(" ")}
EOL
if agree("Apply static network configuration? (Y/N)")
say("\nApplying static network configuration...")
Env['STATIC'] = new_ip, new_mask, new_gw, new_dns1, new_dns2
# Convert space delimiter to semicolon: manageiq.com galaxy.local to manageiq.com;galaxy.local
# so we can pass it on the command line to miqnet.sh without quoting it
Env['SEARCHORDER'] = new_search_order.join("\\;") unless Env.error?
say("\nAfter completing the appliance configuration, please restart #{I18n.t("product.name")} server processes.")
end
when I18n.t("advanced_settings.testnet")
ApplianceConsole::Utilities.test_network
when I18n.t("advanced_settings.hostname")
say("Hostname Configuration\n\n")
new_host = just_ask("new hostname", host)
if new_host != host
say("Applying new hostname...")
Env['HOST'] = new_host
end
when I18n.t("advanced_settings.datetime")
say("Date and Time Configuration\n\n")
# Cache time zone data the first time
if $tzdata.nil?
$tzdata = {}
TZ_AREAS.each do |a|
$tzdata[a] = ary = []
a = "/usr/share/zoneinfo/#{a}/"
Dir.glob("#{a}*").each do |z|
ary << z[a.length..-1]
end
ary.sort!
end
end
timezone = timezone.split("/")
cur_loc = timezone[0]
cur_city = timezone[1..-1].join("/")
# Prompt for timezone geographic area (with current area as default)
def_loc = TZ_AREAS.include?(cur_loc) ? TZ_AREAS_MAP_REV[cur_loc] : nil
tz_area = ask_with_menu("Geographic Location", TZ_AREAS_OPTIONS, def_loc, false)
next if tz_area == CANCEL
new_loc = TZ_AREAS_MAP[tz_area]
# Prompt for timezone specific city (with current city as default)
default_city = cur_city if $tzdata[new_loc].include?(cur_city) && cur_loc == new_loc
new_city = ask_with_menu("Timezone", $tzdata[new_loc], default_city, true) do |menu|
menu.list_option = :columns_across
end
next if new_city == CANCEL
clear_screen
say("Date and Time Configuration\n\n")
new_date = ask_for_date("current date (YYYY-MM-DD)")
new_time = ask_for_time("current time in 24 hour format (HH:MM:SS)")
clear_screen
say(<<-EOL)
Date and Time Configuration
Timezone area: #{tz_area}
Timezone city: #{new_city}
Date: #{new_date}
Time: #{new_time}
EOL
if agree("Apply time and timezone configuration? (Y/N): ")
say("Applying time and timezone configuration...")
Env['TIMEZONE'] = new_loc, new_city
Env['TIME'] = new_date, new_time unless Env.error?
end
when I18n.t("advanced_settings.httpdauth")
say("#{selection}\n\n")
httpd_auth = ExternalHttpdAuthentication.new(host)
if httpd_auth.ask_questions && httpd_auth.activate
httpd_auth.post_activation
say("\nExternal Authentication configured successfully.\n")
press_any_key
else
say("\nExternal Authentication configuration failed!\n")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.evmstop")
say("#{selection}\n\n")
if File.exist?(EVM_PID_FILE)
if ask_yn? "\nNote: It may take up to a few minutes for all #{I18n.t("product.name")} server processes to exit gracefully. Stop #{I18n.t("product.name")}"
say("\nStopping #{I18n.t("product.name")} Server...")
Env['STOP'] = true
end
else
say("\nNo #{I18n.t("product.name")} PID file. #{I18n.t("product.name")} Server is not running...")
end
when I18n.t("advanced_settings.evmstart")
say("#{selection}\n\n")
if ask_yn?("\nStart #{I18n.t("product.name")}")
say("\nStarting #{I18n.t("product.name")} Server...")
Env['START'] = true
end
when I18n.t("advanced_settings.dbrestore")
say("#{selection}\n\n")
ApplianceConsole::Utilities.bail_if_db_connections "preventing a database restore"
task_with_opts = ""
uri = nil
# TODO: merge into 1 prompt
case ask_with_menu("Restore Database File", RESTORE_OPTIONS, RESTORE_LOCAL, nil)
when RESTORE_LOCAL
validate = ->(a) { File.exist?(a) }
uri = just_ask("location of the local restore file", DB_RESTORE_FILE, validate, "file that exists")
task_with_opts = "evm:db:restore:local -- --local-file '#{uri}'"
when RESTORE_NFS
uri = ask_for_uri("location of the remote backup file\nExample: #{sample_url('nfs')})", "nfs")
task_with_opts = "evm:db:restore:remote -- --uri '#{uri}'"
when RESTORE_SMB
uri = ask_for_uri("location of the remote backup file\nExample: #{sample_url('smb')}", "smb")
user = just_ask("username with access to this file.\nExample: 'mydomain.com/user'")
pass = ask_for_password("password for #{user}")
task_with_opts = "evm:db:restore:remote -- --uri '#{uri}' --uri-username '#{user}' --uri-password '#{pass}'"
when CANCEL
raise MiqSignalError
end
clear_screen
say("#{selection}\n\n")
delete_agreed = false
if selection == RESTORE_LOCAL
say "The local database restore file is located at: '#{uri}'.\n"
delete_agreed = agree("Should this file be deleted after completing the restore? (Y/N): ")
end
say "\nNote: A database restore cannot be undone. The restore will use the file: #{uri}.\n"
if agree("Are you sure you would like to restore the database? (Y/N): ")
say("\nRestoring the database...")
if Env.rake(task_with_opts) && delete_agreed
say("\nRemoving the database restore file #{DB_RESTORE_FILE}...")
File.delete(DB_RESTORE_FILE)
end
end
when I18n.t("advanced_settings.dbregion_setup")
say("#{selection}\n\n")
unless configured
say("There is no database configured yet, please choose #{I18n.t("advanced_settings.db_config")} instead.")
press_any_key
raise MiqSignalError
end
ApplianceConsole::Utilities.bail_if_db_connections("preventing the setup of a database region")
clear_screen
say("#{selection}\n\n")
say("Note: Each database region number must be unique.\n\n")
region_number = ask_for_integer("database region number")
clear_screen
say "It is recommended to use a new database or backup the existing database first.\n"
say "Warning: SETTING A DATABASE REGION WILL DESTROY ANY EXISTING DATA AND CANNOT BE UNDONE.\n\n"
if agree("Setting Database Region to: #{region_number}\nAre you sure you want to continue? (Y/N): ")
say("Setting Database Region... This process may take a few minutes.\n\n")
if Env.rake("evm:db:region -- --region #{region_number} 1>> #{LOGFILE}")
say("Database region setup complete...\nStart the #{I18n.t("product.name")} server processes via '#{I18n.t("advanced_settings.evmstart")}'.")
end
press_any_key
else
raise MiqSignalError
end
when I18n.t("advanced_settings.key_gen")
say("#{selection}\n\n")
key_config = ApplianceConsole::KeyConfiguration.new
if key_config.ask_question_loop
say("\nEncryption key now configured.")
press_any_key
else
say("\nEncryption key not configured.")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.db_config")
say("#{selection}\n\n")
key_config = ApplianceConsole::KeyConfiguration.new
unless key_config.key_exist?
say "No encryption key found.\n"
say "For migrations, copy encryption key from a hardened appliance."
say "For worker and multi-region setups, copy key from another appliance.\n"
say "If this is your first appliance, just generate one now.\n\n"
if key_config.ask_question_loop
say("\nEncryption key now configured.\n\n")
else
say("\nEncryption key not configured.")
press_any_key
raise MiqSignalError
end
end
loc_selection = ask_with_menu("Database Location", %w(Internal External), nil, false)
ApplianceConsole::Logging.logger = VMDBLogger.new(LOGFILE)
database_configuration = ApplianceConsole.const_get("#{loc_selection}DatabaseConfiguration").new
begin
database_configuration.ask_questions
rescue ArgumentError => e
say("\nConfiguration failed: #{e.message}\n")
press_any_key
raise MiqSignalError
end
clear_screen
say "Activating the configuration using the following settings...\n"
say "#{database_configuration.friendly_inspect}\n"
if database_configuration.activate
database_configuration.post_activation
say("\nConfiguration activated successfully.\n")
dbhost, dbtype, database = ApplianceConsole::Utilities.db_host_type_database
press_any_key
else
say("\nConfiguration activation failed!\n")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.tmp_config")
say("#{selection}\n\n")
tmp_config = ApplianceConsole::TempStorageConfiguration.new
if tmp_config.ask_questions && tmp_config.activate
say("Temp storage disk configured")
press_any_key
else
say("Temp storage disk not configured")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.restart")
case ask_with_menu("Restart Option", RE_OPTIONS, nil, false)
when CANCEL
# don't do anything
when RE_RESTART
Env['RESTARTOS'] = are_you_sure?("restart the appliance now")
when RE_DELLOGS
Env['RESTARTOSRMLOGS'] = are_you_sure?("restart the appliance now")
end
when I18n.t("advanced_settings.shutdown")
say("#{selection}\n\n")
if are_you_sure?("shut down the appliance now")
say("\nShutting down appliance... This process may take a few minutes.\n\n")
Env['SHUTDOWN'] = true
end
when I18n.t("advanced_settings.scap")
say("#{selection}\n\n")
ApplianceConsole::Scap.new.lockdown
press_any_key
when I18n.t("advanced_settings.summary")
# Do nothing
when I18n.t("advanced_settings.quit")
break
end
rescue Timeout::Error
break
rescue MiqSignalError
# If a signal is caught anywhere in the inner (after login) loop, go back to the summary screen
next
ensure
if Env.changed?
if (errtext = Env.error)
say("\nAn error occurred:\n\n#{errtext}")
else
say("\nCompleted successfully.")
end
press_any_key
end
Env.clear_errors
end
end
end
Removed unused timeserver comment from console
#!/usr/bin/env ruby
# description: ManageIQ appliance console
#
# Simulate rubygems adding the top level appliance_console.rb's directory to the path.
$LOAD_PATH.push(File.dirname(__FILE__))
ROOT = [
"/var/www/miq",
File.expand_path(File.join(File.dirname(__FILE__), ".."))
].detect { |f| File.exist?(f) }
# Set up Environment
ENV['BUNDLE_GEMFILE'] ||= "#{ROOT}/vmdb/Gemfile"
require 'bundler'
Bundler.setup
require 'fileutils'
require 'highline/import'
require 'highline/system_extensions'
require 'rubygems'
require 'timeout'
require 'bcrypt'
require 'linux_admin'
require 'pathname'
require 'util/vmdb-logger'
include HighLine::SystemExtensions
require 'i18n'
LOCALES = File.join(File.expand_path(File.dirname(__FILE__)), "appliance_console/locales/*.yml")
I18n.load_path = Dir[LOCALES].sort
I18n.enforce_available_locales = true
I18n.backend.load_translations
TIMEOUT = 300
def ask_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { ask_without_timeout(*args, &block) }
end
alias ask_without_timeout ask
alias ask ask_with_timeout
def choose_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { choose_without_timeout(*args, &block) }
end
alias choose_without_timeout choose
alias choose choose_with_timeout
def agree_with_timeout(*args, &block)
Timeout.timeout(TIMEOUT) { agree_without_timeout(*args, &block) }
end
alias agree_without_timeout agree
alias agree agree_with_timeout
$terminal.wrap_at = 80
$terminal.page_at = 21
require 'appliance_console/errors'
[:INT, :TERM, :ABRT, :TSTP].each { |s| trap(s) { raise MiqSignalError } }
# Disabled in order to allow rescue of timeout error
HighLine.track_eof = false
RAILS_ROOT = Pathname.new("#{ROOT}/vmdb")
EVM_PID_FILE = RAILS_ROOT.join("tmp/pids/evm.pid")
REGION_FILE = RAILS_ROOT.join("REGION")
VERSION_FILE = RAILS_ROOT.join("VERSION")
BUILD_FILE = RAILS_ROOT.join("BUILD")
LOGFILE = File.join(RAILS_ROOT, "log", "appliance_console.log")
DB_RESTORE_FILE = "/tmp/evm_db.backup"
AS_OPTIONS = I18n.t("advanced_settings.menu_order").collect do |item|
I18n.t("advanced_settings.#{item}")
end
CANCEL = "Cancel"
# Restore database choices
RESTORE_LOCAL = "Local file"
RESTORE_NFS = "Network File System (nfs)"
RESTORE_SMB = "Samba (smb)"
RESTORE_OPTIONS = [RESTORE_LOCAL, RESTORE_NFS, RESTORE_SMB, CANCEL]
# Restart choices
RE_RESTART = "Restart"
RE_DELLOGS = "Restart and Clean Logs"
RE_OPTIONS = [RE_RESTART, RE_DELLOGS, CANCEL]
# Timezone constants
$tzdata = nil
TZ_AREAS = %w{Africa America Asia Atlantic Australia Canada Europe Indian Pacific US}
TZ_AREAS_OPTIONS = ["United States", "Canada", "Africa", "America", "Asia", "Atlantic Ocean", "Australia", "Europe",
"Indian Ocean", "Pacific Ocean", CANCEL]
TZ_AREAS_MAP = Hash.new { |_h, k| k }.merge!(
"United States" => "US",
"Atlantic Ocean" => "Atlantic",
"Pacific Ocean" => "Pacific",
"Indian Ocean" => "Indian"
)
TZ_AREAS_MAP_REV = Hash.new { |_h, k| k }.merge!(TZ_AREAS_MAP.invert)
# Load appliance_console libraries
require 'appliance_console/utilities'
require 'appliance_console/logging'
require 'appliance_console/database_configuration'
require 'appliance_console/internal_database_configuration'
require 'appliance_console/external_database_configuration'
require 'appliance_console/external_httpd_authentication'
require 'appliance_console/temp_storage_configuration'
require 'appliance_console/env'
require 'appliance_console/key_configuration'
require 'appliance_console/scap'
require 'appliance_console/prompts'
include ApplianceConsole::Prompts
module ApplianceConsole
ip = Env["IP"]
# Because it takes a few seconds, get the database information once in the outside loop
configured = ApplianceConsole::DatabaseConfiguration.configured?
dbhost, dbtype, database = ApplianceConsole::Utilities.db_host_type_database if configured
clear_screen
# Calling stty to provide the equivalent line settings when the console is run via an ssh session or
# over the virtual machine console.
system("stty -echoprt ixany iexten echoe echok")
say("#{I18n.t("product.name")} Virtual Appliance\n")
say("To administer this appliance, browse to https://#{ip}\n") if configured
loop do
begin
host = Env["HOST"]
ip = Env["IP"]
mac = Env["MAC"]
mask = Env["MASK"]
gw = Env["GW"]
dns1 = Env["DNS1"]
dns2 = Env["DNS2"]
order = Env["SEARCHORDER"]
timezone = Env["TIMEZONE"]
region = File.read(REGION_FILE).chomp if File.exist?(REGION_FILE)
version = File.read(VERSION_FILE).chomp if File.exist?(VERSION_FILE)
configured = ApplianceConsole::DatabaseConfiguration.configured?
summary_attributes = [
"Hostname:", host,
"IP Address:", ip,
"Netmask:", mask,
"Gateway:", gw,
"Primary DNS:", dns1,
"Secondary DNS:", dns2,
"Search Order:", order,
"MAC Address:", mac,
"Timezone:", timezone,
"Local Database:", ApplianceConsole::Utilities.pg_status,
"#{I18n.t("product.name")} Database:", configured ? "#{dbtype} @ #{dbhost}" : "not configured",
"Database/Region:", configured ? "#{database} / #{region || 0}" : "not configured",
"External Auth:", ExternalHttpdAuthentication.config_status,
"#{I18n.t("product.name")} Version:", version,
"#{I18n.t("product.name")} Console:", configured ? "https://#{ip}" : "not configured"
]
clear_screen
say(<<-EOL)
Welcome to the #{I18n.t("product.name")} Virtual Appliance.
To modify the configuration, use a web browser to access the management page.
#{$terminal.list(summary_attributes, :columns_across, 2)}
EOL
press_any_key
clear_screen
selection = ask_with_menu("Advanced Setting", AS_OPTIONS, nil, true)
case selection
when I18n.t("advanced_settings.dhcp")
say("DHCP Network Configuration\n\n")
if agree("Apply DHCP network configuration? (Y/N): ")
say("\nApplying DHCP network configuration...")
Env['DHCP'] = true
say("\nAfter completing the appliance configuration, please restart #{I18n.t("product.name")} server processes.")
end
when I18n.t("advanced_settings.static")
say("Static Network Configuration\n\n")
say("Enter the new static network configuration settings.\n\n")
new_ip = ask_for_ip("IP Address", ip)
new_mask = ask_for_ip("Netmask", mask)
new_gw = ask_for_ip("Gateway", gw)
new_dns1 = ask_for_ip("Primary DNS", dns1)
new_dns2 = ask_for_ip_or_none("Secondary DNS (Enter 'none' for no value)")
new_search_order = ask_for_many("domain", "Domain search order", order)
clear_screen
say(<<-EOL)
Static Network Configuration
IP Address: #{new_ip}
Netmask: #{new_mask}
Gateway: #{new_gw}
Primary DNS: #{new_dns1}
Secondary DNS: #{new_dns2}
Search Order: #{new_search_order.join(" ")}
EOL
if agree("Apply static network configuration? (Y/N)")
say("\nApplying static network configuration...")
Env['STATIC'] = new_ip, new_mask, new_gw, new_dns1, new_dns2
# Convert space delimiter to semicolon: manageiq.com galaxy.local to manageiq.com;galaxy.local
# so we can pass it on the command line to miqnet.sh without quoting it
Env['SEARCHORDER'] = new_search_order.join("\\;") unless Env.error?
say("\nAfter completing the appliance configuration, please restart #{I18n.t("product.name")} server processes.")
end
when I18n.t("advanced_settings.testnet")
ApplianceConsole::Utilities.test_network
when I18n.t("advanced_settings.hostname")
say("Hostname Configuration\n\n")
new_host = just_ask("new hostname", host)
if new_host != host
say("Applying new hostname...")
Env['HOST'] = new_host
end
when I18n.t("advanced_settings.datetime")
say("Date and Time Configuration\n\n")
# Cache time zone data the first time
if $tzdata.nil?
$tzdata = {}
TZ_AREAS.each do |a|
$tzdata[a] = ary = []
a = "/usr/share/zoneinfo/#{a}/"
Dir.glob("#{a}*").each do |z|
ary << z[a.length..-1]
end
ary.sort!
end
end
timezone = timezone.split("/")
cur_loc = timezone[0]
cur_city = timezone[1..-1].join("/")
# Prompt for timezone geographic area (with current area as default)
def_loc = TZ_AREAS.include?(cur_loc) ? TZ_AREAS_MAP_REV[cur_loc] : nil
tz_area = ask_with_menu("Geographic Location", TZ_AREAS_OPTIONS, def_loc, false)
next if tz_area == CANCEL
new_loc = TZ_AREAS_MAP[tz_area]
# Prompt for timezone specific city (with current city as default)
default_city = cur_city if $tzdata[new_loc].include?(cur_city) && cur_loc == new_loc
new_city = ask_with_menu("Timezone", $tzdata[new_loc], default_city, true) do |menu|
menu.list_option = :columns_across
end
next if new_city == CANCEL
clear_screen
say("Date and Time Configuration\n\n")
new_date = ask_for_date("current date (YYYY-MM-DD)")
new_time = ask_for_time("current time in 24 hour format (HH:MM:SS)")
clear_screen
say(<<-EOL)
Date and Time Configuration
Timezone area: #{tz_area}
Timezone city: #{new_city}
Date: #{new_date}
Time: #{new_time}
EOL
if agree("Apply time and timezone configuration? (Y/N): ")
say("Applying time and timezone configuration...")
Env['TIMEZONE'] = new_loc, new_city
Env['TIME'] = new_date, new_time unless Env.error?
end
when I18n.t("advanced_settings.httpdauth")
say("#{selection}\n\n")
httpd_auth = ExternalHttpdAuthentication.new(host)
if httpd_auth.ask_questions && httpd_auth.activate
httpd_auth.post_activation
say("\nExternal Authentication configured successfully.\n")
press_any_key
else
say("\nExternal Authentication configuration failed!\n")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.evmstop")
say("#{selection}\n\n")
if File.exist?(EVM_PID_FILE)
if ask_yn? "\nNote: It may take up to a few minutes for all #{I18n.t("product.name")} server processes to exit gracefully. Stop #{I18n.t("product.name")}"
say("\nStopping #{I18n.t("product.name")} Server...")
Env['STOP'] = true
end
else
say("\nNo #{I18n.t("product.name")} PID file. #{I18n.t("product.name")} Server is not running...")
end
when I18n.t("advanced_settings.evmstart")
say("#{selection}\n\n")
if ask_yn?("\nStart #{I18n.t("product.name")}")
say("\nStarting #{I18n.t("product.name")} Server...")
Env['START'] = true
end
when I18n.t("advanced_settings.dbrestore")
say("#{selection}\n\n")
ApplianceConsole::Utilities.bail_if_db_connections "preventing a database restore"
task_with_opts = ""
uri = nil
# TODO: merge into 1 prompt
case ask_with_menu("Restore Database File", RESTORE_OPTIONS, RESTORE_LOCAL, nil)
when RESTORE_LOCAL
validate = ->(a) { File.exist?(a) }
uri = just_ask("location of the local restore file", DB_RESTORE_FILE, validate, "file that exists")
task_with_opts = "evm:db:restore:local -- --local-file '#{uri}'"
when RESTORE_NFS
uri = ask_for_uri("location of the remote backup file\nExample: #{sample_url('nfs')})", "nfs")
task_with_opts = "evm:db:restore:remote -- --uri '#{uri}'"
when RESTORE_SMB
uri = ask_for_uri("location of the remote backup file\nExample: #{sample_url('smb')}", "smb")
user = just_ask("username with access to this file.\nExample: 'mydomain.com/user'")
pass = ask_for_password("password for #{user}")
task_with_opts = "evm:db:restore:remote -- --uri '#{uri}' --uri-username '#{user}' --uri-password '#{pass}'"
when CANCEL
raise MiqSignalError
end
clear_screen
say("#{selection}\n\n")
delete_agreed = false
if selection == RESTORE_LOCAL
say "The local database restore file is located at: '#{uri}'.\n"
delete_agreed = agree("Should this file be deleted after completing the restore? (Y/N): ")
end
say "\nNote: A database restore cannot be undone. The restore will use the file: #{uri}.\n"
if agree("Are you sure you would like to restore the database? (Y/N): ")
say("\nRestoring the database...")
if Env.rake(task_with_opts) && delete_agreed
say("\nRemoving the database restore file #{DB_RESTORE_FILE}...")
File.delete(DB_RESTORE_FILE)
end
end
when I18n.t("advanced_settings.dbregion_setup")
say("#{selection}\n\n")
unless configured
say("There is no database configured yet, please choose #{I18n.t("advanced_settings.db_config")} instead.")
press_any_key
raise MiqSignalError
end
ApplianceConsole::Utilities.bail_if_db_connections("preventing the setup of a database region")
clear_screen
say("#{selection}\n\n")
say("Note: Each database region number must be unique.\n\n")
region_number = ask_for_integer("database region number")
clear_screen
say "It is recommended to use a new database or backup the existing database first.\n"
say "Warning: SETTING A DATABASE REGION WILL DESTROY ANY EXISTING DATA AND CANNOT BE UNDONE.\n\n"
if agree("Setting Database Region to: #{region_number}\nAre you sure you want to continue? (Y/N): ")
say("Setting Database Region... This process may take a few minutes.\n\n")
if Env.rake("evm:db:region -- --region #{region_number} 1>> #{LOGFILE}")
say("Database region setup complete...\nStart the #{I18n.t("product.name")} server processes via '#{I18n.t("advanced_settings.evmstart")}'.")
end
press_any_key
else
raise MiqSignalError
end
when I18n.t("advanced_settings.key_gen")
say("#{selection}\n\n")
key_config = ApplianceConsole::KeyConfiguration.new
if key_config.ask_question_loop
say("\nEncryption key now configured.")
press_any_key
else
say("\nEncryption key not configured.")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.db_config")
say("#{selection}\n\n")
key_config = ApplianceConsole::KeyConfiguration.new
unless key_config.key_exist?
say "No encryption key found.\n"
say "For migrations, copy encryption key from a hardened appliance."
say "For worker and multi-region setups, copy key from another appliance.\n"
say "If this is your first appliance, just generate one now.\n\n"
if key_config.ask_question_loop
say("\nEncryption key now configured.\n\n")
else
say("\nEncryption key not configured.")
press_any_key
raise MiqSignalError
end
end
loc_selection = ask_with_menu("Database Location", %w(Internal External), nil, false)
ApplianceConsole::Logging.logger = VMDBLogger.new(LOGFILE)
database_configuration = ApplianceConsole.const_get("#{loc_selection}DatabaseConfiguration").new
begin
database_configuration.ask_questions
rescue ArgumentError => e
say("\nConfiguration failed: #{e.message}\n")
press_any_key
raise MiqSignalError
end
clear_screen
say "Activating the configuration using the following settings...\n"
say "#{database_configuration.friendly_inspect}\n"
if database_configuration.activate
database_configuration.post_activation
say("\nConfiguration activated successfully.\n")
dbhost, dbtype, database = ApplianceConsole::Utilities.db_host_type_database
press_any_key
else
say("\nConfiguration activation failed!\n")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.tmp_config")
say("#{selection}\n\n")
tmp_config = ApplianceConsole::TempStorageConfiguration.new
if tmp_config.ask_questions && tmp_config.activate
say("Temp storage disk configured")
press_any_key
else
say("Temp storage disk not configured")
press_any_key
raise MiqSignalError
end
when I18n.t("advanced_settings.restart")
case ask_with_menu("Restart Option", RE_OPTIONS, nil, false)
when CANCEL
# don't do anything
when RE_RESTART
Env['RESTARTOS'] = are_you_sure?("restart the appliance now")
when RE_DELLOGS
Env['RESTARTOSRMLOGS'] = are_you_sure?("restart the appliance now")
end
when I18n.t("advanced_settings.shutdown")
say("#{selection}\n\n")
if are_you_sure?("shut down the appliance now")
say("\nShutting down appliance... This process may take a few minutes.\n\n")
Env['SHUTDOWN'] = true
end
when I18n.t("advanced_settings.scap")
say("#{selection}\n\n")
ApplianceConsole::Scap.new.lockdown
press_any_key
when I18n.t("advanced_settings.summary")
# Do nothing
when I18n.t("advanced_settings.quit")
break
end
rescue Timeout::Error
break
rescue MiqSignalError
# If a signal is caught anywhere in the inner (after login) loop, go back to the summary screen
next
ensure
if Env.changed?
if (errtext = Env.error)
say("\nAn error occurred:\n\n#{errtext}")
else
say("\nCompleted successfully.")
end
press_any_key
end
Env.clear_errors
end
end
end
|
# Monkey patch Selenium remote elements so Appium tests can read the
# `name` and `type` attributes as plain methods, and so tag_name works.
class Selenium::WebDriver::Element
  # Without this, calling #name raises NoMethodError on remote elements.
  def name
    attribute(:name)
  end

  # Without this, calling #type raises NoMethodError on remote elements.
  def type
    attribute(:type)
  end

  # The remote end raises Selenium::WebDriver::Error::UnknownError
  # ("Not yet implemented") for tag_name; it appears identical to #type.
  def tag_name
    type
  end
end
Print the JSON posted to Appium.
# Re-open Selenium's remote element class to expose attributes needed by
# Appium-driven tests as ordinary reader methods.
class Selenium::WebDriver::Element
  # Remote elements respond to #attribute but not #name; without this patch,
  # NoMethodError: undefined method `name' for #<Selenium::WebDriver::Element
  def name
    self.attribute :name
  end
  # Likewise for #type:
  # NoMethodError: undefined method `type' for #<Selenium::WebDriver::Element
  def type
    self.attribute :type
  end
  # Tag name appears to be the same as type.
  # The remote end raises Selenium::WebDriver::Error::UnknownError
  # ("Not yet implemented") for tag_name, so reuse #type instead.
  def tag_name
    type
  end
end
# Print JSON posted to Appium
# Requires from lib/selenium/webdriver/remote.rb
require 'selenium/webdriver/remote/capabilities'
require 'selenium/webdriver/remote/bridge'
require 'selenium/webdriver/remote/server_error'
require 'selenium/webdriver/remote/response'
require 'selenium/webdriver/remote/commands'
require 'selenium/webdriver/remote/http/common'
require 'selenium/webdriver/remote/http/default'
# Re-open the remote bridge so every wire call is echoed to stdout,
# making it easy to inspect exactly what JSON is posted to Appium.
module Selenium::WebDriver::Remote
  class Bridge
    # Code from lib/selenium/webdriver/remote/bridge.rb
    # Copied from the upstream implementation with three extra `puts`
    # lines that dump the HTTP verb, the resolved path, and the JSON body
    # before the request goes out. Keep in sync with the bundled selenium
    # version — TODO(review): confirm on selenium upgrades.
    def raw_execute(command, opts = {}, command_hash = nil)
      # Look up the HTTP verb and URL template registered for this command.
      verb, path = COMMANDS[command] || raise(ArgumentError, "unknown command: #{command.inspect}")
      path = path.dup
      # Substitute the session-id placeholder when the template carries one.
      path[':session_id'] = @session_id if path.include?(":session_id")
      begin
        # Fill any remaining placeholders (e.g. :id) with escaped values.
        opts.each { |key, value| path[key.inspect] = escaper.escape(value.to_s) }
      rescue IndexError
        raise ArgumentError, "#{opts.inspect} invalid for #{command.inspect}"
      end
      # Debug additions on top of upstream: show what goes over the wire.
      puts verb
      puts path
      puts command_hash.to_json
      puts "-> #{verb.to_s.upcase} #{path}" if $DEBUG
      http.call verb, path, command_hash
    end # def
  end # class
end # module
# Omnibus project definition for Private Chef (Enterprise Chef server).
name "private-chef"
maintainer "Chef Software, Inc."
homepage "http://www.getchef.com"

replaces "private-chef-full"
install_dir "/opt/opscode"
build_version Omnibus::BuildVersion.new.semver
build_iteration 1

# version pins for build tools and bundled services
override :rebar, version: "2.0.0"
override :berkshelf2, version: "2.0.18"
override :rabbitmq, version: "3.3.4"
# Pin erlang as well: the Erlang-based services (rabbitmq, erchef, mover)
# are built and tested against R16B03-1; leaving it floating lets builds
# silently pick up whatever erlang definition is current.
override :erlang, version: "R16B03-1"

# creates required build directories
dependency "preparation"

# needs to be before postgresql, otherwise build problems...
dependency "postgresql91" # for pg_upgrade

# global
dependency "chef-gem" # for embedded chef-solo
dependency "private-chef-scripts" # assorted scripts used by installed instance
dependency "private-chef-ctl" # additional project-specific private-chef-ctl subcommands
dependency "openresty"
dependency "redis-rb" # gem for interacting with redis
dependency "openresty-lpeg" # lua-based routing
dependency "runit"
dependency "unicorn"

# the backend
dependency "couchdb"
dependency "postgresql92"
dependency "rabbitmq"
dependency "redis" # dynamic routing controls
dependency "opscode-solr4"
dependency "opscode-expander"

# We are transitioning away from Sequel toward Sqitch for managing
# Erchef's schema. We still need the old code ('chef-sql-schema') for
# existing upgrades. However, after Enterprise Chef 11's release,
# that will be removed entirely in favor of the new code
# ('enterprise-chef-server-schema').
dependency "chef-sql-schema" # EOL
dependency "keepalived"
dependency "bookshelf"

# the front-end services
dependency "opscode-account"
dependency "oc_bifrost"
dependency "opscode-org-creator"
dependency "opscode-certificate"
dependency "opscode-platform-debug"
dependency "opscode-test"
dependency "oc_id"

# log management
dependency "logrotate"

# partybus and upgrade scripts
dependency "partybus"
dependency "oc_authz_migrator" # migrate authz to bifrost

# used in osc to ec upgrade path
dependency "knife-ec-backup-gem"

# most frequently changed dependencies
# by placing these deps at the end of the build, we can take
# advantage of the git caching and increase build times
# for situations where we're changing these components.
# These are roughly sorted by build time and change frequency,
# with the quickest builds coming last.
dependency "opscode-chef-mover"
dependency "oc_erchef"
dependency "oc-chef-pedant"
dependency "private-chef-upgrades"
dependency "enterprise-chef-server-schema"
dependency "private-chef-cookbooks"

# version manifest file
dependency "version-manifest"

exclude "\.git*"
exclude "bundler\/git"
Build with Erlang R16B03-1 (pin the Erlang version via an omnibus override).
# Omnibus project definition for Private Chef (Enterprise Chef server).
name "private-chef"
maintainer "Chef Software, Inc."
homepage "http://www.getchef.com"

replaces "private-chef-full"
install_dir "/opt/opscode"
build_version Omnibus::BuildVersion.new.semver
build_iteration 1

# version pins for build tools and bundled services
override :rebar, version: "2.0.0"
override :berkshelf2, version: "2.0.18"
override :rabbitmq, version: "3.3.4"
override :erlang, version: "R16B03-1"

# creates required build directories
dependency "preparation"

# needs to be before postgresql, otherwise build problems...
dependency "postgresql91" # for pg_upgrade

# global
dependency "chef-gem" # for embedded chef-solo
dependency "private-chef-scripts" # assorted scripts used by installed instance
dependency "private-chef-ctl" # additional project-specific private-chef-ctl subcommands
dependency "openresty"
dependency "redis-rb" # gem for interacting with redis
dependency "openresty-lpeg" # lua-based routing
dependency "runit"
dependency "unicorn"

# the backend
dependency "couchdb"
dependency "postgresql92"
dependency "rabbitmq"
dependency "redis" # dynamic routing controls
dependency "opscode-solr4"
dependency "opscode-expander"

# We are transitioning away from Sequel toward Sqitch for managing
# Erchef's schema. We still need the old code ('chef-sql-schema') for
# existing upgrades. However, after Enterprise Chef 11's release,
# that will be removed entirely in favor of the new code
# ('enterprise-chef-server-schema').
dependency "chef-sql-schema" # EOL
dependency "keepalived"
dependency "bookshelf"

# the front-end services
dependency "opscode-account"
dependency "oc_bifrost"
dependency "opscode-org-creator"
dependency "opscode-certificate"
dependency "opscode-platform-debug"
dependency "opscode-test"
dependency "oc_id"

# log management
dependency "logrotate"

# partybus and upgrade scripts
dependency "partybus"
dependency "oc_authz_migrator" # migrate authz to bifrost

# used in osc to ec upgrade path
dependency "knife-ec-backup-gem"

# most frequently changed dependencies
# by placing these deps at the end of the build, we can take
# advantage of the git caching and increase build times
# for situations where we're changing these components.
# These are roughly sorted by build time and change frequency,
# with the quickest builds coming last.
dependency "opscode-chef-mover"
dependency "oc_erchef"
dependency "oc-chef-pedant"
dependency "private-chef-upgrades"
dependency "enterprise-chef-server-schema"
dependency "private-chef-cookbooks"

# version manifest file
dependency "version-manifest"

exclude "\.git*"
exclude "bundler\/git"
|
Add a common chef-cleanup software definition shared by the chef and chef-dk omnibus packages.
Signed-off-by: Lamont Granquist <0ab8dc438f73addc98d9ad5925ec8f2b97991703@scriptkiddie.org>
#
# Copyright:: Copyright (c) 2014-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Common cleanup routines for chef + chef-dk omnibus packages
#
# Omnibus software definition: shared post-build cleanup for the chef and
# chef-dk packages. Strips gem caches and documentation trees to shrink
# the final package; nothing here affects runtime behavior.
name "chef-cleanup"
license :project_license
skip_transitive_dependency_licensing true

build do
  # Clear the now-unnecessary git caches, cached gems, and git-checked-out gems
  block "Delete bundler git cache and git installs" do
    # Ask the embedded rubygems where its gem dir is, rather than hard-coding it.
    gemdir = shellout!("#{install_dir}/embedded/bin/gem environment gemdir", env: env).stdout.chomp
    remove_directory "#{gemdir}/cache"
    remove_directory "#{gemdir}/bundler"
    remove_directory "#{gemdir}/doc"
  end

  # Clean up docs: remove every man/info/doc tree shipped by embedded software.
  delete "#{install_dir}/embedded/docs"
  delete "#{install_dir}/embedded/share/man"
  delete "#{install_dir}/embedded/share/doc"
  delete "#{install_dir}/embedded/share/gtk-doc"
  delete "#{install_dir}/embedded/ssl/man"
  delete "#{install_dir}/embedded/man"
  delete "#{install_dir}/embedded/info"
end
|
#
# Copyright:: Copyright (c) 2013 Robby Dyer
# Copyright:: Copyright (c) 2014 GitLab.com
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition for the bundled MySQL 5.5 client libraries
# and command-line tools (built from source via cmake).
name "mysql-client"
default_version "5.5.37"

dependency "openssl"
dependency "zlib"
dependency "ncurses"

source :url => "http://dev.mysql.com/get/Downloads/MySQL-5.5/mysql-5.5.37.tar.gz",
       :md5 => "bf1d80c66d4822ec6036300399a33c03"

relative_path "mysql-#{version}"

env = with_standard_compiler_flags(with_embedded_path)
env.merge!(
  "CXXFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "CPPFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
)

# Force CentOS-5 to use gcc/g++ v4.4
if ohai['platform'] =~ /centos/ and ohai['platform_version'] =~ /^5/
  env.merge!( {
    "CC" => "gcc44",
    "CXX" => "g++44"
  })
end

build do
  command [
    "cmake",
    "-DCMAKE_SKIP_RPATH=YES",
    "-DCMAKE_INSTALL_PREFIX=#{install_dir}/embedded",
    "-DWITH_SSL=system",
    # Use the readline bundled with MySQL so the system readline cannot
    # leak in and break the client. Note this option goes away in MySQL 5.6.5.
    "-DWITH_READLINE=1",
    "-DOPENSSL_INCLUDE_DIR:PATH=#{install_dir}/embedded/include",
    "-DOPENSSL_LIBRARIES:FILEPATH=#{install_dir}/embedded/lib/libssl.so",
    "-DWITH_ZLIB=system",
    "-DZLIB_INCLUDE_DIR:PATH=#{install_dir}/embedded/include",
    "-DZLIB_LIBRARY:FILEPATH=#{install_dir}/embedded/lib/libz.so",
    "-DCRYPTO_LIBRARY:FILEPATH=#{install_dir}/embedded/lib/libcrypto.so",
    ".",
  ].join(" "), :env => env

  # Install only the client-relevant targets, not the server.
  %w{libmysql client include}.each do |target|
    command "make -j #{workers} install", :env => env, :cwd => "#{project_dir}/#{target}"
  end
end
Use bundled readline library to prevent system readline from causing issues
This config option goes away in MySQL 5.6.5.
Closes #1482
#
# Copyright:: Copyright (c) 2013 Robby Dyer
# Copyright:: Copyright (c) 2014 GitLab.com
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition for the bundled MySQL 5.5 client libraries
# and command-line tools (built from source via cmake).
name "mysql-client"
default_version "5.5.37"

dependency "openssl"
dependency "zlib"
dependency "ncurses"

source :url => "http://dev.mysql.com/get/Downloads/MySQL-5.5/mysql-5.5.37.tar.gz",
       :md5 => "bf1d80c66d4822ec6036300399a33c03"

relative_path "mysql-#{version}"

env = with_standard_compiler_flags(with_embedded_path)
env.merge!(
  "CXXFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
  "CPPFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
)

# Force CentOS-5 to use gcc/g++ v4.4
if ohai['platform'] =~ /centos/ and ohai['platform_version'] =~ /^5/
  env.merge!( {
    "CC" => "gcc44",
    "CXX" => "g++44"
  })
end

build do
  command [
    "cmake",
    "-DCMAKE_SKIP_RPATH=YES",
    "-DCMAKE_INSTALL_PREFIX=#{install_dir}/embedded",
    "-DWITH_SSL=system",
    # Use the readline bundled with MySQL, not the system one; the option
    # is removed in MySQL 5.6.5.
    "-DWITH_READLINE=1",
    "-DOPENSSL_INCLUDE_DIR:PATH=#{install_dir}/embedded/include",
    "-DOPENSSL_LIBRARIES:FILEPATH=#{install_dir}/embedded/lib/libssl.so",
    "-DWITH_ZLIB=system",
    "-DZLIB_INCLUDE_DIR:PATH=#{install_dir}/embedded/include",
    "-DZLIB_LIBRARY:FILEPATH=#{install_dir}/embedded/lib/libz.so",
    "-DCRYPTO_LIBRARY:FILEPATH=#{install_dir}/embedded/lib/libcrypto.so",
    ".",
  ].join(" "), :env => env

  # Install only the client-relevant targets, not the server.
  %w{libmysql client include}.each do |target|
    command "make -j #{workers} install", :env => env, :cwd => "#{project_dir}/#{target}"
  end
end
|
# frozen_string_literal: true
class API
# API for Observation
# rubocop:disable Metrics/ClassLength
class ObservationAPI < ModelAPI
self.model = Observation
self.high_detail_page_length = 10
self.low_detail_page_length = 100
self.put_page_length = 1000
self.delete_page_length = 1000
self.high_detail_includes = [
:comments,
{ images: [ :license, :observations ] },
:location,
:name,
{ namings: [:name, { votes: :user }] },
:collection_numbers,
{ herbarium_records: :herbarium },
:sequences,
:user
]
self.low_detail_includes = [
:location,
:name,
:user
]
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/MethodLength
def query_params
n, s, e, w = parse_bounding_box!
{
where: sql_id_condition,
created_at: parse_range(:time, :created_at),
updated_at: parse_range(:time, :updated_at),
date: parse_range(:date, :date, help: :when_seen),
users: parse_array(:user, :user, help: :observer),
names: parse_array(:name, :name, as: :id),
locations: parse_array(:location, :location, as: :id),
herbaria: parse_array(:herbarium, :herbarium, as: :id),
herbarium_records: parse_array(:herbarium_record, :herbarium_record,
as: :id),
projects: parse_array(:project, :project, as: :id),
species_lists: parse_array(:species_list, :species_list, as: :id),
confidence: parse(:confidence, :confidence),
is_collection_location: parse(:boolean, :is_collection_location,
help: 1),
gps_hidden: parse(:boolean, :gps_hidden, help: 1),
has_images: parse(:boolean, :has_images),
has_location: parse(:boolean, :has_location),
has_name: parse(:boolean, :has_name, help: :min_rank),
has_comments: parse(:boolean, :has_comments, limit: true),
has_specimen: parse(:boolean, :has_specimen),
has_notes: parse(:boolean, :has_notes),
has_notes_fields: parse_array(:string, :has_notes_field, help: 1),
notes_has: parse(:string, :notes_has, help: 1),
comments_has: parse(:string, :comments_has, help: 1),
north: n,
south: s,
east: e,
west: w,
region: parse(:string, :region, help: 1)
}.merge(parse_names_parameters)
end
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/MethodLength
def create_params
parse_create_params!
{
when: parse(:date, :date) || Time.zone.today,
place_name: @location,
lat: @latitude,
long: @longitude,
alt: @altitude,
specimen: @has_specimen,
is_collection_location: parse(:boolean, :is_collection_location,
default: true, help: 1),
gps_hidden: parse(:boolean, :gps_hidden, default: false,
help: 1),
notes: @notes,
thumb_image: @thumbnail,
images: @images,
projects: parse_array(:project, :projects, must_be_member: true) || [],
species_lists: parse_array(:species_list, :species_lists,
must_have_edit_permission: true) || [],
name: @name,
user: @user
}
end
def update_params
parse_update_params!
{
when: parse(:date, :set_date),
place_name: parse(:place_name, :set_location, limit: 1024,
not_blank: true),
lat: @latitude,
long: @longitude,
alt: @altitude,
specimen: parse(:boolean, :set_has_specimen),
is_collection_location: parse(:boolean, :set_is_collection_location,
help: 1),
gps_hidden: parse(:boolean, :gps_hidden, help: 1),
thumb_image: @thumbnail
}
end
def validate_create_params!(params)
make_sure_both_latitude_and_longitude!
make_sure_has_specimen_set!
make_sure_location_provided!
check_for_unknown_location!(params)
end
def after_create(obs)
obs.log(:log_observation_created) if @log
create_specimen_records(obs) if obs.specimen
naming = obs.namings.create(name: @name)
obs.change_vote(naming, @vote, user)
end
def validate_update_params!(params)
check_for_unknown_location!(params)
raise(MissingSetParameters.new) if params.empty? && no_adds_or_removes?
end
def build_setter(params)
lambda do |obs|
must_have_edit_permission!(obs)
update_notes_fields(obs)
obs.update!(params)
update_images(obs)
update_projects(obs)
update_species_lists(obs)
obs.log(:log_observation_updated) if @log
obs
end
end
############################################################################
private
def create_specimen_records(obs)
provide_specimen_defaults(obs)
if @collection_number
CollectionNumber.create!(
user: user,
name: @collectors_name,
number: @collection_number
).add_observation(obs)
end
return unless @herbarium
HerbariumRecord.create!(
herbarium: @herbarium,
user: user,
initial_det: @initial_det,
accession_number: @accession_number
).add_observation(obs)
end
def provide_specimen_defaults(obs)
@herbarium ||= user.personal_herbarium ||
user.create_personal_herbarium
@collectors_name ||= user.legal_name
@initial_det ||= @name.text_name
# Disable cop because we're creating a bunch of instance variables,
# rather than trying to memoize provide_specimen_defaults
# rubocop:disable Naming/MemoizedInstanceVariableName
@accession_number ||= if @collection_number
"#{@collectors_name} #{@collection_number}"
else
"MO #{obs.id}"
end
# rubocop:enable Naming/MemoizedInstanceVariableName
end
# Apply the parsed notes hash (@notes) to the observation's notes: a blank
# value deletes the field, anything else overwrites it. Mutates obs.notes
# in place; the record itself is saved by the caller's obs.update!.
def update_notes_fields(obs)
  @notes.each do |key, val|
    if val.blank?
      obs.notes.delete(key)
    else
      obs.notes[key] = val
    end
  end
end
def update_images(obs)
@add_images.each do |img|
obs.images << img unless obs.images.include?(img)
end
obs.images.delete(*@remove_images)
return unless @remove_images.include?(obs.thumb_image)
obs.update!(thumb_image: obs.images.first)
end
def update_projects(obs)
return unless @add_to_project || @remove_from_project
raise(MustBeOwner.new(obs)) if obs.user != @user
obs.projects.push(@add_to_project) if @add_to_project
obs.projects.delete(@remove_from_project) if @remove_from_project
end
def update_species_lists(obs)
obs.species_lists.push(@add_to_list) if @add_to_list
obs.species_lists.delete(@remove_from_list) if @remove_from_list
end
# --------------------
# Parsing
# --------------------
def parse_create_params!
@name = parse(:name, :name, default: Name.unknown)
@vote = parse(:float, :vote, default: Vote.maximum_vote)
@log = parse(:boolean, :log, default: true, help: 1)
@notes = parse_notes_fields!
parse_herbarium_and_specimen!
parse_location_and_coordinates!
parse_images_and_pick_thumbnail
end
def parse_update_params!
@log = parse(:boolean, :log, default: true, help: 1)
parse_set_coordinates!
parse_set_images!
parse_set_projects!
parse_set_species_lists!
@notes = parse_notes_fields!(:set)
end
# Read the :images and :thumbnail parameters. The thumbnail defaults to the
# first image; an explicitly-given thumbnail that is not already among the
# images is prepended so it becomes part of the observation's image set.
def parse_images_and_pick_thumbnail
  @images = parse_array(:image, :images) || []
  @thumbnail = parse(:image, :thumbnail) || @images.first
  if @thumbnail && !@images.include?(@thumbnail)
    @images.unshift(@thumbnail)
  end
end
def parse_notes_fields!(set = false)
prefix = set ? "set_" : ""
notes = Observation.no_notes
other = parse(:string, :"#{prefix}notes")
notes[Observation.other_notes_key] = other unless other.nil?
params.each do |key, val|
next unless (match = key.to_s.match(/^#{prefix}notes\[(.*)\]$/))
field = parse_notes_field_parameter!(match[1])
notes[field] = val.to_s.strip
ignore_parameter(key)
end
declare_parameter(:"#{prefix}notes[$field]", :string, help: :notes_field)
return notes if set
notes.delete_if { |_key, val| val.blank? }
notes
end
def parse_notes_field_parameter!(str)
keys = User.parse_notes_template(str)
return keys.first.to_sym if keys.length == 1
raise(BadNotesFieldParameter.new(str))
end
def parse_set_coordinates!
@latitude = parse(:latitude, :set_latitude)
@longitude = parse(:longitude, :set_longitude)
@altitude = parse(:altitude, :set_altitude)
return unless @latitude && !@longitude || @longitude && !@latitude
raise(LatLongMustBothBeSet.new)
end
def parse_set_images!
@thumbnail = parse(:image, :set_thumbnail,
must_have_edit_permission: true)
@add_images = parse_array(:image, :add_images,
must_have_edit_permission: true) || []
@remove_images = parse_array(:image, :remove_images) || []
return if !@thumbnail || @add_images.include?(@thumbnail)
@add_images.unshift(@thumbnail)
end
def parse_set_projects!
@add_to_project = parse(:project, :add_to_project,
must_be_member: true)
@remove_from_project = parse(:project, :remove_from_project)
end
def parse_set_species_lists!
@add_to_list = parse(:species_list, :add_to_species_list,
must_have_edit_permission: true)
@remove_from_list = parse(:species_list, :remove_from_species_list,
must_have_edit_permission: true)
end
def parse_location_and_coordinates!
@location = parse(:place_name, :location, limit: 1024)
@latitude = parse(:latitude, :latitude)
@longitude = parse(:longitude, :longitude)
@altitude = parse(:altitude, :altitude)
end
def parse_herbarium_and_specimen!
@herbarium = parse(:herbarium, :herbarium)
@collectors_name = parse(:string, :collectors_name)
@collection_number = parse(:string, :collection_number)
@initial_det = parse(:string, :initial_det, help: 1)
@accession_number = parse(:string, :accession_number, help: 1)
default = @herbarium || @collection_number ||
@accession_number || false
@has_specimen = parse(:boolean, :has_specimen)
@has_specimen = default if @has_specimen.nil?
end
# --------------------
# Validation
# --------------------
def no_adds_or_removes?
@add_images.empty? && @remove_images.empty? &&
!@add_to_project && !@remove_from_project &&
!@add_to_list && !@remove_from_list &&
@notes.empty?
end
# Latitude and longitude must be supplied together: raise if exactly one
# of the pair was given.
def make_sure_both_latitude_and_longitude!
  both    = @latitude && @longitude
  neither = !@latitude && !@longitude
  raise(LatLongMustBothBeSet.new) unless both || neither
end
def make_sure_has_specimen_set!
return if @has_specimen
error_class = CanOnlyUseThisFieldIfHasSpecimen
raise(error_class.new(:herbarium)) if @herbarium
raise(error_class.new(:collectors_name)) if @collectors_name
raise(error_class.new(:collection_number)) if @collection_number
raise(error_class.new(:initial_det)) if @initial_det
raise(error_class.new(:accession_number)) if @accession_number
end
def make_sure_location_provided!
raise(MissingParameter.new(:location)) unless @location
end
def check_for_unknown_location!(params)
place = params[:place_name]
return unless place && Location.is_unknown?(place)
params[:place_name] = Location.unknown.name
end
end
# rubocop:enable Metrics/ClassLength
end
Fix Codeclimate issue left over from PR 662
# frozen_string_literal: true
class API
# API for Observation
# rubocop:disable Metrics/ClassLength
class ObservationAPI < ModelAPI
self.model = Observation
self.high_detail_page_length = 10
self.low_detail_page_length = 100
self.put_page_length = 1000
self.delete_page_length = 1000
self.high_detail_includes = [
:comments,
{ images: [:license, :observations] },
:location,
:name,
{ namings: [:name, { votes: :user }] },
:collection_numbers,
{ herbarium_records: :herbarium },
:sequences,
:user
]
self.low_detail_includes = [
:location,
:name,
:user
]
# rubocop:disable Metrics/AbcSize
# rubocop:disable Metrics/MethodLength
def query_params
n, s, e, w = parse_bounding_box!
{
where: sql_id_condition,
created_at: parse_range(:time, :created_at),
updated_at: parse_range(:time, :updated_at),
date: parse_range(:date, :date, help: :when_seen),
users: parse_array(:user, :user, help: :observer),
names: parse_array(:name, :name, as: :id),
locations: parse_array(:location, :location, as: :id),
herbaria: parse_array(:herbarium, :herbarium, as: :id),
herbarium_records: parse_array(:herbarium_record, :herbarium_record,
as: :id),
projects: parse_array(:project, :project, as: :id),
species_lists: parse_array(:species_list, :species_list, as: :id),
confidence: parse(:confidence, :confidence),
is_collection_location: parse(:boolean, :is_collection_location,
help: 1),
gps_hidden: parse(:boolean, :gps_hidden, help: 1),
has_images: parse(:boolean, :has_images),
has_location: parse(:boolean, :has_location),
has_name: parse(:boolean, :has_name, help: :min_rank),
has_comments: parse(:boolean, :has_comments, limit: true),
has_specimen: parse(:boolean, :has_specimen),
has_notes: parse(:boolean, :has_notes),
has_notes_fields: parse_array(:string, :has_notes_field, help: 1),
notes_has: parse(:string, :notes_has, help: 1),
comments_has: parse(:string, :comments_has, help: 1),
north: n,
south: s,
east: e,
west: w,
region: parse(:string, :region, help: 1)
}.merge(parse_names_parameters)
end
# rubocop:enable Metrics/AbcSize
# rubocop:enable Metrics/MethodLength
def create_params
parse_create_params!
{
when: parse(:date, :date) || Time.zone.today,
place_name: @location,
lat: @latitude,
long: @longitude,
alt: @altitude,
specimen: @has_specimen,
is_collection_location: parse(:boolean, :is_collection_location,
default: true, help: 1),
gps_hidden: parse(:boolean, :gps_hidden, default: false,
help: 1),
notes: @notes,
thumb_image: @thumbnail,
images: @images,
projects: parse_array(:project, :projects, must_be_member: true) || [],
species_lists: parse_array(:species_list, :species_lists,
must_have_edit_permission: true) || [],
name: @name,
user: @user
}
end
def update_params
parse_update_params!
{
when: parse(:date, :set_date),
place_name: parse(:place_name, :set_location, limit: 1024,
not_blank: true),
lat: @latitude,
long: @longitude,
alt: @altitude,
specimen: parse(:boolean, :set_has_specimen),
is_collection_location: parse(:boolean, :set_is_collection_location,
help: 1),
gps_hidden: parse(:boolean, :gps_hidden, help: 1),
thumb_image: @thumbnail
}
end
def validate_create_params!(params)
make_sure_both_latitude_and_longitude!
make_sure_has_specimen_set!
make_sure_location_provided!
check_for_unknown_location!(params)
end
def after_create(obs)
obs.log(:log_observation_created) if @log
create_specimen_records(obs) if obs.specimen
naming = obs.namings.create(name: @name)
obs.change_vote(naming, @vote, user)
end
def validate_update_params!(params)
check_for_unknown_location!(params)
raise(MissingSetParameters.new) if params.empty? && no_adds_or_removes?
end
def build_setter(params)
lambda do |obs|
must_have_edit_permission!(obs)
update_notes_fields(obs)
obs.update!(params)
update_images(obs)
update_projects(obs)
update_species_lists(obs)
obs.log(:log_observation_updated) if @log
obs
end
end
############################################################################
private
def create_specimen_records(obs)
provide_specimen_defaults(obs)
if @collection_number
CollectionNumber.create!(
user: user,
name: @collectors_name,
number: @collection_number
).add_observation(obs)
end
return unless @herbarium
HerbariumRecord.create!(
herbarium: @herbarium,
user: user,
initial_det: @initial_det,
accession_number: @accession_number
).add_observation(obs)
end
def provide_specimen_defaults(obs)
@herbarium ||= user.personal_herbarium ||
user.create_personal_herbarium
@collectors_name ||= user.legal_name
@initial_det ||= @name.text_name
# Disable cop because we're creating a bunch of instance variables,
# rather than trying to memoize provide_specimen_defaults
# rubocop:disable Naming/MemoizedInstanceVariableName
@accession_number ||= if @collection_number
"#{@collectors_name} #{@collection_number}"
else
"MO #{obs.id}"
end
# rubocop:enable Naming/MemoizedInstanceVariableName
end
def update_notes_fields(obs)
@notes.each do |key, val|
if val.blank?
obs.notes.delete(key)
else
obs.notes[key] = val
end
end
end
def update_images(obs)
@add_images.each do |img|
obs.images << img unless obs.images.include?(img)
end
obs.images.delete(*@remove_images)
return unless @remove_images.include?(obs.thumb_image)
obs.update!(thumb_image: obs.images.first)
end
def update_projects(obs)
return unless @add_to_project || @remove_from_project
raise(MustBeOwner.new(obs)) if obs.user != @user
obs.projects.push(@add_to_project) if @add_to_project
obs.projects.delete(@remove_from_project) if @remove_from_project
end
def update_species_lists(obs)
obs.species_lists.push(@add_to_list) if @add_to_list
obs.species_lists.delete(@remove_from_list) if @remove_from_list
end
# --------------------
# Parsing
# --------------------
def parse_create_params!
@name = parse(:name, :name, default: Name.unknown)
@vote = parse(:float, :vote, default: Vote.maximum_vote)
@log = parse(:boolean, :log, default: true, help: 1)
@notes = parse_notes_fields!
parse_herbarium_and_specimen!
parse_location_and_coordinates!
parse_images_and_pick_thumbnail
end
def parse_update_params!
@log = parse(:boolean, :log, default: true, help: 1)
parse_set_coordinates!
parse_set_images!
parse_set_projects!
parse_set_species_lists!
@notes = parse_notes_fields!(:set)
end
def parse_images_and_pick_thumbnail
@images = parse_array(:image, :images) || []
@thumbnail = parse(:image, :thumbnail) || @images.first
return if !@thumbnail || @images.include?(@thumbnail)
@images.unshift(@thumbnail)
end
def parse_notes_fields!(set = false)
prefix = set ? "set_" : ""
notes = Observation.no_notes
other = parse(:string, :"#{prefix}notes")
notes[Observation.other_notes_key] = other unless other.nil?
params.each do |key, val|
next unless (match = key.to_s.match(/^#{prefix}notes\[(.*)\]$/))
field = parse_notes_field_parameter!(match[1])
notes[field] = val.to_s.strip
ignore_parameter(key)
end
declare_parameter(:"#{prefix}notes[$field]", :string, help: :notes_field)
return notes if set
notes.delete_if { |_key, val| val.blank? }
notes
end
# Convert the field name from a "notes[...]" request parameter into a
# notes-key symbol. Raises BadNotesFieldParameter unless the string maps
# to exactly one key in the user's notes template.
def parse_notes_field_parameter!(str)
  keys = User.parse_notes_template(str)
  return keys.first.to_sym if keys.length == 1
  raise(BadNotesFieldParameter.new(str))
end
def parse_set_coordinates!
@latitude = parse(:latitude, :set_latitude)
@longitude = parse(:longitude, :set_longitude)
@altitude = parse(:altitude, :set_altitude)
return unless @latitude && !@longitude || @longitude && !@latitude
raise(LatLongMustBothBeSet.new)
end
def parse_set_images!
@thumbnail = parse(:image, :set_thumbnail,
must_have_edit_permission: true)
@add_images = parse_array(:image, :add_images,
must_have_edit_permission: true) || []
@remove_images = parse_array(:image, :remove_images) || []
return if !@thumbnail || @add_images.include?(@thumbnail)
@add_images.unshift(@thumbnail)
end
def parse_set_projects!
@add_to_project = parse(:project, :add_to_project,
must_be_member: true)
@remove_from_project = parse(:project, :remove_from_project)
end
def parse_set_species_lists!
@add_to_list = parse(:species_list, :add_to_species_list,
must_have_edit_permission: true)
@remove_from_list = parse(:species_list, :remove_from_species_list,
must_have_edit_permission: true)
end
def parse_location_and_coordinates!
@location = parse(:place_name, :location, limit: 1024)
@latitude = parse(:latitude, :latitude)
@longitude = parse(:longitude, :longitude)
@altitude = parse(:altitude, :altitude)
end
def parse_herbarium_and_specimen!
@herbarium = parse(:herbarium, :herbarium)
@collectors_name = parse(:string, :collectors_name)
@collection_number = parse(:string, :collection_number)
@initial_det = parse(:string, :initial_det, help: 1)
@accession_number = parse(:string, :accession_number, help: 1)
default = @herbarium || @collection_number ||
@accession_number || false
@has_specimen = parse(:boolean, :has_specimen)
@has_specimen = default if @has_specimen.nil?
end
# --------------------
# Validation
# --------------------
# True when a PUT supplies no image additions/removals, no project or
# species-list membership changes, and no notes changes — i.e. there is
# nothing at all to update.
def no_adds_or_removes?
  collections_untouched = @add_images.empty? && @remove_images.empty? && @notes.empty?
  memberships_untouched = [@add_to_project, @remove_from_project,
                           @add_to_list, @remove_from_list].none?
  collections_untouched && memberships_untouched
end
def make_sure_both_latitude_and_longitude!
return if @latitude && @longitude || !@longitude && !@latitude
raise(LatLongMustBothBeSet.new)
end
def make_sure_has_specimen_set!
return if @has_specimen
error_class = CanOnlyUseThisFieldIfHasSpecimen
raise(error_class.new(:herbarium)) if @herbarium
raise(error_class.new(:collectors_name)) if @collectors_name
raise(error_class.new(:collection_number)) if @collection_number
raise(error_class.new(:initial_det)) if @initial_det
raise(error_class.new(:accession_number)) if @accession_number
end
# A location is mandatory; raise when it was not supplied.
def make_sure_location_provided!
  return if @location
  raise(MissingParameter.new(:location))
end
# Normalize "unknown" place names to the canonical unknown Location's
# name. Mutates +params+ in place; leaves other names untouched.
def check_for_unknown_location!(params)
  name = params[:place_name]
  return if !name || !Location.is_unknown?(name)
  params[:place_name] = Location.unknown.name
end
end
# rubocop:enable Metrics/ClassLength
end
|
require 'set'
require 'logger'
require 'rb-inotify'
require 'aws-sdk'
require 'thread/pool'
require 'concurrent'
require 'thread_safe'
require_relative 's3_write_stream'
module S3reamer
  # Watches a directory tree via inotify and streams every regular file
  # written under it up to an S3 bucket.
  class DirectoryStreamer
    DEFAULT_OPTIONS = {
      pool_size: 4,              # concurrent upload workers
      log_level: Logger::INFO,
      reader_sleep_interval: 1,  # seconds between read polls of an open file
      reader_timeout: 10         # stop reading if a file stays open this long
    }

    attr_reader :options

    # +options+ may override any key in DEFAULT_OPTIONS.
    def initialize(options = {})
      @options = DEFAULT_OPTIONS.merge(options)
      @log = Logger.new(STDOUT)
      # FIX: the log_level option was accepted but never applied.
      @log.level = @options[:log_level]
    end

    # Blocks in the inotify run loop, uploading each file opened under
    # +directory+ to +bucket+ (an Aws::S3::Bucket-like object).
    def stream_directory(directory:, bucket:)
      file_statuses = ThreadSafe::Hash.new
      dir_watch = INotify::Notifier.new
      pool = Thread.pool(options[:pool_size])

      dir_watch.watch(directory, :open, :close, :recursive) do |e|
        filename = e.absolute_name
        # File.exists? is deprecated (removed in Ruby 3.2); skip directories.
        next unless File.exist?(filename) && !File.directory?(filename)

        # An "open" for a file we are already streaming is a no-op.
        next if e.flags.include?(:open) && file_statuses.include?(filename)

        # A "close" tells the worker thread to finish the upload.
        if e.flags.include?(:close) && file_statuses.include?(filename)
          file_statuses[filename] = :close
          next
        end

        log.info "File opened: #{filename}"
        file_statuses[filename] = :open

        pool.process {
          log.debug "Starting process for: #{filename}"
          # S3 object keys must not begin with "/"; drop the leading slash.
          obj = bucket.object(filename[1..-1])
          io = S3reamer::S3WriteStream.new(obj)
          log.debug "Initialized S3 streamer"

          open(filename) do |file|
            start_time = Time.now
            while file_statuses[filename] == :open &&
                  (start_time + options[:reader_timeout]) > Time.now
              b = file.read
              io.write(b)
              log.debug "Read #{b.length} bytes"
              # FIX: without this sleep the loop busy-waits at 100% CPU;
              # the reader_sleep_interval option existed but was unused.
              sleep options[:reader_sleep_interval]
            end
            log.info "File closed. Completing S3 upload: #{filename}"
          end

          io.close
          file_statuses.delete(filename)
        }
      end

      dir_watch.run
      pool.shutdown
    end

    private

    def log
      @log
    end
  end
end
Add a sleep to the reader loop to avoid busy-waiting
require 'set'
require 'logger'
require 'rb-inotify'
require 'aws-sdk'
require 'thread/pool'
require 'concurrent'
require 'thread_safe'
require_relative 's3_write_stream'
module S3reamer
  # Watches a directory tree via inotify and streams every regular file
  # written under it up to an S3 bucket.
  class DirectoryStreamer
    DEFAULT_OPTIONS = {
      pool_size: 4,              # concurrent upload workers
      log_level: Logger::INFO,
      reader_sleep_interval: 1,  # seconds between read polls of an open file
      reader_timeout: 10         # stop reading if a file stays open this long
    }

    attr_reader :options

    # +options+ may override any key in DEFAULT_OPTIONS.
    def initialize(options = {})
      @options = DEFAULT_OPTIONS.merge(options)
      @log = Logger.new(STDOUT)
      # FIX: the log_level option was accepted but never applied.
      @log.level = @options[:log_level]
    end

    # Blocks in the inotify run loop, uploading each file opened under
    # +directory+ to +bucket+ (an Aws::S3::Bucket-like object).
    def stream_directory(directory:, bucket:)
      file_statuses = ThreadSafe::Hash.new
      dir_watch = INotify::Notifier.new
      pool = Thread.pool(options[:pool_size])

      dir_watch.watch(directory, :open, :close, :recursive) do |e|
        filename = e.absolute_name
        # File.exists? is deprecated (removed in Ruby 3.2); skip directories.
        next unless File.exist?(filename) && !File.directory?(filename)

        # An "open" for a file we are already streaming is a no-op.
        next if e.flags.include?(:open) && file_statuses.include?(filename)

        # A "close" tells the worker thread to finish the upload.
        if e.flags.include?(:close) && file_statuses.include?(filename)
          file_statuses[filename] = :close
          next
        end

        log.info "File opened: #{filename}"
        file_statuses[filename] = :open

        pool.process {
          log.debug "Starting process for: #{filename}"
          # S3 object keys must not begin with "/"; drop the leading slash.
          obj = bucket.object(filename[1..-1])
          io = S3reamer::S3WriteStream.new(obj)
          log.debug "Initialized S3 streamer"

          open(filename) do |file|
            start_time = Time.now
            while file_statuses[filename] == :open &&
                  (start_time + options[:reader_timeout]) > Time.now
              b = file.read
              io.write(b)
              log.debug "Read #{b.length} bytes"
              # Avoid busy-waiting between polls of the still-open file.
              sleep options[:reader_sleep_interval]
            end
            log.info "File closed. Completing S3 upload: #{filename}"
          end

          io.close
          file_statuses.delete(filename)
        }
      end

      dir_watch.run
      pool.shutdown
    end

    private

    def log
      @log
    end
  end
end
|
# Gem metadata for the voldemort-rb client (v0.1.8).
Gem::Specification.new do |s|
  s.name = 'voldemort-rb'
  s.version = '0.1.8'
  s.summary = %{A Ruby client for the Voldemort distributed key value store}
  # FIX: typo "descentralized" -> "decentralized".
  s.description = %Q{voldemort-rb allows you to connect to the Voldemort decentralized key value store.}
  s.authors = ["Alejandro Crosa"]
  s.email = ["alejandrocrosa@gmail.com"]
  s.homepage = "http://github.com/acrosa/voldemort-rb"
  # Explicit file list shipped in the gem.
  s.files = [
    "CHANGELOG",
    "LICENSE",
    "README.md",
    "Rakefile",
    "lib/voldemort-rb.rb",
    "lib/voldemort-serializer.rb",
    "lib/connection/connection.rb",
    "lib/connection/tcp_connection.rb",
    "lib/connection/voldemort_node.rb",
    "lib/protos/voldemort-client.pb.rb",
    "lib/protos/voldemort-client.proto",
    "spec/connection_spec.rb",
    "spec/tcp_connection_spec.rb",
    "spec/voldemort_node_spec.rb",
    "spec/voldemort_client_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.require_paths = ["lib"]
  s.add_dependency('ruby_protobuf', '>= 0.3.3')
  s.add_dependency('nokogiri', '>= 1.4.3.1')
end
bumped version
# Gem metadata for the voldemort-rb client (v0.1.9).
Gem::Specification.new do |s|
  s.name = 'voldemort-rb'
  s.version = '0.1.9'
  s.summary = %{A Ruby client for the Voldemort distributed key value store}
  # FIX: typo "descentralized" -> "decentralized".
  s.description = %Q{voldemort-rb allows you to connect to the Voldemort decentralized key value store.}
  s.authors = ["Alejandro Crosa"]
  s.email = ["alejandrocrosa@gmail.com"]
  s.homepage = "http://github.com/acrosa/voldemort-rb"
  # Explicit file list shipped in the gem.
  s.files = [
    "CHANGELOG",
    "LICENSE",
    "README.md",
    "Rakefile",
    "lib/voldemort-rb.rb",
    "lib/voldemort-serializer.rb",
    "lib/connection/connection.rb",
    "lib/connection/tcp_connection.rb",
    "lib/connection/voldemort_node.rb",
    "lib/protos/voldemort-client.pb.rb",
    "lib/protos/voldemort-client.proto",
    "spec/connection_spec.rb",
    "spec/tcp_connection_spec.rb",
    "spec/voldemort_node_spec.rb",
    "spec/voldemort_client_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.require_paths = ["lib"]
  s.add_dependency('ruby_protobuf', '>= 0.3.3')
  s.add_dependency('nokogiri', '>= 1.4.3.1')
end
|
# Gem metadata for the voldemort-rb client (v0.1.1).
Gem::Specification.new do |s|
  s.name = 'voldemort-rb'
  s.version = '0.1.1'
  s.summary = %{A Ruby client for the Voldemort distributed key value store}
  # FIX: typo "descentralized" -> "decentralized".
  s.description = %Q{voldemort-rb allows you to connect to the Voldemort decentralized key value store.}
  s.authors = ["Alejandro Crosa"]
  s.email = ["alejandrocrosa@gmail.com"]
  s.homepage = "http://github.com/acrosa/voldemort-rb"
  # Explicit file list shipped in the gem.
  s.files = [
    "CHANGELOG",
    "MIT-LICENSE",
    "README.md",
    "Rakefile",
    "lib/voldemort-rb.rb",
    "lib/connection/connection.rb",
    "lib/connection/tcp_connection.rb",
    "lib/connection/voldemort_node.rb",
    "lib/protos/voldemort-client.pb.rb",
    "lib/protos/voldemort-client.proto",
    "spec/connection_spec.rb",
    "spec/tcp_connection_spec.rb",
    "spec/voldemort_node_spec.rb",
    "spec/voldemort_client_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.require_paths = ["lib"]
end
bumped version due to critical fix
# Gem metadata for the voldemort-rb client (v0.1.2).
Gem::Specification.new do |s|
  s.name = 'voldemort-rb'
  s.version = '0.1.2'
  s.summary = %{A Ruby client for the Voldemort distributed key value store}
  # FIX: typo "descentralized" -> "decentralized".
  s.description = %Q{voldemort-rb allows you to connect to the Voldemort decentralized key value store.}
  s.authors = ["Alejandro Crosa"]
  s.email = ["alejandrocrosa@gmail.com"]
  s.homepage = "http://github.com/acrosa/voldemort-rb"
  # Explicit file list shipped in the gem.
  s.files = [
    "CHANGELOG",
    "MIT-LICENSE",
    "README.md",
    "Rakefile",
    "lib/voldemort-rb.rb",
    "lib/connection/connection.rb",
    "lib/connection/tcp_connection.rb",
    "lib/connection/voldemort_node.rb",
    "lib/protos/voldemort-client.pb.rb",
    "lib/protos/voldemort-client.proto",
    "spec/connection_spec.rb",
    "spec/tcp_connection_spec.rb",
    "spec/voldemort_node_spec.rb",
    "spec/voldemort_client_spec.rb",
    "spec/spec_helper.rb"
  ]
  s.require_paths = ["lib"]
end
|
# coding: utf-8
# Gem metadata for direct-api-v5, a client for the Yandex Direct API V5.
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'direct/api/v5/version'
Gem::Specification.new do |spec|
  spec.name          = 'direct-api-v5'
  spec.version       = Direct::API::V5::VERSION
  spec.authors       = ['Khrebtov Roman']
  spec.email         = ['roman@alltmb.ru']
  spec.summary       = 'Client for Yandex Direct API V5'
  spec.description   = 'Client for Yandex Direct API V5'
  spec.homepage      = 'https://github.com/Hrom512/direct-api-v5'
  spec.license       = 'MIT'
  # Package everything tracked by git; expose bin/ scripts as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']
  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake'
end
fix gemspec
# coding: utf-8
# Gem metadata for direct-api-v5, a client for the Yandex Direct API V5.
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'direct/api/v5/version'
Gem::Specification.new do |spec|
  spec.name          = 'direct-api-v5'
  spec.version       = Direct::API::V5::VERSION
  spec.author        = 'Khrebtov Roman'
  spec.email         = 'roman@alltmb.ru'
  spec.summary       = 'Yandex Direct API V5'
  spec.description   = 'Ruby client for Yandex Direct API V5'
  spec.homepage      = 'https://github.com/Hrom512/direct-api-v5'
  spec.license       = 'MIT'
  # Package everything tracked by git; expose bin/ scripts as executables.
  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']
  spec.required_ruby_version = '>= 2.0'
  spec.add_development_dependency 'bundler', '~> 1.6'
  spec.add_development_dependency 'rake', '~> 10.1'
end
|
add array.rb
# -*- coding: utf-8 -*-
require 'date'

# Parallel arrays: element i of every array describes the same book.
titles = ["本のタイトル1", "本のタイトル2", ]
authors = ["著者1", "著者2", ]
yomies = ["ちょしゃいち", "ちょしゃに", ]
publishers = ["出版社1", "出版社2", ]
pages = ["100", "200", ]
prices = ["1000", "2000", ]
purchase_prices = ["1080", "2160", ]
isbn_10s = ["1234567890", "1234567890", ]
isbn_13s = ["1234567890123", "1234567890123", ]
sizes = ["size1", "size2", ]
publish_dates = [ Date.new( 2005, 1, 25 ), Date.new( 2006, 2, 28 ), ]
purchase_dates = [ Date.new( 2015, 1, 25 ), Date.new( 2015, 2, 28 ), ]

# Print every book's details.
titles.size.times { |i|
  puts "------------------"
  puts "書籍名: " + titles[i]
  puts "著者名: " + authors[i]
  puts "よみがな: " + yomies[i]
  puts "出版社: " + publishers[i]
  puts "ページ数: " + pages[i]
  # FIX: the sale price was suffixed with "ページ" (pages); use "円" (yen)
  # to match the purchase-price line below.
  puts "販売価格: " + prices[i].to_s + "円"
  puts "購入費用: " + purchase_prices[i].to_s + "円"
  puts "ISBN_10: " + isbn_10s[i]
  puts "ISBN_13: " + isbn_13s[i]
  puts "サイズ: " + sizes[i]
  puts "発刊日: " + publish_dates[i].to_s
  puts "購入日: " + purchase_dates[i].to_s
}
|
# JSON/HTML authentication endpoints backed by Devise helpers
# (sign_in / sign_out / valid_password?).
class AuthController < ApplicationController
  # Sign the current user out; JSON callers get `true`, HTML goes home.
  def sign_out_action
    sign_out :user
    respond_to do |format|
      format.json { render json: true }
      format.html { redirect_to(root_url) }
    end
  end

  # Sign in with a password plus either an email (contains "@") or a
  # username; lookup is case-insensitive.
  def sign_in_action
    return error! "Missing parameters", 400 unless [:email, :password].none? { |x| params[x].blank? }
    key = params[:email].include?('@') ? :email : :name
    user = User.where("lower(#{key}) = ?", params[:email].downcase).first
    if user && user.valid_password?(params[:password])
      sign_in :user, user
      render json: true
    else
      error! "Username or password incorrect", 401
    end
  end

  # Register a new account; name and email must be unique (case-insensitive).
  def sign_up_action
    return error! "Missing parameters", 400 unless [:username, :email, :password].none? { |x| params[x].blank? }
    return error! "User with that email already exists", 409 if User.exists?(['lower(email) = ?', params[:email].downcase])
    return error! "User with that name already exists", 409 if User.exists?(['lower(name) = ?', params[:username].downcase])
    user = User.new({
      name: params[:username],
      email: params[:email],
      password: params[:password]
    })
    user.save!
    # FIX: use the explicit :user scope, consistent with sign_in_action.
    sign_in :user, user
    render json: user
  end
end
resolve rubocop offenses for auth_controller.rb
# JSON/HTML authentication endpoints backed by Devise helpers.
class AuthController < ApplicationController
  # Sign the current user out; JSON callers get `true`, HTML goes home.
  def sign_out_action
    sign_out :user
    respond_to do |format|
      format.json { render json: true }
      format.html { redirect_to(root_url) }
    end
  end

  # Sign in with a password plus either an email (contains "@") or a
  # username; lookup is case-insensitive.
  def sign_in_action
    if %i(email password).any? { |field| params[field].blank? }
      return error! 'Missing parameters', 400
    end
    lookup_column = params[:email].include?('@') ? :email : :name
    user = User.where("lower(#{lookup_column}) = ?", params[:email].downcase).first
    if user && user.valid_password?(params[:password])
      sign_in :user, user
      render json: true
    else
      error! 'Username or password incorrect', 401
    end
  end

  # Register a new account; name and email must be unique (case-insensitive).
  def sign_up_action
    if %i(username email password).any? { |field| params[field].blank? }
      return error! 'Missing parameters', 400
    end
    return error! 'User with that email already exists', 409 if User.exists?(
      ['lower(email) = ?', params[:email].downcase])
    return error! 'User with that name already exists', 409 if User.exists?(
      ['lower(name) = ?', params[:username].downcase])
    new_user = User.new(
      name: params[:username],
      email: params[:email],
      password: params[:password]
    )
    new_user.save!
    sign_in new_user
    render json: new_user
  end
end
|
# CRUD for bots plus collaborator management; access is controlled via
# rolify roles (:owner / :collaborator) scoped to each bot.
class BotsController < ApplicationController
  before_action :set_bot, only: [:show, :edit, :update, :destroy]
  before_action :authenticate_user!, except: [:index, :show]
  before_action :check_bot_ownership, only: [:edit, :update, :destroy]

  # GET /bots
  # GET /bots.json
  def index
    @bots = Bot.all
  end

  # GET /bots/1
  # GET /bots/1.json
  def show
  end

  # GET /bots/new
  def new
    @bot = Bot.new
  end

  # GET /bots/1/edit
  def edit
    # Users holding no role on this bot are eligible collaborators.
    # FIX: sort alphabetically so the picker is easy to scan.
    @eligible_collaborators = User.where.not(:id => User.with_role(:owner, @bot).pluck(:id))
                                  .where.not(:id => User.with_role(:collaborator, @bot).pluck(:id))
                                  .order(:username)
                                  .map{ |u| [u.username, u.id] }
  end

  # POST /bots
  # POST /bots.json
  def create
    @bot = Bot.new(bot_params)
    respond_to do |format|
      if @bot.save
        # The creator becomes the bot's owner.
        current_user.add_role :owner, @bot
        format.html { redirect_to bots_path, flash: { success: 'Bot was successfully created.' } }
        format.json { render :show, status: :created, location: @bot }
      else
        format.html { render :new }
        format.json { render json: @bot.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /bots/1
  # PATCH/PUT /bots/1.json
  def update
    respond_to do |format|
      if @bot.update(bot_params)
        format.html { redirect_to bots_path, flash: { success: 'Bot was successfully updated.' } }
        format.json { render :show, status: :ok, location: @bot }
      else
        format.html { render :edit }
        format.json { render json: @bot.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /bots/1
  # DELETE /bots/1.json
  def destroy
    @bot.destroy
    respond_to do |format|
      format.html { redirect_to bots_path, flash: { success: 'Bot was successfully destroyed.' } }
      format.json { head :no_content }
    end
  end

  # POST /bots/1/collaborators
  # POST /bots/1/collaborators.json
  def add_collaborator
    bot = Bot.find(params[:bot])
    unless User.exists?(id: params[:collaborator])
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user ID does not exist.' } }
        format.json { render json: 'That user ID does not exist.', status: :not_found }
      end
      return
    end
    collaborator = User.find(params[:collaborator])
    if collaborator.has_role?(:collaborator, bot)
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user is already a collaborator.' } }
        format.json { render json: 'That user is already a collaborator.', status: :unprocessable_entity }
      end
      return
    end
    collaborator.add_role :collaborator, bot
    respond_to do |format|
      format.html { redirect_to edit_bot_path(bot), flash: { success: 'Collaborator was successfully added.' } }
      format.json { head :no_content }
    end
  end

  # DELETE /bots/1/collaborators
  def remove_collaborator
    bot = Bot.find(params[:bot])
    collaborator = User.find(params[:collaborator])
    unless collaborator.has_role?(:collaborator, bot)
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user is not a collaborator.' } }
        format.json { render json: 'That user is not a collaborator.', status: :unprocessable_entity }
      end
      return
    end
    collaborator.remove_role :collaborator, bot
    respond_to do |format|
      format.html { redirect_to edit_bot_path(bot), flash: { success: 'Collaborator was successfully removed.' } }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_bot
    @bot = Bot.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def bot_params
    params.require(:bot).permit(:name)
  end

  # Only the bot's owner may edit/update/destroy it.
  def check_bot_ownership
    render :status => :forbidden, :plain => "You don't own this bot" and return unless current_user.has_role? :owner, @bot
  end
end
sort users alphabetically in the picker
# CRUD for bots plus collaborator management; access is controlled via
# rolify roles (:owner / :collaborator) scoped to each bot.
class BotsController < ApplicationController
  before_action :set_bot, only: [:show, :edit, :update, :destroy]
  before_action :authenticate_user!, except: [:index, :show]
  before_action :check_bot_ownership, only: [:edit, :update, :destroy]

  # GET /bots
  # GET /bots.json
  def index
    @bots = Bot.all
  end

  # GET /bots/1
  # GET /bots/1.json
  def show
  end

  # GET /bots/new
  def new
    @bot = Bot.new
  end

  # GET /bots/1/edit
  def edit
    # Users holding no role on this bot are eligible collaborators,
    # sorted alphabetically for the picker.
    @eligible_collaborators = User.where.not(:id => User.with_role(:owner, @bot).pluck(:id))
                                  .where.not(:id => User.with_role(:collaborator, @bot).pluck(:id))
                                  .order(:username)
                                  .map{ |u| [u.username, u.id] }
  end

  # POST /bots
  # POST /bots.json
  def create
    @bot = Bot.new(bot_params)
    respond_to do |format|
      if @bot.save
        # The creator becomes the bot's owner.
        current_user.add_role :owner, @bot
        format.html { redirect_to bots_path, flash: { success: 'Bot was successfully created.' } }
        format.json { render :show, status: :created, location: @bot }
      else
        format.html { render :new }
        format.json { render json: @bot.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /bots/1
  # PATCH/PUT /bots/1.json
  def update
    respond_to do |format|
      if @bot.update(bot_params)
        format.html { redirect_to bots_path, flash: { success: 'Bot was successfully updated.' } }
        format.json { render :show, status: :ok, location: @bot }
      else
        format.html { render :edit }
        format.json { render json: @bot.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /bots/1
  # DELETE /bots/1.json
  def destroy
    @bot.destroy
    respond_to do |format|
      format.html { redirect_to bots_path, flash: { success: 'Bot was successfully destroyed.' } }
      format.json { head :no_content }
    end
  end

  # POST /bots/1/collaborators
  # POST /bots/1/collaborators.json
  def add_collaborator
    bot = Bot.find(params[:bot])
    unless User.exists?(id: params[:collaborator])
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user ID does not exist.' } }
        format.json { render json: 'That user ID does not exist.', status: :not_found }
      end
      return
    end
    collaborator = User.find(params[:collaborator])
    if collaborator.has_role?(:collaborator, bot)
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user is already a collaborator.' } }
        format.json { render json: 'That user is already a collaborator.', status: :unprocessable_entity }
      end
      return
    end
    collaborator.add_role :collaborator, bot
    respond_to do |format|
      format.html { redirect_to edit_bot_path(bot), flash: { success: 'Collaborator was successfully added.' } }
      format.json { head :no_content }
    end
  end

  # Removes the :collaborator role from a user on this bot.
  def remove_collaborator
    bot = Bot.find(params[:bot])
    collaborator = User.find(params[:collaborator])
    unless collaborator.has_role?(:collaborator, bot)
      respond_to do |format|
        format.html { redirect_to edit_bot_path(bot), flash: { error: 'That user is not a collaborator.' } }
        format.json { render json: 'That user is not a collaborator.', status: :unprocessable_entity }
      end
      return
    end
    collaborator.remove_role :collaborator, bot
    respond_to do |format|
      format.html { redirect_to edit_bot_path(bot), flash: { success: 'Collaborator was successfully removed.' } }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_bot
    @bot = Bot.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def bot_params
    params.require(:bot).permit(:name)
  end

  # Only the bot's owner may edit/update/destroy it.
  def check_bot_ownership
    render :status => :forbidden, :plain => "You don't own this bot" and return unless current_user.has_role? :owner, @bot
  end
end
|
# Lists car "deals" and lets a user hide individual cars.
class CarsController < ApplicationController
  # A deal is a visible car whose ed_price exceeds its cl_price by more
  # than 1000. FIX: also require cl_price > 100 so very cheap listings
  # are not displayed.
  def index
    @deals = Car.where(hidden: false).select do |car|
      car.ed_price - car.cl_price > 1000 && car.cl_price > 100
    end
  end

  # Mark a car hidden so it no longer appears in the deals list.
  def hide
    car = Car.find_by(id: params[:id])
    car.hidden = true
    car.save
    redirect_to "/cars"
  end
end
Do not display cars with cl_price < 100
# Lists car "deals" and lets a user hide individual cars.
class CarsController < ApplicationController
  # Visible cars whose price spread (ed_price - cl_price) exceeds 1000,
  # excluding listings with cl_price of 100 or less.
  def index
    visible_cars = Car.where(hidden: false)
    @deals = visible_cars.select do |car|
      spread = car.ed_price - car.cl_price
      spread > 1000 && car.cl_price > 100
    end
  end

  # Mark a car hidden so it no longer appears in the deals list.
  def hide
    car = Car.find_by(id: params[:id])
    car.hidden = true
    car.save
    redirect_to "/cars"
  end
end
# -*- coding: utf-8 -*-
# Websocket chat controller (websocket-rails): relays lobby/room/private
# messages and persists them as Message records.
class ChatController < WebsocketRails::BaseController
  def initialize_session
    # perform application setup here
    #controller_store[:message_count] = 0
    logger.debug "start chat session"
  end

  def client_connected
    logger.debug "user connected"
    # History replay on connect is disabled; rooms replay in enter_room.
    # Message.all.asc(:created_at).limit(500).each do |message|
    #   obj = { room_id: message.room_id, user_id: message.user_id, name: message.user_name, body: message.body }
    #   WebsocketRails[message.room_id].trigger :new_message, obj
    # end
  end

  # Replay up to 500 stored messages for the room being entered.
  def enter_room
    puts "enter room: #{message}"
    Message.where(room_id: message[:room_id]).limit(500).each do |log|
      send_message :new_message, log
    end
  end

  def new_message
    puts "called new_message: #{message}"
    parse(message) unless message[:room_id] == '0'
    # Persist the message.
    Message.create message
  end

  def room_message
    puts "called room_message"
    parse(message)
    # FIX: room messages stopped being persisted; save them just like
    # new_message does.
    Message.create message
    WebsocketRails[message[:room_id]].trigger(:room_message, message)
  end

  # "@name body" sends a private message to that user (also echoed back
  # to the sender).
  def private_message
    at_user_name = message[:body].match(/\A@(.+)\s/)
    at_user = User.find_by!(name: at_user_name[1])
    WebsocketRails[at_user.channel_key].trigger :new_message, message
    parse message
    send_message :new_message, message
  rescue ActiveRecord::RecordNotFound
    send_message :new_message, {user_name: "system", body: "ユーザーが存在しません"}
  end

  private

  # Memoized room lookup; room_id '0' is the lobby and has no Room record.
  def room(room_id)
    return @room = nil if message[:room_id] == '0'
    @room ||= Room.find(room_id)
  end

  # Roll dice / sanitize the body in place and return the message.
  def parse(message)
    if message[:room_id] == '0' # lobby: no dice rolls
      message[:body] = Obscenity.sanitize(message[:body])
    else
      message[:body] = room(message[:room_id]).parse(message[:body], room(message[:room_id]).dice)
    end
    message
  end
end
ログ記録されなくなっていたのを修正
# -*- coding: utf-8 -*-
# Websocket chat controller (websocket-rails): relays lobby/room/private
# messages and persists them as Message records.
class ChatController < WebsocketRails::BaseController
  def initialize_session
    # perform application setup here
    #controller_store[:message_count] = 0
    logger.debug "start chat session"
  end

  def client_connected
    logger.debug "user connected"
    # History replay on connect (disabled; rooms replay in enter_room):
    # Message.all.asc(:created_at).limit(500).each do |message|
    #   obj = { room_id: message.room_id, user_id: message.user_id, name: message.user_name, body: message.body }
    #   WebsocketRails[message.room_id].trigger :new_message, obj
    # end
  end

  def enter_room
    puts "enter room: #{message}"
    # Replay up to 500 stored messages for the room being entered.
    Message.where(room_id: message[:room_id]).limit(500).each do |log|
      send_message :new_message, log
    end
  end

  def new_message
    puts "called new_message: #{message}"
    parse(message) unless message[:room_id] == '0'
    # Persist the message.
    Message.create message
  end

  def room_message
    puts "called room_message"
    parse(message)
    # Persist the message.
    Message.create message
    WebsocketRails[message[:room_id]].trigger(:room_message, message)
  end

  # "@name body" sends a private message to that user (also echoed back
  # to the sender).
  def private_message
    at_user_name = message[:body].match(/\A@(.+)\s/)
    at_user = User.find_by!(name: at_user_name[1])
    WebsocketRails[at_user.channel_key].trigger :new_message, message
    parse message
    send_message :new_message, message
  rescue ActiveRecord::RecordNotFound => e
    send_message :new_message, {user_name: "system", body: "ユーザーが存在しません"}
  end

  private

  # Memoized room lookup; room_id '0' is the lobby and has no Room record.
  def room(room_id)
    unless message[:room_id] == '0'
      @room ||= Room.find(room_id)
      return @room
    else
      @room = nil
    end
  end

  # Roll dice / sanitize the body in place and return the message.
  def parse(message)
    unless message[:room_id] == '0'
      message[:body] = room(message[:room_id]).parse(message[:body], room(message[:room_id]).dice)
    else # the lobby has no dice rolls
      message[:body] = Obscenity.sanitize(message[:body])
    end
    return message
  end
end
|
# Generic CRUD controller driven entirely by request params:
#   params[:model]      -- underscored model name (e.g. "user")
#   params[:associacao] -- optional association to CRUD through
#   params[:id] / params[:associacao_id] -- record ids
# Per-model behaviour (fields, ordering, pagination) comes from a
# companion "<Model>Crud" helper module.
class CrudController < ApplicationController
  before_filter :setup, except: :autocomplete

  private

  # Resolve @model, @crud_helper, @url etc. from params. With
  # params[:associacao] present we operate on an association of a parent
  # record instead of the model itself.
  # NOTE(review): Module.const_get / constantize on raw params lets a
  # client load arbitrary constants -- consider whitelisting model names.
  def setup
    if params[:associacao]
      @crud_associacao = Module.const_get("#{params[:model].to_s.singularize}_crud".camelize)
      @model = Module.const_get(params[:model].camelize).find(params[:id]).send(params[:associacao])
      c_helper = Module.const_get(params[:model].camelize).reflect_on_association(params[:associacao]).class_name
      @crud_helper = Module.const_get("#{c_helper}Crud") unless params[:render] == "modal" and params[:action] == "new"
      @url = crud_associacao_models_path(model: params[:model], id: params[:id], associacao: params[:associacao], page: params[:page], q: params[:q])
      @clean_url = crud_associacao_models_path(model: params[:model], id: params[:id], associacao: params[:associacao])
      @model_permission = c_helper.constantize
      @id = params[:associacao_id] if params[:associacao_id]
    else
      @model = Module.const_get(params[:model].camelize)
      @model_permission = @model
      @crud_helper = Module.const_get("#{params[:model]}_crud".camelize) unless params[:render] == "modal" and params[:action] == "new"
      @url = crud_models_path(model: params[:model], page: params[:page], q: params[:q])
      @clean_url = crud_models_path(model: params[:model])
      @id = params[:id] if params[:id]
    end
  end

  public

  # Paginated, Ransack-searchable listing of the model.
  # NOTE(review): params[:scope] is passed straight to `send` --
  # unbounded send on user input; whitelist allowed scopes.
  def index
    authorize! :read, @model_permission if respond_to?(:current_usuario)
    if params[:scope].present?
      @q = @model.send(params[:scope]).search(params[:q])
    else
      @q = @model.search(params[:q])
    end
    if @q.sorts.empty?
      # Respect an explicit direction in order_field; default to asc.
      if "#{@crud_helper.order_field}".include?("desc") or "#{@crud_helper.order_field}".include?("asc")
        @q.sorts = "#{@crud_helper.order_field}"
      else
        @q.sorts = "#{@crud_helper.order_field} asc"
      end
    end
    if respond_to?(:current_usuario)
      @records = @q.result.accessible_by(current_ability, :read).page(params[:page]).per(@crud_helper.per_page)
    else
      @records = @q.result.page(params[:page]).per(@crud_helper.per_page)
    end
    @titulo = @model.name.pluralize
    render partial: 'records' if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
  end

  # New-record form; "modal" rendering retargets @model to the
  # association/attribute being created from the parent form.
  def new
    if params[:render] == "modal"
      if @model.reflect_on_association(params[:attribute].to_s).present?
        @model = @model.reflect_on_association(params[:attribute].to_s).class_name.constantize
      else
        @model = params[:attribute].to_s.camelcase.constantize
      end
      @url = crud_models_path(model: @model.name.underscore)
      @clean_url = @url
      @model_permission = @model
      @crud_helper = Module.const_get("#{@model}Crud".camelize)
    end
    authorize! :new, @model_permission if respond_to?(:current_usuario)
    @record = @model.new
  end

  def edit
    @record = @model.find(@id)
    authorize! :edit, @record if respond_to?(:current_usuario)
  end

  def show
    @record = @model.find(@id)
    authorize! :read, @record if respond_to?(:current_usuario)
  end

  # Invoke a named instance method on the record, or render a custom
  # partial when no such method exists.
  # NOTE(review): params[:acao] is sent to the record -- unbounded send
  # on user input; whitelist allowed actions. (A later revision guards
  # this with valid_instance_method?.)
  def action
    @record = @model.find(@id)
    authorize! :create_or_update, @record if respond_to?(:current_usuario)
    if @model.method_defined?(params[:acao])
      if @record.send(params[:acao])
        flash.now[:success] = "Ação #{params[:acao]} efetuada com sucesso."
      else
        flash.now[:error] = "Erro ao tentar executar a ação #{params[:acao]}."
      end
      index
    else
      @titulo = @record.to_s
      @texto = params[:acao]
      render partial: "/#{@model.name.underscore.pluralize}/#{params[:acao]}" if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
    end
  end

  # Create or (when @id is present) update a record.
  def create
    @saved = false
    if @id
      @record = @model.find(@id)
      authorize! :update, @record if respond_to?(:current_usuario)
      @saved = @record.update(params_permitt)
    else
      @record = @model.new(params_permitt)
      authorize! :create, @model_permission if respond_to?(:current_usuario)
      @saved = @record.save
    end
    respond_to do |format|
      if @saved
        flash[:success] = params[:id].present? ? "Cadastro alterado com sucesso." : "Cadastro efetuado com sucesso."
        format.html { redirect_to @url }
        unless params[:render] == 'modal'
          format.js { render action: :index}
        else
          format.js
        end
      else
        action = (params[:id]) ? :edit : :new
        format.html { render action: action }
        format.js
      end
    end
  end

  def destroy
    @record = @model.find(@id)
    authorize! :destroy, @record if respond_to?(:current_usuario)
    if @record.destroy
      respond_to do |format|
        flash[:success] = "Cadastro removido com sucesso."
        format.html { redirect_to @url }
        format.js { render action: :index }
      end
    else
      respond_to do |format|
        flash[:error] = @record.errors.full_messages.join(", ")
        format.html { redirect_to @url }
        format.js { render action: :index }
      end
    end
  end

  # Ransack query endpoint storing results in an arbitrarily named ivar
  # (params[:var]) for the rendered partial.
  def query
    authorize! :read, @model_permission if respond_to?(:current_usuario)
    @resource = @model
    @q = @resource.search(params[:q])
    @q.sorts = 'updated_at desc' if @q.sorts.empty?
    if respond_to?(:current_usuario)
      results = @q.result.accessible_by(current_ability).page(params[:page])
    else
      results = @q.result.page(params[:page])
    end
    instance_variable_set("@#{params[:var]}", results)
    if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
      render :partial => params[:partial]
    else
      render :index, controller: request[:controller]
    end
  end

  # JSON autocomplete: matches params[:term] against params[:campo]
  # using the Ransack predicate in params[:tipo].
  def autocomplete
    @model = Module.const_get(params[:model].camelize)
    authorize! :read, @model if respond_to?(:current_usuario)
    parametros = {}
    parametros["#{params[:campo]}_#{params[:tipo]}"] = params[:term]
    @q = @model.search(parametros)
    @q.sorts = 'updated_at desc' if @q.sorts.empty?
    if respond_to?(:current_usuario)
      results = @q.result.accessible_by(current_ability).page(params[:page])
    else
      results = @q.result.page(params[:page])
    end
    method_label = params[:label]
    render json: results.map {|result| {id: result.id, label: result.send(method_label), value: result.send(method_label)} }
  end

  # Export the (unpaginated) result set as XLS, PDF or HTML.
  def listing
    authorize! :read, @model_permission if respond_to?(:current_usuario)
    @q = @model.search(params[:q])
    if respond_to?(:current_usuario)
      @records = @q.result.accessible_by(current_ability)
    else
      @records = @q.result
    end
    report_name = "Listagem de #{@crud_helper.title} #{DateTime.now.strftime('%Y%m%d')}"
    respond_to do |format|
      format.xls {headers["Content-Disposition"] = "attachment; filename=#{report_name}.xls"}
      format.pdf do
        pdf = WickedPdf.new.pdf_from_string(
          render_to_string('crud/listing.pdf.erb'),
          encoding: 'UTF-8',
          page_size: 'A4',
          show_as_html: params[:debug],
          margin: { top: 20, bottom: 20 }
        )
        send_data(pdf, filename: "#{report_name}.pdf", type: "application/pdf", disposition: "inline")
      end
      format.html
    end
  end

  private

  # Strong-parameters filter built from the crud helper's field list.
  def params_permitt
    params.require(@model.name.underscore.to_sym).permit(fields_model)
  end

  # Build the permitted-attribute list: belongs_to become foreign keys,
  # other associations become *_ids arrays, nested groups recurse via
  # permitt_group, and the model may append extras via params_permitt.
  def fields_model
    fields = []
    @crud_helper.form_fields.each do |field|
      if field[:sf].present? && field[:sf][:grupo].present?
        fields << permitt_group(fields, field[:attribute], field[:sf][:fields],@model)
      else
        if @model.reflect_on_association(field[:attribute])
          if @model.reflect_on_association(field[:attribute]).macro == :belongs_to
            fields << @model.reflect_on_association(field[:attribute]).foreign_key
          else
            fields << {"#{field[:attribute].to_s.singularize}_ids".to_sym => []}
          end
        elsif @model.columns_hash[field[:attribute].to_s]
          fields << field[:attribute]
        end
      end
    end
    #TODO - Deprecated
    @crud_helper.form_groups.each do |key, groups|
      fields << permitt_group(fields, key, groups[:fields],@model)
    end
    #Fim - Deprecated
    if @model.respond_to?(:params_permitt)
      @model.params_permitt.each do |field|
        fields << field
      end
    end
    fields
  end

  # Permitted attributes for a nested-attributes group (+key+_attributes),
  # recursing into sub-groups; always allows :id and :_destroy.
  def permitt_group(fields, key, groups,mod)
    chave = "#{key}_attributes"
    group = {chave => [:id, :_destroy]}
    groups.each do |field|
      if field[:sf].present? && field[:sf][:grupo].present?
        group[chave] << permitt_group(fields, field[:attribute], field[:sf][:fields], mod.reflect_on_association(key.to_s).class_name.constantize)
      else
        modelo = mod.reflect_on_association(key.to_s).class_name.constantize
        if modelo.reflect_on_association(field[:attribute])
          if modelo.reflect_on_association(field[:attribute]).macro == :belongs_to
            group[chave] << "#{field[:attribute]}_id".to_sym
          else
            group[chave] << {"#{field[:attribute].to_s.singularize}_ids".to_sym => []}
          end
        elsif (modelo.columns_hash[field[:attribute].to_s] || (modelo.respond_to?(:params_permitt) && modelo.params_permitt.include?(field[:attribute].to_sym)))
          group[chave] << field[:attribute]
        end
      end
    end
    group
  end
end
valida acoes a serem chamadas no controller
# Generic CRUD controller: the target model, its "<Model>Crud" helper module
# and URLs are resolved at runtime from params (see #setup). Authorization via
# CanCan(Can) is applied only when the app defines current_usuario.
class CrudController < ApplicationController
before_filter :setup, except: :autocomplete
# Lists records, optionally through a whitelisted class-level scope
# (params[:scope]), searched via Ransack (params[:q]) and paginated.
def index
authorize! :read, @model_permission if respond_to?(:current_usuario)
if params[:scope].present? && valid_method?(params[:scope])
@q = @model.send(params[:scope]).search(params[:q])
else
@q = @model.search(params[:q])
end
if @q.sorts.empty?
# Use the helper's order_field as-is when it already carries a direction.
if "#{@crud_helper.order_field}".include?("desc") or "#{@crud_helper.order_field}".include?("asc")
@q.sorts = "#{@crud_helper.order_field}"
else
@q.sorts = "#{@crud_helper.order_field} asc"
end
end
if respond_to?(:current_usuario)
@records = @q.result.accessible_by(current_ability, :read).page(params[:page]).per(@crud_helper.per_page)
else
@records = @q.result.page(params[:page]).per(@crud_helper.per_page)
end
@titulo = @model.name.pluralize
# Wiselinks partial requests get only the records partial.
render partial: 'records' if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
end
# Builds a blank record. In modal mode the target model is re-resolved from
# params[:attribute] (association of @model, or a bare class name).
def new
if params[:render] == "modal"
if @model.reflect_on_association(params[:attribute].to_s).present?
@model = @model.reflect_on_association(params[:attribute].to_s).class_name.constantize
else
@model = params[:attribute].to_s.camelcase.constantize
end
@url = crud_models_path(model: @model.name.underscore)
@clean_url = @url
@model_permission = @model
@crud_helper = Module.const_get("#{@model}Crud".camelize)
end
authorize! :new, @model_permission if respond_to?(:current_usuario)
@record = @model.new
end
def edit
@record = @model.find(@id)
authorize! :edit, @record if respond_to?(:current_usuario)
end
def show
@record = @model.find(@id)
authorize! :read, @record if respond_to?(:current_usuario)
end
# Invokes a whitelisted instance method (params[:acao]) on the record, or
# renders a confirmation partial when the action name is not an instance
# method. Only methods passing valid_instance_method? can be called.
def action
@record = @model.find(@id)
authorize! :create_or_update, @record if respond_to?(:current_usuario)
if valid_instance_method?(params[:acao])
if @record.send(params[:acao])
flash.now[:success] = "Ação #{params[:acao]} efetuada com sucesso."
else
flash.now[:error] = "Erro ao tentar executar a ação #{params[:acao]}."
end
index
else
@titulo = @record.to_s
@texto = params[:acao]
render partial: "/#{@model.name.underscore.pluralize}/#{params[:acao]}" if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
end
end
# Creates or updates (when @id is present) using the dynamically built
# strong-parameter whitelist from params_permitt.
def create
@saved = false
if @id
@record = @model.find(@id)
authorize! :update, @record if respond_to?(:current_usuario)
@saved = @record.update(params_permitt)
else
@record = @model.new(params_permitt)
authorize! :create, @model_permission if respond_to?(:current_usuario)
@saved = @record.save
end
respond_to do |format|
if @saved
flash[:success] = params[:id].present? ? "Cadastro alterado com sucesso." : "Cadastro efetuado com sucesso."
format.html { redirect_to @url }
unless params[:render] == 'modal'
format.js { render action: :index}
else
format.js
end
else
action = (params[:id]) ? :edit : :new
format.html { render action: action }
format.js
end
end
end
def destroy
@record = @model.find(@id)
authorize! :destroy, @record if respond_to?(:current_usuario)
if @record.destroy
respond_to do |format|
flash[:success] = "Cadastro removido com sucesso."
format.html { redirect_to @url }
format.js { render action: :index }
end
else
respond_to do |format|
flash[:error] = @record.errors.full_messages.join(", ")
format.html { redirect_to @url }
format.js { render action: :index }
end
end
end
# Ransack search whose results are assigned to an instance variable named by
# params[:var] and rendered via params[:partial] (Wiselinks) or :index.
def query
authorize! :read, @model_permission if respond_to?(:current_usuario)
@resource = @model
@q = @resource.search(params[:q])
@q.sorts = 'updated_at desc' if @q.sorts.empty?
if respond_to?(:current_usuario)
results = @q.result.accessible_by(current_ability).page(params[:page])
else
results = @q.result.page(params[:page])
end
instance_variable_set("@#{params[:var]}", results)
if request.respond_to?(:wiselinks_partial?) && request.wiselinks_partial?
render :partial => params[:partial]
else
render :index, controller: request[:controller]
end
end
# JSON autocomplete endpoint. Skips :setup; the model comes straight from
# params[:model]. The label method must pass valid_instance_method?.
def autocomplete
@model = Module.const_get(params[:model].camelize)
authorize! :read, @model if respond_to?(:current_usuario)
parametros = {}
# Ransack predicate built as "<campo>_<tipo>" (e.g. name_cont).
parametros["#{params[:campo]}_#{params[:tipo]}"] = params[:term]
@q = @model.search(parametros)
@q.sorts = 'updated_at desc' if @q.sorts.empty?
if respond_to?(:current_usuario)
results = @q.result.accessible_by(current_ability).page(params[:page])
else
results = @q.result.page(params[:page])
end
if valid_instance_method?(params[:label])
method_label = params[:label]
else
raise "Ação inválida"
end
render json: results.map {|result| {id: result.id, label: result.send(method_label), value: result.send(method_label)} }
end
# Unpaginated listing exported as XLS, PDF (WickedPdf) or HTML.
def listing
authorize! :read, @model_permission if respond_to?(:current_usuario)
@q = @model.search(params[:q])
if respond_to?(:current_usuario)
@records = @q.result.accessible_by(current_ability)
else
@records = @q.result
end
report_name = "Listagem de #{@crud_helper.title} #{DateTime.now.strftime('%Y%m%d')}"
respond_to do |format|
format.xls {headers["Content-Disposition"] = "attachment; filename=#{report_name}.xls"}
format.pdf do
pdf = WickedPdf.new.pdf_from_string(
render_to_string('crud/listing.pdf.erb'),
encoding: 'UTF-8',
page_size: 'A4',
show_as_html: params[:debug],
margin: { top: 20, bottom: 20 }
)
send_data(pdf, filename: "#{report_name}.pdf", type: "application/pdf", disposition: "inline")
end
format.html
end
end
private
# Resolves @model, @crud_helper, @model_permission, @url/@clean_url and @id
# from params. With params[:associacao], @model becomes the association
# collection of the parent record instead of a class.
def setup
if params[:associacao]
@crud_associacao = Module.const_get("#{params[:model].to_s.singularize}_crud".camelize)
if Module.const_get(params[:model].camelize).reflect_on_association(params[:associacao])
@model = Module.const_get(params[:model].camelize).find(params[:id]).send(params[:associacao])
else
raise "Ação inválida"
end
c_helper = Module.const_get(params[:model].camelize).reflect_on_association(params[:associacao]).class_name
@crud_helper = Module.const_get("#{c_helper}Crud") unless params[:render] == "modal" and params[:action] == "new"
@url = crud_associacao_models_path(model: params[:model], id: params[:id], associacao: params[:associacao], page: params[:page], q: params[:q])
@clean_url = crud_associacao_models_path(model: params[:model], id: params[:id], associacao: params[:associacao])
@model_permission = c_helper.constantize
@id = params[:associacao_id] if params[:associacao_id]
else
@model = Module.const_get(params[:model].camelize)
@model_permission = @model
@crud_helper = Module.const_get("#{params[:model]}_crud".camelize) unless params[:render] == "modal" and params[:action] == "new"
@url = crud_models_path(model: params[:model], page: params[:page], q: params[:q])
@clean_url = crud_models_path(model: params[:model])
@id = params[:id] if params[:id]
end
end
# Strong parameters for @model, with the whitelist computed from the crud
# helper's field descriptors (fields_model).
def params_permitt
params.require(@model.name.underscore.to_sym).permit(fields_model)
end
# Builds the permit whitelist from @crud_helper.form_fields: nested groups
# recurse via permitt_group; belongs_to become foreign keys; to-many become
# "_ids" arrays; plain columns are permitted as-is. Also merges the
# deprecated form_groups and the model's own params_permitt additions.
def fields_model
fields = []
@crud_helper.form_fields.each do |field|
if field[:sf].present? && field[:sf][:grupo].present?
fields << permitt_group(fields, field[:attribute], field[:sf][:fields],@model)
else
if @model.reflect_on_association(field[:attribute])
if @model.reflect_on_association(field[:attribute]).macro == :belongs_to
fields << @model.reflect_on_association(field[:attribute]).foreign_key
else
fields << {"#{field[:attribute].to_s.singularize}_ids".to_sym => []}
end
elsif @model.columns_hash[field[:attribute].to_s]
fields << field[:attribute]
end
end
end
#TODO - Deprecated
@crud_helper.form_groups.each do |key, groups|
fields << permitt_group(fields, key, groups[:fields],@model)
end
#Fim - Deprecated
if @model.respond_to?(:params_permitt)
@model.params_permitt.each do |field|
fields << field
end
end
fields
end
# See fields_model: builds the nested-attributes permit hash for one group.
# NOTE(review): +fields+ is passed through recursion but never read here.
def permitt_group(fields, key, groups,mod)
chave = "#{key}_attributes"
group = {chave => [:id, :_destroy]}
groups.each do |field|
if field[:sf].present? && field[:sf][:grupo].present?
group[chave] << permitt_group(fields, field[:attribute], field[:sf][:fields], mod.reflect_on_association(key.to_s).class_name.constantize)
else
modelo = mod.reflect_on_association(key.to_s).class_name.constantize
if modelo.reflect_on_association(field[:attribute])
if modelo.reflect_on_association(field[:attribute]).macro == :belongs_to
group[chave] << "#{field[:attribute]}_id".to_sym
else
group[chave] << {"#{field[:attribute].to_s.singularize}_ids".to_sym => []}
end
elsif (modelo.columns_hash[field[:attribute].to_s] || (modelo.respond_to?(:params_permitt) && modelo.params_permitt.include?(field[:attribute].to_sym)))
group[chave] << field[:attribute]
end
end
end
group
end
# Whitelists class-level scopes callable via params[:scope]: only methods
# defined directly on @model's ancestors up to ActiveRecord::Base, excluding
# underscore-prefixed names. Guards the Object#send in #index.
def valid_method?(method)
list_methods = []
@model.ancestors.each do |m|
list_methods << m.methods(false).reject{ |m| /^_/ =~ m.to_s }
break if m.superclass.to_s == "ActiveRecord::Base"
end
list_methods.flatten.include? method.to_sym
end
# Same whitelist as valid_method?, but for instance methods (used by
# #action and #autocomplete before calling send on records).
def valid_instance_method?(method)
list_methods = []
@model.ancestors.each do |m|
list_methods << m.instance_methods(false).reject{ |m| /^_/ =~ m.to_s }
break if m.superclass.to_s == "ActiveRecord::Base"
end
list_methods.flatten.include? method.to_sym
end
end
|
# CSV endpoints exposing merit-order curves and storage summaries for a
# testing ground.
class DataController < ApplicationController
  respond_to :csv

  before_filter :find_testing_ground
  before_filter :set_csv_headers
  skip_before_filter :verify_authenticity_token

  # Merit-order price curve as CSV.
  def price_curve
    @merit_curve = merit_order.price_curve
  end

  # Merit-order load curves as CSV.
  def load_curves
    @merit_curve = merit_order.load_curves
  end

  # Storage summary of the calculated electricity network.
  def electricity_storage
    respond_with @summary = TestingGround::StorageSummary.new(
      calculator.network(:electricity)
    )
  end

  private

  # Memoized calculator for the current testing ground.
  def calculator
    @calculator ||= TestingGround::Calculator.new(
      @testing_ground,
      strategies: @testing_ground.selected_strategy.attributes,
      # FIX: the CSV exports must be based on high-resolution curves;
      # without this the calculator falls back to its default resolution.
      resolution: :high
    )
  end

  # Merit-order built from the calculated electricity network.
  def merit_order
    Market::MeritCurveBuilder.new(
      @testing_ground, calculator.network(:electricity)
    ).merit
  end

  # Forces a CSV attachment named "<action>.<testing_ground_id>.csv".
  def set_csv_headers
    name = [params[:action], @testing_ground.id, 'csv'].join('.')
    headers['Content-Disposition'] = "attachment; filename=\"#{name}\""
    headers['Content-Type'] ||= 'text/csv'
  end

  # Loads the testing ground, remembers it in the session and authorizes it.
  def find_testing_ground
    @testing_ground = TestingGround.find(params[:testing_ground_id])
    session[:testing_ground_id] = params[:testing_ground_id]
    authorize @testing_ground
  end
end
Ensure high-res curves are loaded in Merit CSV actions
# Exports merit-order data (prices, loads, storage) for one testing ground
# as CSV attachments.
class DataController < ApplicationController
  respond_to :csv

  before_filter :find_testing_ground
  before_filter :set_csv_headers
  skip_before_filter :verify_authenticity_token

  # CSV: price curve of the merit order.
  def price_curve
    @merit_curve = merit_order.price_curve
  end

  # CSV: load curves of the merit order.
  def load_curves
    @merit_curve = merit_order.load_curves
  end

  # CSV: summary of electricity storage behaviour.
  def electricity_storage
    @summary = TestingGround::StorageSummary.new(
      calculator.network(:electricity)
    )
    respond_with @summary
  end

  private

  # Lazily builds a high-resolution calculator for this testing ground.
  def calculator
    return @calculator if @calculator

    @calculator = TestingGround::Calculator.new(
      @testing_ground,
      strategies: @testing_ground.selected_strategy.attributes,
      resolution: :high
    )
  end

  # The merit order derived from the calculated electricity network.
  def merit_order
    network = calculator.network(:electricity)
    Market::MeritCurveBuilder.new(@testing_ground, network).merit
  end

  # Marks the response as a CSV download named "<action>.<id>.csv".
  def set_csv_headers
    filename = [params[:action], @testing_ground.id, 'csv'].join('.')
    headers['Content-Disposition'] = "attachment; filename=\"#{filename}\""
    headers['Content-Type'] ||= 'text/csv'
  end

  # Fetches and authorizes the testing ground, keeping its id in the session.
  def find_testing_ground
    @testing_ground = TestingGround.find(params[:testing_ground_id])
    session[:testing_ground_id] = params[:testing_ground_id]
    authorize @testing_ground
  end
end
|
# Records daily activity entries for the current user, grouped into Month
# records. Control flow deliberately uses raise Exception / rescue Exception
# to bail out of multi-step validation back to the form (left unchanged here;
# NOTE(review): a dedicated error class under StandardError would be safer).
class DaysController < ApplicationController
before_filter :check_logged_in
# Form for entering today's activities.
def today
# #RESTFUL redirecting
# if request.fullpath != '/today'
# flash.keep
# redirect_to today_path
# end
@date = Date.today
@day = Day.new({:date => @date,
:reason => "",
:approved => true,
:denied => false})
@day.activities.append(Activity.new())
end
# Form for back-filling days between @start_date and @end_date (yesterday).
def past_days
@month = Month.new()
day = Day.new()
@month.days.append(day)
day.activities.append(Activity.new())
today = Date.today
@end_date = today.prev_day
# Before the 6th, also allow entries for the previous month.
@start_date = today.strftime("%d").to_i < 6 ? today.ago(1.month).beginning_of_month : today.beginning_of_month
# @previously_inputted = []
# month containing end date if it exists
# month containing start date if it exists.
# populate list with .strftime("%m/%d/%Y") from each day in these months.
end
# Captcha is only enforced in production.
def check_simple_captcha
if Rails.env.production?
return simple_captcha_valid?
else
return true
end
end
def bad_captcha
flash[:notice] = "Bro, your captcha was so wrong dude."
raise Exception
end
def empty_fields_notice
flash[:notice] = "Fields are empty"
raise Exception
end
def add_today
begin
add(false, :day, :activities_attributes)
rescue Exception
redirect_to today_path
end
end
def add_days
begin
add(true, :month, :days_attributes)
rescue Exception
redirect_to past_days_path
end
end
# Shared entry point for add_today/add_days; raises (via the notice helpers)
# to abort and let the caller redirect back to the form.
def add(has_past_days, outer_sym, inner_sym)
bad_captcha unless check_simple_captcha
empty_fields_notice() if params[outer_sym] == nil || params[outer_sym][inner_sym] == nil
if has_past_days then create_past_days() else create_single_day(params[outer_sym], true) end
redirect_to profile_path #success
end
# Validates and persists one day plus its activities.
# NOTE(review): reads params[:days][:reason] regardless of outer_sym —
# confirm the forms always submit a separate days[reason] field.
def create_single_day(day, approved)
@day = Day.new({:approved => approved,
:total_time => 0,
:denied => false,
:reason => params[:days][:reason]})
unless approved
@day.date = Time.strptime(day[:date], "%m/%d/%Y")
else
@day.date = day[:date]
end
save_single_day(validate_single_day(day[:activities_attributes], @day), @day)
update_month(@day)
end
# Validates every submitted past day first, then saves them all.
def create_past_days
params[:month][:days_attributes].each do |id, day|
empty_fields_notice() if day[:activities_attributes] == nil
check_day(day)
end
save_past_days()
end
def check_day(day)
begin
date = Time.strptime(day[:date], "%m/%d/%Y")
@day = Day.new({:date => date,
:approved => false,
:denied => false,
:total_time => 0,
:reason => params[:days][:reason]})
validate_single_day(day[:activities_attributes], @day)
rescue ArgumentError
#case where Date.parse throws an ArgumentError for having invalid date field
flash[:notice] = "Date is invalid"
raise Exception
end
end
def validate_single_day(activity_list, day)
activities = validate_single_day_activities(activity_list, day)
check_date_already_input(day.date)
validate_model(day)
return activities
end
def validate_single_day_activities(activity_list, day)
activities = []
activity_list.each do |id, activity|
new_activity = create_activity(activity, day)
validate_model(new_activity)
activities += [new_activity]
end
return activities
end
# Builds one Activity, accumulating its duration into the day's total.
def create_activity(activity, day)
name = activity[:name].lstrip
duration = activity[:duration]
day.total_time += duration.to_i
if name == "" then name = "A Healthy Activity" end
@activity = Activity.new({:name => name,
:duration => duration})
end
def save_past_days
params[:month][:days_attributes].each do |id, day|
create_single_day(day, false)
end
end
def save_single_day(activities, day)
notice = ""
activities.each do |activity|
day.save!
activity.day_id = day.id
activity.save!
notice += "#{activity.name} for #{activity.duration} minutes has been recorded for #{day.date.strftime("%m/%d/%Y")}\n"
end
if flash[:notice] == nil then flash[:notice] = notice else flash[:notice] = flash[:notice] + notice end
end
def validate_model(model)
unless model.valid?
flash[:notice] = model.errors.full_messages[0]
raise Exception
end
end
# Rejects a date that the user has already entered in its Month record.
def check_date_already_input(date)
month = current_user.months.where(:month => date.strftime("%m").to_i, :year => date.strftime("%Y").to_i).first
# FIX: the user may have no Month record yet for this date; previously this
# fell through to month.days and raised NoMethodError on nil. No month means
# no duplicate entry is possible, so the check passes.
return if month == nil
month.days.each do |day|
if day.date.strftime("%m/%d/%Y") == date.strftime("%m/%d/%Y")
notice = "#{date.strftime("%m/%d/%Y")} has already been inputted"
if flash[:notice] == nil then flash[:notice] = notice else flash[:notice] = flash[:notice] + notice end
raise Exception
end
end
end
# Finds or creates the Month for the day's date, bumps its counter and links
# the day to it.
def update_month(day)
month = day.date.strftime("%m").to_i
year = day.date.strftime("%Y").to_i
month_model = Month.where(user_id: current_user.id, month: month, year: year).first
if month_model == nil
month_model = Month.create({:user_id => current_user.id,
:month => month,
:year => year,
:printed_form => false,
:received_form => false,
:num_of_days => 0})
end
month_model.num_of_days += 1
month_model.save!
day.month_id = month_model.id
day.save!
end
end
current tests passing with validation added
# Records daily activity entries for the current user, grouped into Month
# records. Control flow deliberately uses raise Exception / rescue Exception
# to bail out of multi-step validation back to the form (left unchanged here;
# NOTE(review): a dedicated error class under StandardError would be safer).
class DaysController < ApplicationController
before_filter :check_logged_in
# Form for entering today's activities.
def today
# #RESTFUL redirecting
# if request.fullpath != '/today'
# flash.keep
# redirect_to today_path
# end
@date = Date.today
@day = Day.new({:date => @date,
:reason => "",
:approved => true,
:denied => false})
@day.activities.append(Activity.new())
end
# Form for back-filling days between @start_date and @end_date (yesterday).
def past_days
@month = Month.new()
day = Day.new()
@month.days.append(day)
day.activities.append(Activity.new())
today = Date.today
@end_date = today.prev_day
# Before the 6th, also allow entries for the previous month.
@start_date = today.strftime("%d").to_i < 6 ? today.ago(1.month).beginning_of_month : today.beginning_of_month
# @previously_inputted = []
# month containing end date if it exists
# month containing start date if it exists.
# populate list with .strftime("%m/%d/%Y") from each day in these months.
end
# Captcha is only enforced in production.
def check_simple_captcha
if Rails.env.production?
return simple_captcha_valid?
else
return true
end
end
def bad_captcha
flash[:notice] = "Bro, your captcha was so wrong dude."
raise Exception
end
def empty_fields_notice
flash[:notice] = "Fields are empty"
raise Exception
end
def add_today
begin
add(false, :day, :activities_attributes)
rescue Exception
redirect_to today_path
end
end
def add_days
begin
add(true, :month, :days_attributes)
rescue Exception
redirect_to past_days_path
end
end
# Shared entry point for add_today/add_days; raises (via the notice helpers)
# to abort and let the caller redirect back to the form.
def add(has_past_days, outer_sym, inner_sym)
bad_captcha unless check_simple_captcha
empty_fields_notice() if params[outer_sym] == nil || params[outer_sym][inner_sym] == nil
if has_past_days then create_past_days() else create_single_day(params[outer_sym], true) end
redirect_to profile_path #success
end
# Validates and persists one day plus its activities.
# NOTE(review): reads params[:days][:reason] regardless of outer_sym —
# confirm the forms always submit a separate days[reason] field.
def create_single_day(day, approved)
@day = Day.new({:approved => approved,
:total_time => 0,
:denied => false,
:reason => params[:days][:reason]})
unless approved
@day.date = Time.strptime(day[:date], "%m/%d/%Y")
else
@day.date = day[:date]
end
save_single_day(validate_single_day(day[:activities_attributes], @day), @day)
update_month(@day)
end
# Validates every submitted past day first, then saves them all.
def create_past_days
params[:month][:days_attributes].each do |id, day|
empty_fields_notice() if day[:activities_attributes] == nil
check_day(day)
end
save_past_days()
end
def check_day(day)
begin
date = Time.strptime(day[:date], "%m/%d/%Y")
@day = Day.new({:date => date,
:approved => false,
:denied => false,
:total_time => 0,
:reason => params[:days][:reason]})
validate_single_day(day[:activities_attributes], @day)
rescue ArgumentError
#case where Date.parse throws an ArgumentError for having invalid date field
flash[:notice] = "Date is invalid"
raise Exception
end
end
def validate_single_day(activity_list, day)
activities = validate_single_day_activities(activity_list, day)
# check_date_already_input(day.date)
validate_model(day)
return activities
end
def validate_single_day_activities(activity_list, day)
activities = []
activity_list.each do |id, activity|
new_activity = create_activity(activity, day)
validate_model(new_activity)
activities += [new_activity]
end
return activities
end
# Builds one Activity, accumulating its duration into the day's total.
def create_activity(activity, day)
name = activity[:name].lstrip
duration = activity[:duration]
day.total_time += duration.to_i
if name == "" then name = "A Healthy Activity" end
@activity = Activity.new({:name => name,
:duration => duration})
end
def save_past_days
params[:month][:days_attributes].each do |id, day|
create_single_day(day, false)
end
end
def save_single_day(activities, day)
notice = ""
activities.each do |activity|
day.save!
activity.day_id = day.id
activity.save!
notice += "#{activity.name} for #{activity.duration} minutes has been recorded for #{day.date.strftime("%m/%d/%Y")}\n"
end
if flash[:notice] == nil then flash[:notice] = notice else flash[:notice] = flash[:notice] + notice end
end
def validate_model(model)
unless model.valid?
flash[:notice] = model.errors.full_messages[0]
raise Exception
end
end
# Rejects a date that the user has already entered in its Month record.
# Currently unreferenced (call is commented out in validate_single_day) but
# kept working for when the validation is re-enabled.
def check_date_already_input(date)
# FIX: this method referenced an undefined local `day` (day.date) instead of
# its `date` parameter, raising NameError whenever it was called.
month_num = date.strftime("%m").to_i
year = date.strftime("%Y").to_i
month = current_user.months.where(:month => month_num, :year => year).first
unless month == nil
month.days.each do |day|
if day.date.strftime("%m/%d/%Y") == date.strftime("%m/%d/%Y")
flash[:notice] = "#{date.strftime("%m/%d/%Y")} has already been inputted"
raise Exception
end
end
end
end
# Finds or creates the Month for the day's date, bumps its counter and links
# the day to it.
def update_month(day)
month = day.date.strftime("%m").to_i
year = day.date.strftime("%Y").to_i
month_model = Month.where(user_id: current_user.id, month: month, year: year).first
if month_model == nil
month_model = Month.create({:user_id => current_user.id,
:month => month,
:year => year,
:printed_form => false,
:received_form => false,
:num_of_days => 0})
end
month_model.num_of_days += 1
month_model.save!
day.month_id = month_model.id
day.save!
end
end
|
# Ten-pin bowling game controller. Round/ball/pins state travels in the URL
# (round_id, ball_id, pins) and is decoded by set_params; round 11 is the
# bonus round after the 10th frame.
class GameController < ApplicationController
  include ActionView::Helpers::TextHelper

  before_filter :set_params
  before_filter :get_game, :except => [:create]
  before_filter :calculate_round_ball_and_pins, :only => [:update]

  # Starts a new game.
  # FIX: previously this rendered :show directly with @round hard-coded to 10
  # (so a fresh game displayed as frame 10). Redirecting to the show route
  # lets set_params/get_game establish the correct round 1 / ball 1 state.
  def create
    @game = BowlingGame.create
    redirect_to show_game_url(@game.id.to_s, @round, @ball)
  end

  # Records a roll and moves to the next state or the finished page.
  def update
    @game.attempt(@pins_down)
    @game.save
    if game_finished?
      redirect_to finished_game_url(@game.id.to_s)
    else
      flash[:notice] = get_notice(@pins_down)
      redirect_to show_game_url(@game.id.to_s, @round, @ball)
    end
  end

  # Shows the current frame, reducing the available pins by the previous roll
  # when we are on the second ball of a regular frame.
  # FIX: `if last_hit = @game.hits.last.to_i` was always truthy (#to_i never
  # returns nil/false), so the previous roll was deducted even on a frame's
  # first ball. Deduct only for ball > 1 outside the extra round.
  def show
    if @ball > 1 && !extra_round?
      last_hit = @game.hits.last.to_i
      @pins_availables -= last_hit if last_hit < 10
    end
  end

  def finished
  end

  private

  # The game ends after the extra round: immediately if ball 1 was no strike,
  # otherwise after the third bonus ball.
  def game_finished?
    (@round == 11 && @ball == 1 && @pins_down < 10) ||
      (@round == 11 && @ball == 3)
  end

  def get_notice(pins_down)
    if strike? pins_down
      "STRIKE!"
    elsif spare? pins_down
      "SPARE!"
    else
      "#{pluralize(pins_down, 'pin')} knocked down."
    end
  end

  # Spare: this roll plus the previous one clears all ten pins.
  def spare?(pins_down)
    last_hit = @game.hits[-2].to_i
    strike?(pins_down + last_hit)
  end

  # Advances @round/@ball after a roll: extra round just counts balls; a
  # strike or second ball closes the frame; otherwise move to ball 2.
  def calculate_round_ball_and_pins
    if extra_round? && @pins_down >= 0
      @ball = @ball + 1
    elsif strike?(@pins_down) || @ball == 2
      @round = @round + 1
      @ball = 1
    elsif @pins_down >= 0
      @ball = @ball + 1
    end
  end

  def extra_round?
    @round == 11
  end

  def strike?(pins_down)
    pins_down == 10
  end

  def get_game
    @game = BowlingGame.find(params[:id]) rescue BowlingGame.new
  end

  # Decodes URL state; -1 pins means "no roll submitted".
  def set_params
    @round = params[:round_id].present? ? params[:round_id].to_i : 1
    @ball = params[:ball_id].present? ? params[:ball_id].to_i : 1
    @pins_down = params[:pins].present? ? params[:pins].to_i : -1
    @pins_availables = 10
  end
end
after create, redirect to show and load variables
# Controller for a ten-pin bowling game. Frame state (round, ball, pins
# knocked down) is carried in the URL and decoded by set_params; round 11 is
# the bonus round played after frame 10.
class GameController < ApplicationController
  include ActionView::Helpers::TextHelper

  before_filter :set_params
  before_filter :get_game, :except => [:create]
  before_filter :calculate_round_ball_and_pins, :only => [:update]

  # Start a fresh game and send the player to its scoreboard.
  def create
    @game = BowlingGame.create
    redirect_to show_game_url(@game.id.to_s, @round, @ball)
  end

  # Record one roll, then either finish the game or show the next state.
  def update
    @game.attempt(@pins_down)
    @game.save
    return redirect_to(finished_game_url(@game.id.to_s)) if game_finished?

    flash[:notice] = get_notice(@pins_down)
    redirect_to show_game_url(@game.id.to_s, @round, @ball)
  end

  # On the second ball of a regular frame, only the pins left standing after
  # the previous roll are available.
  def show
    return unless @ball > 1 && !extra_round?

    previous_roll = @game.hits.last.to_i
    @pins_availables -= previous_roll if previous_roll < 10
  end

  def finished
  end

  private

  # The game is over in the bonus round: right away when ball 1 is no strike,
  # otherwise after the third bonus ball.
  def game_finished?
    return false unless @round == 11

    (@ball == 1 && @pins_down < 10) || @ball == 3
  end

  # Human-readable message for the roll just played.
  def get_notice(pins_down)
    return "STRIKE!" if strike?(pins_down)
    return "SPARE!" if spare?(pins_down)

    "#{pluralize(pins_down, 'pin')} knocked down."
  end

  # A spare is when this roll plus the previous one clears all ten pins.
  def spare?(pins_down)
    strike?(pins_down + @game.hits[-2].to_i)
  end

  # Advance the round/ball counters after a roll: bonus round only counts
  # balls; a strike or a second ball closes the frame; otherwise ball 2.
  def calculate_round_ball_and_pins
    if extra_round? && @pins_down >= 0
      @ball += 1
    elsif strike?(@pins_down) || @ball == 2
      @round += 1
      @ball = 1
    elsif @pins_down >= 0
      @ball += 1
    end
  end

  def extra_round?
    @round == 11
  end

  def strike?(pins_down)
    pins_down == 10
  end

  def get_game
    @game = BowlingGame.find(params[:id]) rescue BowlingGame.new
  end

  # Decode URL state; -1 pins means "no roll submitted yet".
  def set_params
    @round = params[:round_id].present? ? params[:round_id].to_i : 1
    @ball = params[:ball_id].present? ? params[:ball_id].to_i : 1
    @pins_down = params[:pins].present? ? params[:pins].to_i : -1
    @pins_availables = 10
  end
end
# ===GLSAMaker v2
# Copyright (C) 2010-11 Alex Legler <a3li@gentoo.org>
# Copyright (C) 2009 Pierre-Yves Rofes <py@gentoo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# For more information, see the LICENSE file.
# GLSA controller
class GlsaController < ApplicationController
# Pooled GLSA requests awaiting drafting.
def requests
@pageID = "requests"
@pageTitle = "Pooled GLSA requests"
@glsas = Glsa.where(:status => 'request').order('updated_at DESC')
end
# Pooled GLSA drafts awaiting release.
def drafts
@pageID = "drafts"
@pageTitle = "Pooled GLSA drafts"
@glsas = Glsa.where(:status => 'draft').order('updated_at DESC')
end
# Released advisories for one month, selectable via HTML params or the JS
# month picker.
def archive
@pageID = "archive"
@pageTitle = "GLSA archive"
respond_to do |format|
format.html {
@month = (params[:month] || Date.today.month).to_i
@year = (params[:year] || Date.today.year).to_i
month_start = Date.new(@year, @month, 1)
# FIX: Date.new(@year, @month + 1, 1) raises ArgumentError for December
# (month 13); advance one calendar month via Date#>> instead.
month_end = (month_start >> 1) - 1
@glsas = Glsa.where(:status => 'release', :first_released_at => month_start..month_end).order('updated_at DESC')
}
format.js {
@month = params[:view]['month(2i)'].to_i
@year = params[:view]['month(1i)'].to_i
month_start = Date.new(@year, @month, 1)
# FIX: same December overflow as the HTML branch above.
month_end = (month_start >> 1) - 1
@glsas = Glsa.where(:status => 'release', :first_released_at => month_start..month_end).order('updated_at DESC')
@table = render_to_string :partial => "glsa_row", :collection => @glsas, :as => :glsa, :locals => { :view => :drafts }
}
end
end
# New-GLSA form. Currently always renders the request form; the draft/choice
# branches below the early return are intentionally disabled pending
# straight-to-draft editing.
def new
@pageID = "new"
@pageTitle = "New GLSA"
# TODO: Straight-to-draft editing
render :action => "new-request"
return
if params[:what] == "request"
render :action => "new-request"
elsif params[:what] == "draft"
render :action => "new-draft"
else
render
end
end
# Creates a new GLSA request and notifies the team by mail.
# NOTE(review): rescue Exception is very broad — it also swallows system
# errors; StandardError would normally suffice.
def create
if params[:what] == "request"
begin
glsa = Glsa.new_request(params[:title], params[:bugs], params[:comment], params[:access], (params[:import_references].to_i == 1), current_user)
Glsamaker::Mail.request_notification(glsa, current_user)
flash[:notice] = "Successfully created GLSA #{glsa.glsa_id}"
redirect_to :action => "requests"
rescue Exception => e
log_error e
flash.now[:error] = e.message
render :action => "new-request"
end
end
end
# Shows one advisory, optionally at a specific revision (params[:rev_id]),
# in HTML, XML or plain text.
def show
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = params[:rev_id].nil? ? @glsa.last_revision : @glsa.revisions.find_by_revid(params[:rev_id])
if @rev == nil
flash[:error] = "Invalid revision ID"
redirect_to :action => "show"
return
end
respond_to do |wants|
wants.html { render }
wants.xml { }
wants.txt { render }
end
end
# Sends the advisory as a downloadable .xml or .txt attachment.
def download
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = params[:rev_id].nil? ? @glsa.last_revision : @glsa.revisions.find_by_revid(params[:rev_id])
if @rev == nil
flash[:error] = "Invalid revision ID"
redirect_to :action => "show"
return
end
text = nil
respond_to do |wants|
wants.xml { text = render_to_string(:action => :show, :format => 'xml')}
wants.txt { text = render_to_string(:action => :show, :format => 'txt')}
wants.html { render :text => "Cannot download HTML format. Pick .xml or .txt"; return }
end
send_data(text, :filename => "glsa-#{@glsa.glsa_id}.#{params[:format]}")
end
# Edit form for the latest revision, with empty package/reference rows
# pre-built so the form always renders at least one of each.
def edit
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = @glsa.last_revision
# Packages
@rev.vulnerable_packages.build(:comp => "<", :arch => "*") if @rev.vulnerable_packages.length == 0
@rev.unaffected_packages.build(:comp => ">=", :arch => "*") if @rev.unaffected_packages.length == 0
# References
@rev.references.build if @rev.references.length == 0
@templates = {}
GLSAMAKER_TEMPLATE_TARGETS.each do |target|
@templates[target] = Template.where(:target => target).all
end
end
# Saves the edit form as a brand-new Revision of the GLSA (revisions are
# append-only), re-creating bugs/packages/references from params, then mails
# an edit notification with the diff against the previous revision.
def update
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@prev_latest_rev = @glsa.last_revision
if @glsa.nil?
flash[:error] = "Unknown GLSA ID"
redirect_to :action => "index"
return
end
# GLSA object
# The first editor is submitter
# TODO: Maybe take a better condition (adding bugs would make so. the submitter)
if @glsa.submitter.nil?
@glsa.submitter = current_user
end
@glsa.status = "draft" if @glsa.status == "request"
@glsa.restricted = (params[:glsa][:restricted] == "confidential")
# Force update
@glsa.updated_at = 0
revision = Revision.new
revision.revid = @glsa.next_revid
revision.glsa = @glsa
revision.user = current_user
revision.title = params[:glsa][:title]
revision.synopsis = params[:glsa][:synopsis]
revision.access = params[:glsa][:access]
revision.severity = params[:glsa][:severity]
revision.product = params[:glsa][:product]
revision.description = params[:glsa][:description]
revision.background = params[:glsa][:background]
revision.impact = params[:glsa][:impact]
revision.workaround = params[:glsa][:workaround]
revision.resolution = params[:glsa][:resolution]
unless revision.save
flash.now[:error] = "Errors occurred while saving the Revision object: #{revision.errors.full_messages.join ', '}"
render :action => "edit"
return
end
unless @glsa.save
flash[:error] = "Errors occurred while saving the GLSA object"
render :action => "edit"
end
# Bugs
bugzilla_warning = false
if params[:glsa][:bugs]
bugs = params[:glsa][:bugs].map {|bug| bug.to_i }
bugs.each do |bug|
begin
b = Glsamaker::Bugs::Bug.load_from_id(bug)
revision.bugs.create(
:bug_id => bug,
:title => b.summary,
:whiteboard => b.status_whiteboard,
:arches => b.arch_cc.join(', ')
)
rescue Exception => e
log_error e
logger.info { e.inspect }
# In case of bugzilla errors, just keep the bug #
revision.bugs.create(
:bug_id => bug
)
bugzilla_warning = true
end
end
end
logger.debug params[:glsa][:package].inspect
# Packages
params[:glsa][:package].each do |package|
logger.debug package.inspect
next if package[:atom].strip == ''
revision.packages.create(package)
end
# References
params[:glsa][:reference].each do |reference|
logger.debug reference.inspect
next if reference[:title].strip == ''
revision.references.create(reference)
end
# Comments
@glsa.comments.each do |comment|
comment.read = params["commentread-#{comment.id}"] == "true"
comment.save
end
# Sending emails
Glsamaker::Mail.edit_notification(@glsa, rev_diff(@glsa, @glsa.revisions[-2], revision), current_user)
flash[:notice] = "Saving was successful. #{'NOTE: Bugzilla integration is not available, only plain bug numbers.' if bugzilla_warning}"
redirect_to :action => 'show', :id => @glsa
end
# Pre-release confirmation page; only drafters (access >= 2) may release,
# and only public (non-confidential) drafts.
def prepare_release
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
if current_user.access < 2
deny_access "Tried to prepare release"
return
end
if @glsa.status == 'request'
flash[:error] = 'You cannot release a request. Draft the advisory first.'
redirect_to :action => "show", :id => @glsa
return
end
if @glsa.restricted
flash[:error] = 'You cannot release a confidential draft. Make it public first.'
redirect_to :action => "show", :id => @glsa
return
end
@rev = @glsa.last_revision
@comments_override = (current_user.is_el_jefe? and params[:override_approvals].to_i == 1) || false
end
# Releases the advisory (bang variant skips approval checks for el jefe) and
# optionally mails the plain-text advisory.
def release
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
if current_user.access < 2
deny_access "Tried to prepare release"
return
end
if @glsa.status == 'request'
flash[:error] = 'You cannot release a request. Draft the advisory first.'
redirect_to :action => "show", :id => @glsa
return
end
if @glsa.restricted
flash[:error] = 'You cannot release a confidential draft. Make it public first.'
redirect_to :action => "show", :id => @glsa
return
end
@rev = @glsa.last_revision
begin
if current_user.is_el_jefe?
@glsa.release!
else
@glsa.release
end
@glsa.invalidate_last_revision_cache
if params[:email] == '1'
# Temporarily switch the template format so the mail body renders as txt.
of = @template_format
@template_format = 'txt'
Glsamaker::Mail.send_text(
render_to_string({:template => 'glsa/show.txt.erb', :format => :txt, :layout => false}),
"[ GLSA #{@glsa.glsa_id} ] #{@rev.title}",
current_user,
false
)
@template_format = of
end
rescue GLSAReleaseError => e
flash[:error] = "Internal error: #{e.message}. Cannot release advisory."
redirect_to :action => "show", :id => @glsa
return
end
# ugly hack, but necessary to switch back to html
@real_format = 'html'
render(:format => :html, :layout => 'application')
end
# Unified diff between two revisions (params[:old] / params[:new]).
def diff
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
rev_old = @glsa.revisions.find_by_revid(params[:old])
rev_new = @glsa.revisions.find_by_revid(params[:new])
@diff = with_format(:xml) { rev_diff(@glsa, rev_old, rev_new) }
end
# Refreshes cached Bugzilla metadata for the latest revision.
def update_cache
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = @glsa.last_revision
@rev.update_cached_bug_metadata
flash[:notice] = "Successfully updated all caches."
if params[:redirect]
redirect_to params[:redirect]
else
redirect_to :action => 'show', :id => @glsa unless params[:no_redirect]
end
rescue Exception => e
log_error e
flash[:notice] = "Could not update caches: #{e.message}"
if params[:redirect]
redirect_to params[:redirect]
else
redirect_to :action => 'show', :id => @glsa unless params[:no_redirect]
end
end
def destroy
end
def comment
end
# Imports CVE references: with params[:go] == '1' the selected CVE ids are
# appended to the GLSA; otherwise renders the CVE picker (no layout).
def import_references
begin
if params[:go].to_s == '1'
glsa = Glsa.find(Integer(params[:id]))
return unless check_object_access!(glsa)
refs = []
params[:import][:cve].each do |cve_id|
cve = Cve.find_by_cve_id cve_id
refs << {:title => cve.cve_id, :url => cve.url}
end
glsa.add_references refs
flash[:notice] = "Imported #{refs.count} references."
redirect_to :action => "show", :id => glsa.id
return
else
@glsa = Glsa.find(Integer(params[:id]))
return unless check_object_access!(@glsa)
@cves = @glsa.related_cves
end
rescue Exception => e
render :text => "Error: #{e.message}", :status => 500
log_error e
return
end
render :layout => false
end
protected
# Renders both revisions to indented GLSA XML and diffs the two documents.
def rev_diff(glsa, rev_old, rev_new, format = :unified, context_lines = 3)
@glsa = glsa
@rev = rev_old
old_text = Glsamaker::XML.indent(
render_to_string(
:template => 'glsa/_glsa.xml.builder',
:locals => {:glsa => @glsa, :rev => @rev},
:layout => 'none'
),
{:indent => 2, :maxcols => 80}
)
@rev = rev_new
new_text = Glsamaker::XML.indent(
render_to_string(
:template => 'glsa/_glsa.xml.builder',
:locals => {:glsa => @glsa, :rev => @rev},
:layout => 'none'
),
{:indent => 2, :maxcols => 80}
)
Glsamaker::Diff.diff(old_text, new_text, format, context_lines)
end
end
Archive: Fix issue with decembers
# ===GLSAMaker v2
# Copyright (C) 2010-11 Alex Legler <a3li@gentoo.org>
# Copyright (C) 2009 Pierre-Yves Rofes <py@gentoo.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# For more information, see the LICENSE file.
# GLSA controller
class GlsaController < ApplicationController
def requests
@pageID = "requests"
@pageTitle = "Pooled GLSA requests"
@glsas = Glsa.where(:status => 'request').order('updated_at DESC')
end
def drafts
@pageID = "drafts"
@pageTitle = "Pooled GLSA drafts"
@glsas = Glsa.where(:status => 'draft').order('updated_at DESC')
end
def archive
@pageID = "archive"
@pageTitle = "GLSA archive"
respond_to do |format|
format.html {
@month = (params[:month] || Date.today.month).to_i
@year = (params[:year] || Date.today.year).to_i
month_start = Date.new(@year, @month, 1)
month_end = Date.new(@year, @month + 1, 1) - 1
@glsas = Glsa.where(:status => 'release', :first_released_at => month_start..month_end).order('updated_at DESC')
}
format.js {
@month = params[:view]['month(2i)'].to_i
@year = params[:view]['month(1i)'].to_i
month_start = Date.new(@year, @month, 1)
month_end = nil
if @month == 12
month_end = Date.new(@year + 1, 1, 1) -1
else
month_end = Date.new(@year, @month + 1, 1) - 1
end
@glsas = Glsa.where(:status => 'release', :first_released_at => month_start..month_end).order('updated_at DESC')
@table = render_to_string :partial => "glsa_row", :collection => @glsas, :as => :glsa, :locals => { :view => :drafts }
}
end
end
def new
@pageID = "new"
@pageTitle = "New GLSA"
# TODO: Straight-to-draft editing
render :action => "new-request"
return
if params[:what] == "request"
render :action => "new-request"
elsif params[:what] == "draft"
render :action => "new-draft"
else
render
end
end
def create
if params[:what] == "request"
begin
glsa = Glsa.new_request(params[:title], params[:bugs], params[:comment], params[:access], (params[:import_references].to_i == 1), current_user)
Glsamaker::Mail.request_notification(glsa, current_user)
flash[:notice] = "Successfully created GLSA #{glsa.glsa_id}"
redirect_to :action => "requests"
rescue Exception => e
log_error e
flash.now[:error] = e.message
render :action => "new-request"
end
end
end
def show
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = params[:rev_id].nil? ? @glsa.last_revision : @glsa.revisions.find_by_revid(params[:rev_id])
if @rev == nil
flash[:error] = "Invalid revision ID"
redirect_to :action => "show"
return
end
respond_to do |wants|
wants.html { render }
wants.xml { }
wants.txt { render }
end
end
def download
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = params[:rev_id].nil? ? @glsa.last_revision : @glsa.revisions.find_by_revid(params[:rev_id])
if @rev == nil
flash[:error] = "Invalid revision ID"
redirect_to :action => "show"
return
end
text = nil
respond_to do |wants|
wants.xml { text = render_to_string(:action => :show, :format => 'xml')}
wants.txt { text = render_to_string(:action => :show, :format => 'txt')}
wants.html { render :text => "Cannot download HTML format. Pick .xml or .txt"; return }
end
send_data(text, :filename => "glsa-#{@glsa.glsa_id}.#{params[:format]}")
end
def edit
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = @glsa.last_revision
# Packages
@rev.vulnerable_packages.build(:comp => "<", :arch => "*") if @rev.vulnerable_packages.length == 0
@rev.unaffected_packages.build(:comp => ">=", :arch => "*") if @rev.unaffected_packages.length == 0
# References
@rev.references.build if @rev.references.length == 0
@templates = {}
GLSAMAKER_TEMPLATE_TARGETS.each do |target|
@templates[target] = Template.where(:target => target).all
end
end
def update
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@prev_latest_rev = @glsa.last_revision
if @glsa.nil?
flash[:error] = "Unknown GLSA ID"
redirect_to :action => "index"
return
end
# GLSA object
# The first editor is submitter
# TODO: Maybe take a better condition (adding bugs would make so. the submitter)
if @glsa.submitter.nil?
@glsa.submitter = current_user
end
@glsa.status = "draft" if @glsa.status == "request"
@glsa.restricted = (params[:glsa][:restricted] == "confidential")
# Force update
@glsa.updated_at = 0
revision = Revision.new
revision.revid = @glsa.next_revid
revision.glsa = @glsa
revision.user = current_user
revision.title = params[:glsa][:title]
revision.synopsis = params[:glsa][:synopsis]
revision.access = params[:glsa][:access]
revision.severity = params[:glsa][:severity]
revision.product = params[:glsa][:product]
revision.description = params[:glsa][:description]
revision.background = params[:glsa][:background]
revision.impact = params[:glsa][:impact]
revision.workaround = params[:glsa][:workaround]
revision.resolution = params[:glsa][:resolution]
unless revision.save
flash.now[:error] = "Errors occurred while saving the Revision object: #{revision.errors.full_messages.join ', '}"
render :action => "edit"
return
end
unless @glsa.save
flash[:error] = "Errors occurred while saving the GLSA object"
render :action => "edit"
end
# Bugs
bugzilla_warning = false
if params[:glsa][:bugs]
bugs = params[:glsa][:bugs].map {|bug| bug.to_i }
bugs.each do |bug|
begin
b = Glsamaker::Bugs::Bug.load_from_id(bug)
revision.bugs.create(
:bug_id => bug,
:title => b.summary,
:whiteboard => b.status_whiteboard,
:arches => b.arch_cc.join(', ')
)
rescue Exception => e
log_error e
logger.info { e.inspect }
# In case of bugzilla errors, just keep the bug #
revision.bugs.create(
:bug_id => bug
)
bugzilla_warning = true
end
end
end
logger.debug params[:glsa][:package].inspect
# Packages
params[:glsa][:package].each do |package|
logger.debug package.inspect
next if package[:atom].strip == ''
revision.packages.create(package)
end
# References
params[:glsa][:reference].each do |reference|
logger.debug reference.inspect
next if reference[:title].strip == ''
revision.references.create(reference)
end
# Comments
@glsa.comments.each do |comment|
comment.read = params["commentread-#{comment.id}"] == "true"
comment.save
end
# Sending emails
Glsamaker::Mail.edit_notification(@glsa, rev_diff(@glsa, @glsa.revisions[-2], revision), current_user)
flash[:notice] = "Saving was successful. #{'NOTE: Bugzilla integration is not available, only plain bug numbers.' if bugzilla_warning}"
redirect_to :action => 'show', :id => @glsa
end
def prepare_release
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
if current_user.access < 2
deny_access "Tried to prepare release"
return
end
if @glsa.status == 'request'
flash[:error] = 'You cannot release a request. Draft the advisory first.'
redirect_to :action => "show", :id => @glsa
return
end
if @glsa.restricted
flash[:error] = 'You cannot release a confidential draft. Make it public first.'
redirect_to :action => "show", :id => @glsa
return
end
@rev = @glsa.last_revision
@comments_override = (current_user.is_el_jefe? and params[:override_approvals].to_i == 1) || false
end
def release
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
if current_user.access < 2
deny_access "Tried to prepare release"
return
end
if @glsa.status == 'request'
flash[:error] = 'You cannot release a request. Draft the advisory first.'
redirect_to :action => "show", :id => @glsa
return
end
if @glsa.restricted
flash[:error] = 'You cannot release a confidential draft. Make it public first.'
redirect_to :action => "show", :id => @glsa
return
end
@rev = @glsa.last_revision
begin
if current_user.is_el_jefe?
@glsa.release!
else
@glsa.release
end
@glsa.invalidate_last_revision_cache
if params[:email] == '1'
of = @template_format
@template_format = 'txt'
Glsamaker::Mail.send_text(
render_to_string({:template => 'glsa/show.txt.erb', :format => :txt, :layout => false}),
"[ GLSA #{@glsa.glsa_id} ] #{@rev.title}",
current_user,
false
)
@template_format = of
end
rescue GLSAReleaseError => e
flash[:error] = "Internal error: #{e.message}. Cannot release advisory."
redirect_to :action => "show", :id => @glsa
return
end
# ugly hack, but necessary to switch back to html
@real_format = 'html'
render(:format => :html, :layout => 'application')
end
def diff
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
rev_old = @glsa.revisions.find_by_revid(params[:old])
rev_new = @glsa.revisions.find_by_revid(params[:new])
@diff = with_format(:xml) { rev_diff(@glsa, rev_old, rev_new) }
end
def update_cache
@glsa = Glsa.find(params[:id])
return unless check_object_access!(@glsa)
@rev = @glsa.last_revision
@rev.update_cached_bug_metadata
flash[:notice] = "Successfully updated all caches."
if params[:redirect]
redirect_to params[:redirect]
else
redirect_to :action => 'show', :id => @glsa unless params[:no_redirect]
end
rescue Exception => e
log_error e
flash[:notice] = "Could not update caches: #{e.message}"
if params[:redirect]
redirect_to params[:redirect]
else
redirect_to :action => 'show', :id => @glsa unless params[:no_redirect]
end
end
def destroy
end
def comment
end
def import_references
begin
if params[:go].to_s == '1'
glsa = Glsa.find(Integer(params[:id]))
return unless check_object_access!(glsa)
refs = []
params[:import][:cve].each do |cve_id|
cve = Cve.find_by_cve_id cve_id
refs << {:title => cve.cve_id, :url => cve.url}
end
glsa.add_references refs
flash[:notice] = "Imported #{refs.count} references."
redirect_to :action => "show", :id => glsa.id
return
else
@glsa = Glsa.find(Integer(params[:id]))
return unless check_object_access!(@glsa)
@cves = @glsa.related_cves
end
rescue Exception => e
render :text => "Error: #{e.message}", :status => 500
log_error e
return
end
render :layout => false
end
protected
def rev_diff(glsa, rev_old, rev_new, format = :unified, context_lines = 3)
@glsa = glsa
@rev = rev_old
old_text = Glsamaker::XML.indent(
render_to_string(
:template => 'glsa/_glsa.xml.builder',
:locals => {:glsa => @glsa, :rev => @rev},
:layout => 'none'
),
{:indent => 2, :maxcols => 80}
)
@rev = rev_new
new_text = Glsamaker::XML.indent(
render_to_string(
:template => 'glsa/_glsa.xml.builder',
:locals => {:glsa => @glsa, :rev => @rev},
:layout => 'none'
),
{:indent => 2, :maxcols => 80}
)
Glsamaker::Diff.diff(old_text, new_text, format, context_lines)
end
end |
#
# Author:: Dennis Klein
# Author:: Victor Penso
#
# Copyright:: 2013-15, GSI HPC department
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
action :add do
# Deploy the APT key if true
deploy_flag = true
newkey = new_resource.key
# Remove leading white-spaces
newkey = newkey.gsub(/^ */,'')
# fingerprint for the key as defined by the client code, remove white spaces
fingerprint_command = "echo '#{newkey}' | gpg --with-fingerprint --no-options"
fingerprint_command += " 2>/dev/null | grep fingerprint | cut -d= -f2 | tr -d ' '"
cmd = Mixlib::ShellOut.new(fingerprint_command)
cmd.run_command
# TODO:
#cmd.error!
fingerprint = cmd.stdout.chomp || nil
unless fingerprint.nil?
# Get a list of all key fingerprints in the system, remove white spaces
fingerprints_command = "apt-key finger 2>/dev/null | grep fingerprint |"
fingerprints_command += " tr -s ' ' | cut -d= -f2"
cmd = Mixlib::ShellOut.new(fingerprints_command)
cmd.run_command
# TODO:
#cmd.error!
fingerprints = cmd.stdout.split("\n").map { |f| f.delete(' ') }
# If the fingerprints exists, assume the key is deployed already
deploy_flag = false if fingerprints.include? fingerprint
end
ruby_block "Add APT key with fingerpint #{fingerprint}" do
block do
cmd = Mixlib::ShellOut.new("echo '#{newkey}' | apt-key add - >/dev/null")
cmd.run_command
cmd.error!
end
only_if do deploy_flag end
end
new_resource.updated_by_last_action(deploy_flag)
end
action :remove do
fingerprint = new_resource.key.gsub(/^ */,'')
fp_suffix = fingerprint[-8..-1]
execute "Remove APT key with fingerprint #{fingerprint}" do
command "apt-key del '#{fp_suffix}' >/dev/null"
only_if "apt-key list | grep '#{fp_suffix}' >/dev/null"
end
new_resource.updated_by_last_action(true)
end
Fix typo
#
# Author:: Dennis Klein
# Author:: Victor Penso
#
# Copyright:: 2013-15, GSI HPC department
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
action :add do
# Deploy the APT key if true
deploy_flag = true
newkey = new_resource.key
# Remove leading white-spaces
newkey = newkey.gsub(/^ */,'')
# fingerprint for the key as defined by the client code, remove white spaces
fingerprint_command = "echo '#{newkey}' | gpg --with-fingerprint --no-options"
fingerprint_command += " 2>/dev/null | grep fingerprint | cut -d= -f2 | tr -d ' '"
cmd = Mixlib::ShellOut.new(fingerprint_command)
cmd.run_command
# TODO:
#cmd.error!
fingerprint = cmd.stdout.chomp || nil
unless fingerprint.nil?
# Get a list of all key fingerprints in the system, remove white spaces
fingerprints_command = "apt-key finger 2>/dev/null | grep fingerprint |"
fingerprints_command += " tr -s ' ' | cut -d= -f2"
cmd = Mixlib::ShellOut.new(fingerprints_command)
cmd.run_command
# TODO:
#cmd.error!
fingerprints = cmd.stdout.split("\n").map { |f| f.delete(' ') }
# If the fingerprints exists, assume the key is deployed already
deploy_flag = false if fingerprints.include? fingerprint
end
ruby_block "Add APT key with fingerprint #{fingerprint}" do
block do
cmd = Mixlib::ShellOut.new("echo '#{newkey}' | apt-key add - >/dev/null")
cmd.run_command
cmd.error!
end
only_if do deploy_flag end
end
new_resource.updated_by_last_action(deploy_flag)
end
action :remove do
fingerprint = new_resource.key.gsub(/^ */,'')
fp_suffix = fingerprint[-8..-1]
execute "Remove APT key with fingerprint #{fingerprint}" do
command "apt-key del '#{fp_suffix}' >/dev/null"
only_if "apt-key list | grep '#{fp_suffix}' >/dev/null"
end
new_resource.updated_by_last_action(true)
end
|
# encoding: UTF-8
class HomeController < ApplicationController
caches_page :index, :if => lambda { flash.empty? && !user_signed_in? }
caches_page :press, :join, :support, :people, :about_method, :member
def index
@topic_columns = Topic.column_groups
@parties = Party.order(:name)
end
def about
if params[:lang] == "en"
render :about, :locale => "en"
end
end
# don't override Object#method
def about_method
end
def press
end
def login_status
render layout: false
end
def join
end
def support
end
def member
end
def people
@board = [
Person.new('Jari Bakken', 'jari@holderdeord.no', 'hdo/jari.jpg', 'er sjefsutvikler i Holder de ord. Han jobber til daglig hos FINN.no, hovedsakelig med interne verktøy og testautomasjon. Jari programmerer for det meste i Ruby og JavaScript, og står bak <a href="http://github.com/jarib">mange populære open source-prosjekt</a> med flere millioner nedlastinger. Han er selvlært som utvikler, og har en bachelor i utøvende jazzgitar fra Norges Musikkhøgskole.'),
Person.new('Eva Jørgensen', nil, 'hdo/eva-cecilie.jpg', 'jobber med regnskap og økonomistyring i Holder de ord. Hun har en mastergrad i politisk økonomi fra BI i Oslo. Eva har også to bachelorgrader: en i Europastudier fra Universitetet i Oslo, og en i økonomi og administrasjon fra Høyskolen i Oslo. Eva sitter også i Holder de ords styre.'),
Person.new('Morten Kjelkenes', nil, nil, 'er teknisk ansvarlig og driftssjef i Holder de ord. Morten jobber til daglig med prosjektledelse av både telecom- og IT-prosjekter, og har god teknisk erfaring innen konsolidering og virtualisering av tjenester for store kunder. Han har lang erfaring med kompliserte og heterogene miljøer og tjenester, samt fra oppbygging av testmiljøer for disse.'),
Person.new('Daniel Rees', 'daniel@holderdeord.no', 'hdo/daniel.jpg', 'er daglig leder og en av grunnleggerne av Holder de ord. Han har en mastergrad i statsvitenskap fra NTNU i Trondheim, og har bakgrunn fra TNS Gallup hvor han har jobbet med opinionsundersøkelser og kommunikasjonsanalyse. Daniel har bred erfaring som frilansjournalist, fra NRK Her & Nå, og har tidligere jobbet med å utvikle nettsteder for FNs informasjonskontor for Norden og landsdekkende organisasjoner i Norge. Daniel er også styreleder i Holder de ord.'),
Person.new('Kristofer Rees', 'kristofer@holderdeord.no', 'hdo/kristofer.jpg', 'er sjef for metode og analyse i Holder de ord. Han har en bachelorgrad i statsvitenskap, og har tidligere studert musikk ved NTNU i Trondheim og Det Kgl. Danske Musikkonservatorium i København. Kristofer sitter også i Holder de ords styre.'),
Person.new('Tiina Ruohonen', 'tiina@holderdeord.no', 'hdo/tiina.jpg', 'er en av grunnleggerne av Holder de ord, nestleder og sjef for kommunikasjon, presse, og partnerskap. Tiina har en Cand. Mag. i statsvitenskap og juss, og en mastergrad i bærekraftig utvikling og etikk. Hun jobbet i flere år som prosjektleder og rådgiver på klimaområdet, og driver i dag sitt eget selskap som hjelper kunder med utfordringer innenfor klimaspørsmål, etikk, samfunnsansvar, og demokratisk medvirkning. Tiina sitter også i Holder de ords styre.'),
Person.new('Linn Skorge', 'linn@holderdeord.no', 'hdo/linn.jpg', 'jobber med salg og finansiering i Holder de ord. Linn tar for øyeblikket en mastergrad i politisk økonomi på BI i Oslo. Fra tidligere har hun en bachelorgrad i internasjonal markedsføring, også fra Handelshøyskolen BI. Linn sitter også i Holder de ords styre.'),
Person.new('Guro Øistensen', nil, nil, 'jobber med metode og det faglige innholdet i Holder de ord. Til daglig er hun Communications Manager i IT-bedriften Logica, hvor hun jobber med internett, intranett, PR og intern kommunikasjon. Guro er utdannet sosiolog fra Universitetet i Oslo. Hun sitter også i Holder de ords styre.')
]
@contributors = [
Person.new('Kat Aquino'),
Person.new('Martin Bekkelund'),
Person.new('Anders Berg Hansen'),
Person.new('Cathrine Berg-Nielsen'),
Person.new('Kristian Bysheim'),
Person.new('Linn Katrine Erstad'),
Person.new('Inge Olav Fure'),
Person.new('Svein Halvor Halvorsen'),
Person.new('Arne Hassel'),
Person.new('Jostein Holje'),
Person.new('Vegard Karevoll'),
Person.new('Markus Krüger'),
Person.new('Joanna Merker'),
Person.new('Sara Mjelva'),
Person.new('Salve J. Nilsen'),
Person.new('Gustav Oddsson'),
Person.new('Endre Ottosen'),
Person.new('Petter Reinholdtsen'),
Person.new('Jonathan Ronen'),
Person.new('Carl Martin Rosenberg'),
Person.new('Erik Seierstad'),
Person.new('Osman Siddique'),
Person.new('Cosimo Streppone'),
Person.new('Hanna Welde Tranås'),
Person.new('Ingrid Ødegaard')
]
@alumni = [
Person.new('Tage Augustson'),
Person.new('Anne Raaum Christensen'),
Person.new('Madeleine Skjelland Eriksen'),
Person.new('Marte Haabeth Grindaker'),
Person.new('Vilde Grønn'),
Person.new('Rigmor Haga'),
Person.new('Vegar Heir'),
Person.new('Dina Hestad'),
Person.new('Thomas Huang'),
Person.new('Elida Høeg'),
Person.new('Tor Håkon Inderberg'),
Person.new('Esben Jensen'),
Person.new('Nina Jensen'),
Person.new('Einar Kjerschow'),
Person.new('Øystein Jerkø Kostøl'),
Person.new('Ingrid Lomelde'),
Person.new('Ellen M. E. Lundring'),
Person.new('Liv Arntzen Løchen'),
Person.new('Magnus Løseth'),
Person.new('Marit Sjøvaag Marino'),
Person.new('Silje Nyløkken'),
Person.new('Tommy Steinsli'),
Person.new('Einar Sundin'),
Person.new('Eirik Swensen'),
Person.new('Ole Martin Volle')
]
end
class Person
attr_reader :name, :image, :email, :bio
def initialize(name, email = nil, image = nil, bio = nil)
@name, @email, @image, @bio = name, email, image, bio
end
end
end
Add Alex to contributors.
# encoding: UTF-8
class HomeController < ApplicationController
caches_page :index, :if => lambda { flash.empty? && !user_signed_in? }
caches_page :press, :join, :support, :people, :about_method, :member
def index
@topic_columns = Topic.column_groups
@parties = Party.order(:name)
end
def about
if params[:lang] == "en"
render :about, :locale => "en"
end
end
# don't override Object#method
def about_method
end
def press
end
def login_status
render layout: false
end
def join
end
def support
end
def member
end
def people
@board = [
Person.new('Jari Bakken', 'jari@holderdeord.no', 'hdo/jari.jpg', 'er sjefsutvikler i Holder de ord. Han jobber til daglig hos FINN.no, hovedsakelig med interne verktøy og testautomasjon. Jari programmerer for det meste i Ruby og JavaScript, og står bak <a href="http://github.com/jarib">mange populære open source-prosjekt</a> med flere millioner nedlastinger. Han er selvlært som utvikler, og har en bachelor i utøvende jazzgitar fra Norges Musikkhøgskole.'),
Person.new('Eva Jørgensen', nil, 'hdo/eva-cecilie.jpg', 'jobber med regnskap og økonomistyring i Holder de ord. Hun har en mastergrad i politisk økonomi fra BI i Oslo. Eva har også to bachelorgrader: en i Europastudier fra Universitetet i Oslo, og en i økonomi og administrasjon fra Høyskolen i Oslo. Eva sitter også i Holder de ords styre.'),
Person.new('Morten Kjelkenes', nil, nil, 'er teknisk ansvarlig og driftssjef i Holder de ord. Morten jobber til daglig med prosjektledelse av både telecom- og IT-prosjekter, og har god teknisk erfaring innen konsolidering og virtualisering av tjenester for store kunder. Han har lang erfaring med kompliserte og heterogene miljøer og tjenester, samt fra oppbygging av testmiljøer for disse.'),
Person.new('Daniel Rees', 'daniel@holderdeord.no', 'hdo/daniel.jpg', 'er daglig leder og en av grunnleggerne av Holder de ord. Han har en mastergrad i statsvitenskap fra NTNU i Trondheim, og har bakgrunn fra TNS Gallup hvor han har jobbet med opinionsundersøkelser og kommunikasjonsanalyse. Daniel har bred erfaring som frilansjournalist, fra NRK Her & Nå, og har tidligere jobbet med å utvikle nettsteder for FNs informasjonskontor for Norden og landsdekkende organisasjoner i Norge. Daniel er også styreleder i Holder de ord.'),
Person.new('Kristofer Rees', 'kristofer@holderdeord.no', 'hdo/kristofer.jpg', 'er sjef for metode og analyse i Holder de ord. Han har en bachelorgrad i statsvitenskap, og har tidligere studert musikk ved NTNU i Trondheim og Det Kgl. Danske Musikkonservatorium i København. Kristofer sitter også i Holder de ords styre.'),
Person.new('Tiina Ruohonen', 'tiina@holderdeord.no', 'hdo/tiina.jpg', 'er en av grunnleggerne av Holder de ord, nestleder og sjef for kommunikasjon, presse, og partnerskap. Tiina har en Cand. Mag. i statsvitenskap og juss, og en mastergrad i bærekraftig utvikling og etikk. Hun jobbet i flere år som prosjektleder og rådgiver på klimaområdet, og driver i dag sitt eget selskap som hjelper kunder med utfordringer innenfor klimaspørsmål, etikk, samfunnsansvar, og demokratisk medvirkning. Tiina sitter også i Holder de ords styre.'),
Person.new('Linn Skorge', 'linn@holderdeord.no', 'hdo/linn.jpg', 'jobber med salg og finansiering i Holder de ord. Linn tar for øyeblikket en mastergrad i politisk økonomi på BI i Oslo. Fra tidligere har hun en bachelorgrad i internasjonal markedsføring, også fra Handelshøyskolen BI. Linn sitter også i Holder de ords styre.'),
Person.new('Guro Øistensen', nil, nil, 'jobber med metode og det faglige innholdet i Holder de ord. Til daglig er hun Communications Manager i IT-bedriften Logica, hvor hun jobber med internett, intranett, PR og intern kommunikasjon. Guro er utdannet sosiolog fra Universitetet i Oslo. Hun sitter også i Holder de ords styre.')
]
@contributors = [
Person.new('Alex Asensi'),
Person.new('Kat Aquino'),
Person.new('Martin Bekkelund'),
Person.new('Anders Berg Hansen'),
Person.new('Cathrine Berg-Nielsen'),
Person.new('Kristian Bysheim'),
Person.new('Linn Katrine Erstad'),
Person.new('Inge Olav Fure'),
Person.new('Svein Halvor Halvorsen'),
Person.new('Arne Hassel'),
Person.new('Jostein Holje'),
Person.new('Vegard Karevoll'),
Person.new('Markus Krüger'),
Person.new('Joanna Merker'),
Person.new('Sara Mjelva'),
Person.new('Salve J. Nilsen'),
Person.new('Gustav Oddsson'),
Person.new('Endre Ottosen'),
Person.new('Petter Reinholdtsen'),
Person.new('Jonathan Ronen'),
Person.new('Carl Martin Rosenberg'),
Person.new('Erik Seierstad'),
Person.new('Osman Siddique'),
Person.new('Cosimo Streppone'),
Person.new('Hanna Welde Tranås'),
Person.new('Ingrid Ødegaard')
]
@alumni = [
Person.new('Tage Augustson'),
Person.new('Anne Raaum Christensen'),
Person.new('Madeleine Skjelland Eriksen'),
Person.new('Marte Haabeth Grindaker'),
Person.new('Vilde Grønn'),
Person.new('Rigmor Haga'),
Person.new('Vegar Heir'),
Person.new('Dina Hestad'),
Person.new('Thomas Huang'),
Person.new('Elida Høeg'),
Person.new('Tor Håkon Inderberg'),
Person.new('Esben Jensen'),
Person.new('Nina Jensen'),
Person.new('Einar Kjerschow'),
Person.new('Øystein Jerkø Kostøl'),
Person.new('Ingrid Lomelde'),
Person.new('Ellen M. E. Lundring'),
Person.new('Liv Arntzen Løchen'),
Person.new('Magnus Løseth'),
Person.new('Marit Sjøvaag Marino'),
Person.new('Silje Nyløkken'),
Person.new('Tommy Steinsli'),
Person.new('Einar Sundin'),
Person.new('Eirik Swensen'),
Person.new('Ole Martin Volle')
]
end
class Person
attr_reader :name, :image, :email, :bio
def initialize(name, email = nil, image = nil, bio = nil)
@name, @email, @image, @bio = name, email, image, bio
end
end
end
|
# encoding: utf-8
class HomeController < ApplicationController
def index
client = Instagram::Client.new(
format: 'json',
client_id: Instagram.client_id,
client_secret: Instagram.client_secret
)
tag = URI.encode('ねこ')
begin
@response = client.tag_recent_media(tag, max_tag_id: params[:max_tag_id])
rescue
@response = nil # TODO: exception handling
end
end
end
Change search tagname temporarily (#19)
# encoding: utf-8
class HomeController < ApplicationController
def index
client = Instagram::Client.new(
format: 'json',
client_id: Instagram.client_id,
client_secret: Instagram.client_secret
)
tag = URI.encode('nuko')
begin
@response = client.tag_recent_media(tag, max_tag_id: params[:max_tag_id])
rescue
@response = nil # TODO: exception handling
end
end
end
|
require 'securerandom'
action :run do
#Create cookie secret
if (!node.attribute? "google_auth.cookie_secret.#{new_resource.name}" )
node.set_unless[:google_auth][:cookie_secret][new_resource.name] = SecureRandom.base64 34
node.save
end
service_name = "google_auth_proxy_#{new_resource.name}"
golang_package "github.com/klamontagne/google_auth_proxy"
template "/etc/init/#{service_name}.conf" do
source "upstart.conf.erb"
mode 0600
owner "root"
cookbook "google_auth_proxy"
variables(
:client_id => new_resource.client_id,
:client_secret => new_resource.client_secret,
:cookie_domain => new_resource.cookie_domain,
:cookie_secret => node[:google_auth][:cookie_secret][new_resource.name],
:user => new_resource.user,
:google_apps_domain => new_resource.google_apps_domain,
:listen_address => new_resource.listen_address,
:redirect_url => new_resource.redirect_url,
:upstreams => new_resource.upstream.first #TODO test multiple upstreams
)
end
service service_name do
provider Chef::Provider::Service::Upstart
action [ :enable, :start ]
end
end
restart service on conf change
require 'securerandom'
action :run do
#Create cookie secret
if (!node.attribute? "google_auth.cookie_secret.#{new_resource.name}" )
node.set_unless[:google_auth][:cookie_secret][new_resource.name] = SecureRandom.base64 34
node.save
end
service_name = "google_auth_proxy_#{new_resource.name}"
golang_package "github.com/klamontagne/google_auth_proxy"
template "#{service_name}-upstart" do
path "/etc/init/#{service_name}.conf"
source "upstart.conf.erb"
mode 0600
owner "root"
cookbook "google_auth_proxy"
variables(
:client_id => new_resource.client_id,
:client_secret => new_resource.client_secret,
:cookie_domain => new_resource.cookie_domain,
:cookie_secret => node[:google_auth][:cookie_secret][new_resource.name],
:user => new_resource.user,
:google_apps_domain => new_resource.google_apps_domain,
:listen_address => new_resource.listen_address,
:redirect_url => new_resource.redirect_url,
:upstreams => new_resource.upstream.first #TODO test multiple upstreams
)
end
service service_name do
provider Chef::Provider::Service::Upstart
action [ :enable, :start ]
subscribes :restart, "template[#{service_name}-upstart]", :delayed
end
end
|
# Homepage
class HomeController < ApplicationController
respond_to :html
before_filter :require_administrator, except: :index
# Show homepage
# === Assigns
# * upcoming_events
# * recent_results: Events with Results within last two weeks
def index
@page_title = RacingAssociation.current.name
assign_home
@photo = @home.photo
@posts = recent_posts
@upcoming_events = Event.upcoming(@home.weeks_of_upcoming_events)
@events_with_recent_results = Event.with_recent_results(@home.weeks_of_recent_results.weeks.ago)
@most_recent_event_with_recent_result = Event.most_recent_with_recent_result(
@home.weeks_of_recent_results.weeks.ago,
RacingAssociation.current.default_sanctioned_by
).first
@news_category = ArticleCategory.where(name: "news").first
if @news_category
@recent_news = Article.recent_news(@home.weeks_of_upcoming_events.weeks.ago, @news_category)
end
render_page
end
def edit
assign_home
end
def update
assign_home
if @home.update(home_params)
redirect_to edit_home_path
else
render :edit
end
end
private
def assign_home
@home = Home.current
end
def home_params
params_without_mobile.require(:home).permit(:photo_id, :weeks_of_recent_results, :weeks_of_upcoming_events)
end
def recent_posts
Post.recent
end
end
Add stale? check for homepage using consolidated updated_at
# Homepage
# Public homepage controller. Builds the landing page from the singleton
# Home record plus recent racing data; edit/update are admin-only.
# Adds HTTP caching: the index body is only rebuilt when a homepage model
# has changed (see #updated_at).
class HomeController < ApplicationController
  respond_to :html
  # Only :index is public; edit/update require an administrator.
  before_filter :require_administrator, except: :index

  # Show homepage
  # === Assigns
  # * upcoming_events
  # * recent_results: Events with Results within last two weeks
  def index
    @page_title = RacingAssociation.current.name
    # Conditional GET: skip the expensive assigns and return 304 when the
    # ETag derived from [updated_at, @today] still matches.
    # NOTE(review): @today is not assigned in this controller — presumably
    # set by an upstream filter (ApplicationController?); confirm.
    if stale?([ updated_at, @today ], public: true)
      assign_home
      @photo = @home.photo
      @posts = recent_posts
      # Time windows (in weeks) are configured per-site on the Home record.
      @upcoming_events = Event.upcoming(@home.weeks_of_upcoming_events)
      @events_with_recent_results = Event.with_recent_results(@home.weeks_of_recent_results.weeks.ago)
      @most_recent_event_with_recent_result = Event.most_recent_with_recent_result(
        @home.weeks_of_recent_results.weeks.ago,
        RacingAssociation.current.default_sanctioned_by
      ).first
      # News block is optional: only assigned when a "news" category exists.
      @news_category = ArticleCategory.where(name: "news").first
      if @news_category
        @recent_news = Article.recent_news(@home.weeks_of_upcoming_events.weeks.ago, @news_category)
      end
      render_page
    end
  end

  # Admin form for homepage settings (photo, week windows).
  def edit
    assign_home
  end

  # Persist homepage settings; re-render the form on validation failure.
  def update
    assign_home
    if @home.update(home_params)
      redirect_to edit_home_path
    else
      render :edit
    end
  end

  private

  # Loads the site-wide Home singleton into @home.
  def assign_home
    @home = Home.current
  end

  # Strong parameters for Home updates.
  def home_params
    params_without_mobile.require(:home).permit(:photo_id, :weeks_of_recent_results, :weeks_of_upcoming_events)
  end

  def recent_posts
    Post.recent
  end

  # Most recent updated_at for all models shown on homepage
  # (cache key input for stale? in #index).
  def updated_at
    [
      Article.maximum(:updated_at),
      ArticleCategory.maximum(:updated_at),
      Event.maximum(:updated_at),
      Home.maximum(:updated_at),
      Post.maximum(:updated_at),
      Result.maximum(:updated_at)
    ].compact.max
  end
end
|
#
# Copyright Peter Donald
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::Asadmin

# asadmin "--type" flag derived from the resource's library_type.
def type_flag
  "--type #{new_resource.library_type}"
end

use_inline_resources

# Download a library archive and register it with the GlassFish domain via
# "asadmin add-library", restarting the domain service when requested.
action :add do
  # Declare the domain service up front so the bash resource can notify it.
  if new_resource.init_style == 'upstart'
    service "glassfish-#{new_resource.domain_name}" do
      provider Chef::Provider::Service::Upstart
      supports :restart => true, :status => true
      action :nothing
    end
  elsif new_resource.init_style == 'runit'
    runit_service "glassfish-#{new_resource.domain_name}" do
      sv_timeout 100
      supports :restart => true, :status => true
      action :nothing
    end
  else
    raise "Unknown init style #{new_resource.init_style}"
  end
  # Cache path keyed by domain + SHA1 of the URL to avoid collisions.
  cached_package_filename = "#{Chef::Config[:file_cache_path]}/#{new_resource.domain_name}_#{Digest::SHA1.hexdigest(new_resource.url)}/#{::File.basename(new_resource.url)}"
  # Guard shared by all steps: skip when the library is already registered
  # (grep -x matches the whole line).
  check_command = "#{asadmin_command('list-libraries')} #{type_flag} | grep -x -- '#{::File.basename(new_resource.url)}'"
  directory ::File.dirname(cached_package_filename) do
    not_if check_command
    owner new_resource.system_user
    group new_resource.system_group
    mode '0770'
    recursive true
  end
  remote_file cached_package_filename do
    not_if check_command
    source new_resource.url
    owner new_resource.system_user
    group new_resource.system_group
    mode '0640'
    action :create_if_missing
  end
  command = []
  command << "add-library"
  command << type_flag
  command << "--upload" << new_resource.upload
  command << cached_package_filename
  bash "asadmin_add-library #{new_resource.url}" do
    not_if check_command
    user new_resource.system_user
    group new_resource.system_group
    code asadmin_command(command.join(' '))
    if new_resource.requires_restart
      notifies :restart, "service[glassfish-#{new_resource.domain_name}]", :immediate if new_resource.init_style == 'upstart'
      notifies :restart, "runit_service[glassfish-#{new_resource.domain_name}]", :immediate if new_resource.init_style == 'runit'
    end
  end
end

# Unregister the library from the domain (cached download is not removed).
action :remove do
  command = []
  command << "remove-library"
  command << type_flag
  command << ::File.basename(new_resource.url)
  bash "asadmin_remove-library #{new_resource.url}" do
    only_if "#{asadmin_command('list-libraries')} #{type_flag} | grep -x -- '#{::File.basename(new_resource.url)}'"
    user new_resource.system_user
    group new_resource.system_group
    code asadmin_command(command.join(' '))
  end
end
Prefer single quotes
#
# Copyright Peter Donald
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::Asadmin

# asadmin "--type" flag derived from the resource's library_type.
def type_flag
  "--type #{new_resource.library_type}"
end

use_inline_resources

# Download a library archive and register it with the GlassFish domain via
# "asadmin add-library", restarting the domain service when requested.
action :add do
  # Declare the domain service up front so the bash resource can notify it.
  if new_resource.init_style == 'upstart'
    service "glassfish-#{new_resource.domain_name}" do
      provider Chef::Provider::Service::Upstart
      supports :restart => true, :status => true
      action :nothing
    end
  elsif new_resource.init_style == 'runit'
    runit_service "glassfish-#{new_resource.domain_name}" do
      sv_timeout 100
      supports :restart => true, :status => true
      action :nothing
    end
  else
    raise "Unknown init style #{new_resource.init_style}"
  end
  # Cache path keyed by domain + SHA1 of the URL to avoid collisions.
  cached_package_filename = "#{Chef::Config[:file_cache_path]}/#{new_resource.domain_name}_#{Digest::SHA1.hexdigest(new_resource.url)}/#{::File.basename(new_resource.url)}"
  # Guard shared by all steps: skip when the library is already registered
  # (grep -x matches the whole line).
  check_command = "#{asadmin_command('list-libraries')} #{type_flag} | grep -x -- '#{::File.basename(new_resource.url)}'"
  directory ::File.dirname(cached_package_filename) do
    not_if check_command
    owner new_resource.system_user
    group new_resource.system_group
    mode '0770'
    recursive true
  end
  remote_file cached_package_filename do
    not_if check_command
    source new_resource.url
    owner new_resource.system_user
    group new_resource.system_group
    mode '0640'
    action :create_if_missing
  end
  command = []
  command << 'add-library'
  command << type_flag
  # Single quotes for all plain (non-interpolated) strings, completing the
  # "prefer single quotes" cleanup consistently across both actions.
  command << '--upload' << new_resource.upload
  command << cached_package_filename
  bash "asadmin_add-library #{new_resource.url}" do
    not_if check_command
    user new_resource.system_user
    group new_resource.system_group
    code asadmin_command(command.join(' '))
    if new_resource.requires_restart
      notifies :restart, "service[glassfish-#{new_resource.domain_name}]", :immediate if new_resource.init_style == 'upstart'
      notifies :restart, "runit_service[glassfish-#{new_resource.domain_name}]", :immediate if new_resource.init_style == 'runit'
    end
  end
end

# Unregister the library from the domain (cached download is not removed).
action :remove do
  command = []
  command << 'remove-library'
  command << type_flag
  command << ::File.basename(new_resource.url)
  bash "asadmin_remove-library #{new_resource.url}" do
    only_if "#{asadmin_command('list-libraries')} #{type_flag} | grep -x -- '#{::File.basename(new_resource.url)}'"
    user new_resource.system_user
    group new_resource.system_group
    code asadmin_command(command.join(' '))
  end
end
|
# Landing page: dashboard for signed-in users, marketing page for guests.
class HomeController < ApplicationController
  before_filter :hide_cover_image

  # HTML: forum topics + currently-watching sidebar.
  # JSON: paginated story feed for the current user and everyone they follow.
  def index
    if user_signed_in?
      respond_to do |format|
        format.html do
          @forum_topics = Forem::Topic.by_most_recent_post.limit(10)
          @recent_anime = current_user.watchlists.where(status: "Currently Watching").includes(:anime).order("last_watched DESC").limit(4)
        end
        format.json do
          @stories = Story.accessible_by(current_ability).order('updated_at DESC').where(user_id: current_user.following.map {|x| x.id } + [current_user.id]).page(params[:page]).per(20)
          # NOTE: user_signed_in? is always true on this branch, so the
          # "canonical" fallback is effectively dead here.
          render :json => Entities::Story.represent(@stories, title_language_preference: user_signed_in? ? current_user.title_language_preference : "canonical")
        end
      end
    else
      # A/B test: does hiding the footer ad change guest behavior?
      @hide_footer_ad = ab_test("footer_ad_on_guest_homepage", "show", "hide") == "hide"
      render :guest_index
    end
  end

  # Legacy entry point: the dashboard now lives on the user profile page.
  def dashboard
    authenticate_user!
    redirect_to user_path(current_user)
  end

  def recommendations
  end
end
Homepage feed remove option.
# Landing page: dashboard for signed-in users, marketing page for guests.
class HomeController < ApplicationController
  before_filter :hide_cover_image

  # HTML: forum topics + currently-watching sidebar.
  # JSON: paginated story feed for the current user and everyone they follow.
  def index
    if user_signed_in?
      respond_to do |format|
        format.html do
          @forum_topics = Forem::Topic.by_most_recent_post.limit(10)
          @recent_anime = current_user.watchlists.where(status: "Currently Watching").includes(:anime).order("last_watched DESC").limit(4)
        end
        format.json do
          @stories = Story.accessible_by(current_ability).order('updated_at DESC').where(user_id: current_user.following.map {|x| x.id } + [current_user.id]).page(params[:page]).per(20)
          # current_ability is passed through to the entity — presumably for
          # per-record permission checks during rendering (verify in
          # Entities::Story). user_signed_in? is always true on this branch,
          # so the "canonical" fallback is effectively dead here.
          render :json => Entities::Story.represent(@stories, current_ability: current_ability, title_language_preference: user_signed_in? ? current_user.title_language_preference : "canonical")
        end
      end
    else
      # A/B test: does hiding the footer ad change guest behavior?
      @hide_footer_ad = ab_test("footer_ad_on_guest_homepage", "show", "hide") == "hide"
      render :guest_index
    end
  end

  # Legacy entry point: the dashboard now lives on the user profile page.
  def dashboard
    authenticate_user!
    redirect_to user_path(current_user)
  end

  def recommendations
  end
end
|
# Provider:: service
#
# Copyright 2013, Holger Amann <holger@fehu.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/shell_out'
require 'chef/mixin/language'
include Chef::Mixin::ShellOut

# Render the init script for the eye-managed service and enable it at boot.
action :enable do
  # Only an LSB init template ships today; extend the case per platform.
  template_suffix = case node['platform_family']
                    when 'implement_me' then node['platform_family']
                    else 'lsb'
                    end
  # Evaluate helpers now — the template block runs in a different scope.
  cache_service_user = service_user
  cache_service_group = service_group
  template "#{node['eye']['init_dir']}/#{new_resource.init_script_prefix}#{new_resource.service_name}" do
    source "eye_init.#{template_suffix}.erb"
    cookbook "eye"
    owner cache_service_user
    group cache_service_group
    mode "0755"
    variables(
      :service_name => new_resource.service_name,
      :eye_bin => eye_bin,
      :config_file => config_file,
      :user => cache_service_user
    )
    # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
    only_if { ::File.exist?(config_file) }
  end
  unless @current_resource.enabled
    service "#{new_resource.init_script_prefix}#{new_resource.service_name}" do
      action [:enable]
    end
    new_resource.updated_by_last_action(true)
  end
end

# Load the service's .eye config into the eye master (no-op when running).
action :load do
  unless @current_resource.running
    run_command(load_command)
    new_resource.updated_by_last_action(true)
  end
end

# Stop (if running), then re-load the configuration.
action :reload do
  run_command(stop_command) if @current_resource.running
  run_command(load_command)
  new_resource.updated_by_last_action(true)
end

action :start do
  unless @current_resource.running
    run_command(start_command)
    new_resource.updated_by_last_action(true)
  end
end

# Remove the per-user config file and the init-script link.
action :disable do
  if @current_resource.enabled
    if user_conf_dir
      file config_file do
        action :delete
      end
    end
    # NOTE(review): this path omits init_script_prefix while :enable and
    # service_enabled? include it — confirm whether that is intentional.
    link "#{node['eye']['init_dir']}/#{new_resource.service_name}" do
      action :delete
    end
    new_resource.updated_by_last_action(true)
  end
end

action :stop do
  if @current_resource.running
    run_command(stop_command)
    new_resource.updated_by_last_action(true)
  end
end

action :restart do
  if @current_resource.running
    run_command(restart_command)
    new_resource.updated_by_last_action(true)
  end
end

# Build the current-state resource by probing eye and the filesystem.
def load_current_resource
  @current_resource = Chef::Resource::EyeService.new(new_resource.name)
  @current_resource.service_name(new_resource.service_name)
  determine_current_status!
  @current_resource
end

protected

def status_command
  "#{eye_bin} info #{new_resource.service_name}"
end

def load_command
  "#{eye_bin} load #{config_file}"
end

# Bare "eye load": ensures the eye master daemon itself is up.
def load_eye
  "#{eye_bin} load"
end

def start_command
  "#{eye_bin} start #{new_resource.service_name}"
end

def stop_command
  "#{eye_bin} stop #{new_resource.service_name}"
end

def restart_command
  "#{eye_bin} restart #{new_resource.service_name}"
end

# Run +command+ as the service user/group with HOME set; raises on a
# non-zero exit unless opts[:dont_raise] is given. Returns the shell_out
# result object.
def run_command(command, opts = {})
  home = user_home(service_user)
  env_variables = { 'HOME' => home }
  cmd = shell_out(command, :user => service_user, :group => service_group, :env => env_variables)
  cmd.error! unless opts[:dont_raise]
  cmd
end

def determine_current_status!
  service_running?
  service_enabled?
end

# Sets @current_resource.running from the exit status of "eye info".
def service_running?
  # Make sure the eye master process is running before querying it.
  run_command(load_eye)
  if run_command(status_command, :dont_raise => true).exitstatus > 0
    @current_resource.running false
  else
    @current_resource.running true
  end
rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
  @current_resource.running false
  nil
end

# Enabled means both the config file and the init script exist.
# (File.exist? — File.exists? was removed in Ruby 3.2.)
def service_enabled?
  if ::File.exist?(config_file) &&
     ::File.exist?("#{node['eye']['init_dir']}/#{new_resource.init_script_prefix}#{new_resource.service_name}")
    @current_resource.enabled true
  else
    @current_resource.enabled false
  end
end

# HOME for shell-outs: explicit override on the resource, else the passwd
# entry collected by ohai. (Simplified from a redundant nested assignment.)
def user_home(user)
  if new_resource.user_srv_home.nil?
    node['etc']['passwd'][user]['dir']
  else
    new_resource.user_srv_home
  end
end

def service_user
  new_resource.user_srv ? new_resource.user_srv_uid : node['eye']['user']
end

def service_group
  new_resource.user_srv ? new_resource.user_srv_gid : node['eye']['group']
end

def user_conf_dir
  ::File.join(node['eye']['conf_dir'], service_user) if node['eye']['conf_dir']
end

def user_log_dir
  ::File.join(node['eye']['log_dir'], service_user) if node['eye']['log_dir']
end

def config_file
  new_resource.config_path ||
    ::File.join(user_conf_dir, "#{new_resource.service_name}.eye")
end

def eye_bin
  new_resource.bin || node['eye']['bin']
end
Fix cookbook for chef 12
# Provider:: service
#
# Copyright 2013, Holger Amann <holger@fehu.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/mixin/shell_out'
require 'chef/mixin/language'
include Chef::Mixin::ShellOut

# Render the init script for the eye-managed service and enable it at boot.
action :enable do
  # Only an LSB init template ships today; extend the case per platform.
  template_suffix = case node['platform_family']
                    when 'implement_me' then node['platform_family']
                    else 'lsb'
                    end
  # Evaluate helpers now — the template block runs in a different scope.
  cache_service_user = service_user
  cache_service_group = service_group
  template "#{node['eye']['init_dir']}/#{new_resource.init_script_prefix}#{new_resource.service_name}" do
    source "eye_init.#{template_suffix}.erb"
    cookbook "eye"
    owner cache_service_user
    group cache_service_group
    mode "0755"
    variables(
      :service_name => new_resource.service_name,
      :eye_bin => eye_bin,
      :config_file => config_file,
      :user => cache_service_user
    )
    # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
    only_if { ::File.exist?(config_file) }
  end
  unless @current_resource.enabled
    service "#{new_resource.init_script_prefix}#{new_resource.service_name}" do
      action [:enable]
    end
    new_resource.updated_by_last_action(true)
  end
end

# Load the service's .eye config into the eye master (no-op when running).
action :load do
  unless @current_resource.running
    run_command(load_command)
    new_resource.updated_by_last_action(true)
  end
end

# Stop (if running), then re-load the configuration.
action :reload do
  run_command(stop_command) if @current_resource.running
  run_command(load_command)
  new_resource.updated_by_last_action(true)
end

action :start do
  unless @current_resource.running
    run_command(start_command)
    new_resource.updated_by_last_action(true)
  end
end

# Remove the per-user config file and the init-script link.
action :disable do
  if @current_resource.enabled
    if user_conf_dir
      file config_file do
        action :delete
      end
    end
    # NOTE(review): this path omits init_script_prefix while :enable and
    # service_enabled? include it — confirm whether that is intentional.
    link "#{node['eye']['init_dir']}/#{new_resource.service_name}" do
      action :delete
    end
    new_resource.updated_by_last_action(true)
  end
end

action :stop do
  if @current_resource.running
    run_command(stop_command)
    new_resource.updated_by_last_action(true)
  end
end

action :restart do
  if @current_resource.running
    run_command(restart_command)
    new_resource.updated_by_last_action(true)
  end
end

# Build the current-state resource by probing eye and the filesystem.
def load_current_resource
  @current_resource = Chef::Resource::EyeService.new(new_resource.name)
  @current_resource.service_name(new_resource.service_name)
  determine_current_status!
  @current_resource
end

# protected
# (Left public per the "Fix cookbook for chef 12" change — do not restore
# the protected marker without re-testing under Chef 12.)

def status_command
  "#{eye_bin} info #{new_resource.service_name}"
end

def load_command
  "#{eye_bin} load #{config_file}"
end

# Bare "eye load": ensures the eye master daemon itself is up.
def load_eye
  "#{eye_bin} load"
end

def start_command
  "#{eye_bin} start #{new_resource.service_name}"
end

def stop_command
  "#{eye_bin} stop #{new_resource.service_name}"
end

def restart_command
  "#{eye_bin} restart #{new_resource.service_name}"
end

# Run +command+ as the service user/group with HOME set; raises on a
# non-zero exit unless opts[:dont_raise] is given. Returns the shell_out
# result object.
def run_command(command, opts = {})
  home = user_home(service_user)
  env_variables = { 'HOME' => home }
  cmd = shell_out(command, :user => service_user, :group => service_group, :env => env_variables)
  cmd.error! unless opts[:dont_raise]
  cmd
end

def determine_current_status!
  service_running?
  service_enabled?
end

# Sets @current_resource.running from the exit status of "eye info".
def service_running?
  # Make sure the eye master process is running before querying it.
  run_command(load_eye)
  if run_command(status_command, :dont_raise => true).exitstatus > 0
    @current_resource.running false
  else
    @current_resource.running true
  end
rescue Mixlib::ShellOut::ShellCommandFailed, SystemCallError
  @current_resource.running false
  nil
end

# Enabled means both the config file and the init script exist.
# (File.exist? — File.exists? was removed in Ruby 3.2.)
def service_enabled?
  if ::File.exist?(config_file) &&
     ::File.exist?("#{node['eye']['init_dir']}/#{new_resource.init_script_prefix}#{new_resource.service_name}")
    @current_resource.enabled true
  else
    @current_resource.enabled false
  end
end

# HOME for shell-outs: explicit override on the resource, else the passwd
# entry collected by ohai. (Simplified from a redundant nested assignment.)
def user_home(user)
  if new_resource.user_srv_home.nil?
    node['etc']['passwd'][user]['dir']
  else
    new_resource.user_srv_home
  end
end

def service_user
  new_resource.user_srv ? new_resource.user_srv_uid : node['eye']['user']
end

def service_group
  new_resource.user_srv ? new_resource.user_srv_gid : node['eye']['group']
end

def user_conf_dir
  ::File.join(node['eye']['conf_dir'], service_user) if node['eye']['conf_dir']
end

def user_log_dir
  ::File.join(node['eye']['log_dir'], service_user) if node['eye']['log_dir']
end

def config_file
  new_resource.config_path ||
    ::File.join(user_conf_dir, "#{new_resource.service_name}.eye")
end

def eye_bin
  new_resource.bin || node['eye']['bin']
end
|
# Ingestion endpoint for inbound webhooks. Authenticates via an access
# token (query param or Bearer header) and hands the raw request body to
# Cenit::Hook for processing.
class HookController < ActionController::Base
  # Maximum accepted payload size in bytes (100 KiB).
  MAX_SIZE = 100 * 1024

  def digest
    status = :ok
    json = {}
    # NOTE(review): assumes request.body responds to #length — true for the
    # usual Rack inputs (StringIO/Tempfile); confirm for streaming servers.
    if request.body.length > MAX_SIZE
      status = :bad_request
      json[:error] = 'Data is too long'
    else
      # Token lookup order: ?access_token=..., then "Authorization: Bearer ...".
      unless (token = params[:access_token])
        if (auth_header = request.headers['Authorization'])
          token_type, token = auth_header.to_s.squeeze(' ').strip.split(' ')
          token = nil unless token_type == 'Bearer' && token.present?
        end
      end
      if token
        begin
          if Cenit::Hook.digest(token, params[:slug], request.body.read, request.content_type)
            json[:status] = :ok
          else
            json[:error] = "Hook token is invalid"
            status = :unauthorized
          end
        rescue Exception => ex
          # NOTE(review): rescue Exception is deliberate here (every failure
          # becomes a tracked SystemNotification with a support code), but it
          # also swallows SystemExit/SignalException — consider StandardError.
          report = Setup::SystemNotification.create_from(ex)
          json[:error] = "Ask for support by supplying this code: #{report.id}"
          status = :internal_server_error
        end
      else
        json[:error] = "Authorization token is malformed or missing"
        status = :unauthorized
      end
    end
    render json: json, status: status
  end
end
Add | Responding hook requests with Accepted status code
# Ingestion endpoint for inbound webhooks. Authenticates via an access
# token (query param or Bearer header) and hands the raw request body to
# Cenit::Hook for processing. Successful requests answer 202 Accepted.
class HookController < ActionController::Base
  # Maximum accepted payload size in bytes (100 KiB).
  MAX_SIZE = 100 * 1024

  def digest
    # Default to 202 Accepted: the hook is queued/digested, not necessarily
    # fully processed within this request.
    status = :accepted
    json = {}
    # NOTE(review): assumes request.body responds to #length — true for the
    # usual Rack inputs (StringIO/Tempfile); confirm for streaming servers.
    if request.body.length > MAX_SIZE
      status = :bad_request
      json[:error] = 'Data is too long'
    else
      # Token lookup order: ?access_token=..., then "Authorization: Bearer ...".
      unless (token = params[:access_token])
        if (auth_header = request.headers['Authorization'])
          token_type, token = auth_header.to_s.squeeze(' ').strip.split(' ')
          token = nil unless token_type == 'Bearer' && token.present?
        end
      end
      if token
        begin
          if Cenit::Hook.digest(token, params[:slug], request.body.read, request.content_type)
            json[:status] = :accepted
          else
            json[:error] = "Hook token is invalid"
            status = :unauthorized
          end
        rescue Exception => ex
          # NOTE(review): rescue Exception is deliberate here (every failure
          # becomes a tracked SystemNotification with a support code), but it
          # also swallows SystemExit/SignalException — consider StandardError.
          report = Setup::SystemNotification.create_from(ex)
          json[:error] = "Ask for support by supplying this code: #{report.id}"
          status = :internal_server_error
        end
      else
        json[:error] = "Authorization token is malformed or missing"
        status = :unauthorized
      end
    end
    render json: json, status: status
  end
end
|
#
# Author:: Noah Kantrowitz <noah@opscode.com>
# Cookbook Name:: application_ruby
# Provider:: unicorn
#
# Copyright:: 2011-2012, Opscode, Inc <legal@opscode.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::DSL::IncludeRecipe

# Wire the unicorn sub-resource into the enclosing application resource:
# inherit bundler settings from a sibling rails resource and install a
# default restart command when none was configured.
action :before_compile do
  if new_resource.bundler.nil?
    new_resource.bundler rails_resource && rails_resource.bundler
  end
  # Without bundler, the unicorn gem must come from the unicorn cookbook.
  unless new_resource.bundler
    include_recipe "unicorn"
  end
  new_resource.bundle_command rails_resource && rails_resource.bundle_command
  unless new_resource.restart_command
    new_resource.restart_command do
      if new_resource.runit
        # HUP makes unicorn re-exec workers without dropping connections.
        execute "/etc/init.d/#{new_resource.name} hup" do
          user "root"
        end
      elsif new_resource.upstart
        service "#{new_resource.name}-unicorn" do
          provider Chef::Provider::Service::Upstart
          action :restart
        end
      end
    end
  end
end

action :before_deploy do
end

action :before_migrate do
end

action :before_symlink do
end

# Render /etc/unicorn/<app>.rb from the resource attributes and, for runit
# deployments, declare the runit service that supervises unicorn.
action :before_restart do
  new_resource = @new_resource
  unicorn_config "/etc/unicorn/#{new_resource.name}.rb" do
    listen(new_resource.listen || { new_resource.port => new_resource.options })
    working_directory ::File.join(new_resource.path, 'current')
    worker_timeout new_resource.worker_timeout
    preload_app new_resource.preload_app
    worker_processes new_resource.worker_processes
    before_fork new_resource.before_fork
    after_fork new_resource.after_fork
    forked_user new_resource.forked_user
    forked_group new_resource.forked_group
    before_exec new_resource.before_exec
    pid new_resource.pid
    stderr_path new_resource.stderr_path
    stdout_path new_resource.stdout_path
    unicorn_command_line new_resource.unicorn_command_line
    copy_on_write new_resource.copy_on_write
    enable_stats new_resource.enable_stats
    upstart new_resource.upstart
  end
  if new_resource.runit
    runit_service new_resource.name do
      run_template_name 'unicorn'
      log_template_name 'unicorn'
      owner new_resource.owner if new_resource.owner
      group new_resource.group if new_resource.group
      cookbook new_resource.runit_template_cookbook
      options(
        :app => new_resource,
        :bundler => new_resource.bundler,
        :bundle_command => new_resource.bundle_command,
        :rails_env => new_resource.environment_name,
        # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
        :smells_like_rack => ::File.exist?(::File.join(new_resource.path, "current", "config.ru"))
      )
    end
  end
end

action :after_restart do
end

protected

# The sibling rails sub-resource of this application, if any.
def rails_resource
  new_resource.application.sub_resources.select { |res| res.type == :rails }.first
end
Revert "forgot to add restart command"
This reverts commit 086c21b808a0aea16ac5cb387ffb6eaab0239551.
#
# Author:: Noah Kantrowitz <noah@opscode.com>
# Cookbook Name:: application_ruby
# Provider:: unicorn
#
# Copyright:: 2011-2012, Opscode, Inc <legal@opscode.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include Chef::DSL::IncludeRecipe

# Wire the unicorn sub-resource into the enclosing application resource:
# inherit bundler settings from a sibling rails resource and install a
# default restart command when none was configured.
action :before_compile do
  if new_resource.bundler.nil?
    new_resource.bundler rails_resource && rails_resource.bundler
  end
  # Without bundler, the unicorn gem must come from the unicorn cookbook.
  unless new_resource.bundler
    include_recipe "unicorn"
  end
  new_resource.bundle_command rails_resource && rails_resource.bundle_command
  unless new_resource.restart_command
    new_resource.restart_command do
      if new_resource.runit
        # HUP makes unicorn re-exec workers without dropping connections.
        execute "/etc/init.d/#{new_resource.name} hup" do
          user "root"
        end
      elsif new_resource.monit
        # Placeholder branches (restored by the revert) — no restart command
        # is implemented for monit/upstart here yet.
        execute ""
      elsif new_resource.upstart
      end
    end
  end
end

action :before_deploy do
end

action :before_migrate do
end

action :before_symlink do
end

# Render /etc/unicorn/<app>.rb from the resource attributes and, for runit
# deployments, declare the runit service that supervises unicorn.
action :before_restart do
  new_resource = @new_resource
  unicorn_config "/etc/unicorn/#{new_resource.name}.rb" do
    listen(new_resource.listen || { new_resource.port => new_resource.options })
    working_directory ::File.join(new_resource.path, 'current')
    worker_timeout new_resource.worker_timeout
    preload_app new_resource.preload_app
    worker_processes new_resource.worker_processes
    before_fork new_resource.before_fork
    after_fork new_resource.after_fork
    forked_user new_resource.forked_user
    forked_group new_resource.forked_group
    before_exec new_resource.before_exec
    pid new_resource.pid
    stderr_path new_resource.stderr_path
    stdout_path new_resource.stdout_path
    unicorn_command_line new_resource.unicorn_command_line
    copy_on_write new_resource.copy_on_write
    enable_stats new_resource.enable_stats
    upstart new_resource.upstart
  end
  if new_resource.runit
    runit_service new_resource.name do
      run_template_name 'unicorn'
      log_template_name 'unicorn'
      owner new_resource.owner if new_resource.owner
      group new_resource.group if new_resource.group
      cookbook new_resource.runit_template_cookbook
      options(
        :app => new_resource,
        :bundler => new_resource.bundler,
        :bundle_command => new_resource.bundle_command,
        :rails_env => new_resource.environment_name,
        # File.exist? — File.exists? is deprecated and removed in Ruby 3.2.
        :smells_like_rack => ::File.exist?(::File.join(new_resource.path, "current", "config.ru"))
      )
    end
  end
end

action :after_restart do
end

protected

# The sibling rails sub-resource of this application, if any.
def rails_resource
  new_resource.application.sub_resources.select { |res| res.type == :rails }.first
end
|
class HostController < ApplicationController
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
include Mixins::GenericFormMixin
include Mixins::GenericSessionMixin
include Mixins::GenericListMixin
include Mixins::GenericShowMixin
include Mixins::MoreShowActions
include Mixins::BreadcrumbsMixin
# Sub-views reachable from a Host "show" screen; consumed by
# Mixins::GenericShowMixin to route ?display=... requests.
def self.display_methods
  %w[
    hv_info os_info devices network storage_adapters performance timeline storages
    resource_pools vms miq_templates compliance_history custom_button_events cloud_networks cloud_subnets
  ]
end

# Shared renderer for the simple config sub-views (hypervisor/OS/devices):
# sets the showtype and pushes a breadcrumb for the selected display.
def display_config_info
  @showtype = "config"
  title = case @display
          when "hv_info" then _("VM Monitor Information")
          when "os_info" then _("OS Information")
          when "devices" then _("Devices")
          end
  drop_breadcrumb(:name => "#{@record.name} (#{title})",
                  :url => show_link(@record, :display => @display))
end
alias display_hv_info display_config_info
alias display_os_info display_config_info
alias display_devices display_config_info

# Tree-based sub-views: builds the network or storage-adapter tree rooted
# at the current host and makes it the active tree.
def display_tree_resources
  @showtype = "config"
  title, tree = if @display == "network"
                  @network_tree = TreeBuilderNetwork.new(:network_tree, @sb, true, :root => @record)
                  [_("Network"), :network_tree]
                else
                  @sa_tree = TreeBuilderStorageAdapters.new(:sa_tree, @sb, true, :root => @record)
                  [_("Storage Adapters"), :sa_tree]
                end
  drop_breadcrumb(:name => "#{@record.name} (#{title})",
                  :url => show_link(@record, :display => @display))
  self.x_active_tree = tree
end
alias display_network display_tree_resources
alias display_storage_adapters display_tree_resources
# Label and report scope for the host filesystems list; narrowed to a nova
# HostServiceGroup's configuration files when that param is present.
def filesystems_subsets
  assert_privileges('host_show')
  scope = nil
  label = _('Files')
  host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
  if host_service_group
    scope = [[:host_service_group_filesystems, host_service_group.id]]
    label = _("Configuration files of nova service")
  end
  return label, scope
end

def filesystems
  assert_privileges('host_show')
  label, scope = filesystems_subsets
  show_association('filesystems', label, :filesystems, Filesystem, nil, scope)
end

# Label and scope for the system-services list; params[:status]
# (running/failed/all) narrows to a HostServiceGroup's systemd services.
def host_services_subsets
  assert_privileges('host_show')
  scope = nil
  label = _('Services')
  host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
  if host_service_group
    case params[:status]
    when 'running'
      scope = [[:host_service_group_running_systemd, host_service_group.id]]
      label = _("Running system services of %{name}") % {:name => host_service_group.name}
    when 'failed'
      scope = [[:host_service_group_failed_systemd, host_service_group.id]]
      label = _("Failed system services of %{name}") % {:name => host_service_group.name}
    when 'all'
      scope = [[:host_service_group_systemd, host_service_group.id]]
      label = _("All system services of %{name}") % {:name => host_service_group.name}
    end
  end
  return label, scope
end

# NOTE(review): no assert_privileges call of its own, but
# host_services_subsets performs the 'host_show' check.
def host_services
  label, scope = host_services_subsets
  show_association('host_services', label, :host_services, SystemService, nil, scope)
  session[:host_display] = "host_services"
end

def host_cloud_services
  assert_privileges('host_show')
  @center_toolbar = 'host_cloud_services'
  @no_checkboxes = false
  show_association('host_cloud_services', _('Cloud Services'), :cloud_services, CloudService, nil, nil)
end

# NOTE(review): unlike its siblings this action has no assert_privileges
# call at all — confirm whether that is intentional.
def advanced_settings
  show_association('advanced_settings', _('Advanced Settings'), :advanced_settings, AdvancedSetting)
end

def firewall_rules
  assert_privileges('host_show')
  @display = "main"
  show_association('firewall_rules', _('Firewall Rules'), :firewall_rules, FirewallRule)
end

def guest_applications
  assert_privileges('host_show')
  show_association('guest_applications', _('Packages'), :guest_applications, GuestApplication)
end
# Show the main Host list view overriding method from Mixins::GenericListMixin
def show_list
  session[:host_items] = nil
  options = {:no_checkboxes => ActiveRecord::Type::Boolean.new.cast(params[:no_checkboxes])}
  process_show_list(options)
end

def start
  redirect_to(:action => 'show_list')
end

# Edit a single host or — when session[:host_items] holds a selection —
# credentials/settings for multiple hosts at once.
def edit
  assert_privileges("host_edit")
  if session[:host_items].nil?
    @host = find_record_with_rbac(Host, params[:id])
    @in_a_form = true
    session[:changed] = false
    drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
    @title = _("Info/Settings")
  else # if editing credentials for multi host
    drop_breadcrumb(:name => _('Edit Hosts'), :url => '/host/edit/')
    @title = _("Credentials/Settings")
    # A previously selected host pre-fills the form; otherwise start blank.
    @host = if params[:selected_host]
              find_record_with_rbac(Host, params[:selected_host])
            else
              Host.new
            end
    @changed = true
    @showlinks = true
    @in_a_form = true
    # Get the db records that are being tagged
    hostitems = Host.find(session[:host_items]).sort_by(&:name)
    @selected_hosts = {}
    hostitems.each do |h|
      @selected_hosts[h.id] = h.name
    end
    build_targets_hash(hostitems)
    @view = get_db_view(Host) # Instantiate the MIQ Report view object
  end
end
# Handle the edit-form buttons: cancel, save (single- or multi-host),
# reset, and credential validation.
def update
  assert_privileges("host_edit")
  case params[:button]
  when "cancel"
    if session[:host_items] # canceling editing credentials for multiple Hosts
      flash_and_redirect(_("Edit of credentials for selected Hosts was cancelled by the user"))
    else
      @host = find_record_with_rbac(Host, params[:id])
      flash_and_redirect(_("Edit of Host \"%{name}\" was cancelled by the user") % {:name => @host.name})
    end
  when "save"
    if session[:host_items].nil?
      # Single-host save: validate against a scratch copy first, then apply.
      @host = find_record_with_rbac(Host, params[:id])
      old_host_attributes = @host.attributes.clone
      valid_host = find_record_with_rbac(Host, params[:id])
      set_record_vars(valid_host, :validate) # Set the record variables, but don't save
      if valid_record? && set_record_vars(@host) && @host.save
        AuditEvent.success(build_saved_audit_hash_angular(old_host_attributes, @host, false))
        flash_and_redirect(_("Host \"%{name}\" was saved") % {:name => @host.name})
        nil
      else
        @errors.each { |msg| add_flash(msg, :error) }
        @host.errors.each do |field, msg|
          add_flash("#{field.to_s.capitalize} #{msg}", :error)
        end
        drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
        @in_a_form = true
        javascript_flash
      end
    else
      # Multi-host save: apply one set of credentials to every selected host.
      valid_host = find_record_with_rbac(Host, params[:validate_id].presence || session[:host_items].first.to_i)
      # Set the record variables, but don't save
      creds = set_credentials(valid_host, :validate)
      if valid_record?
        @error = Host.batch_update_authentication(session[:host_items], creds)
      end
      # NOTE(review): `@error || @error.blank?` is a tautology (truthy @error
      # short-circuits; nil/blank @error satisfies blank?), so the success
      # branch is always taken. Likely intended: `if @error.blank?` — verify
      # what batch_update_authentication returns before changing.
      if @error || @error.blank?
        flash_and_redirect(_("Credentials/Settings saved successfully"))
      else
        drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
        @in_a_form = true
        javascript_flash
      end
    end
  when "reset"
    params[:edittype] = @edit[:edittype] # remember the edit type
    flash_to_session(_("All changes have been reset"), :warning)
    @in_a_form = true
    javascript_redirect(:action => 'edit', :id => @host.id.to_s)
  when "validate"
    verify_host = find_record_with_rbac(Host, params[:validate_id] ? params[:validate_id].to_i : params[:id])
    if session[:host_items].nil?
      set_record_vars(verify_host, :validate)
    else
      set_credentials(verify_host, :validate)
    end
    @in_a_form = true
    @changed = session[:changed]
    require "net/ssh"
    begin
      verify_host.verify_credentials(params[:type], :remember_host => params.key?(:remember_host))
    rescue Net::SSH::HostKeyMismatch # Capture the Host key mismatch from the verify
      # Ask the user to accept the new SSH host key, then retry with
      # remember_host=true.
      render :update do |page|
        page << javascript_prologue
        new_url = url_for_only_path(:action => "update", :button => "validate", :type => params[:type], :remember_host => "true", :escape => false)
        page << "if (confirm('#{_('The Host SSH key has changed, do you want to accept the new key?')}')) miqAjax('#{new_url}', true);"
      end
      return
    rescue StandardError => bang
      add_flash(bang.to_s, :error)
    else
      add_flash(_("Credential validation was successful"))
    end
    javascript_flash
  end
end
# handle buttons pressed on the button bar
# Routes a toolbar press either to the VM/template/storage sub-item
# handlers (params[:pressed] prefixed vm_/miq_template_/guest_/storage_)
# or to the Host-specific actions, then decides how to refresh/redirect.
def button
  @edit = session[:edit] # Restore @edit for adv search box
  params[:display] = @display if %w[vms storages].include?(@display) # Were we displaying vms/storages
  if params[:pressed].starts_with?("vm_", # Handle buttons from sub-items screen
                                   "miq_template_",
                                   "guest_",
                                   "storage_")
    pfx = pfx_for_vm_button_pressed(params[:pressed])
    process_vm_buttons(pfx)
    scanstorage if params[:pressed] == "storage_scan"
    tag(Storage) if params[:pressed] == "storage_tag"
    # Control transferred to another screen, so return
    return if ["host_drift", "#{pfx}_compare", "#{pfx}_tag", "#{pfx}_policy_sim",
               "#{pfx}_retire", "#{pfx}_protect", "#{pfx}_ownership",
               "#{pfx}_reconfigure", "#{pfx}_retire", "#{pfx}_right_size",
               "storage_tag"].include?(params[:pressed]) && @flash_array.nil?
    unless ["#{pfx}_edit", "#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish", 'vm_rename'].include?(params[:pressed])
      @refresh_div = "main_div"
      @refresh_partial = "layouts/gtl"
      show
    end
  else # Handle Host buttons
    params[:page] = @current_page unless @current_page.nil? # Save current page for list refresh
    @refresh_div = "main_div" # Default div for button.rjs to refresh
    case params[:pressed]
    when 'common_drift' then drift_analysis
    when 'custom_button' then custom_buttons
    when 'host_analyze_check_compliance' then analyze_check_compliance_hosts
    when 'host_check_compliance' then check_compliance_hosts
    when 'host_cloud_service_scheduling_toggle' then toggleservicescheduling
    when 'host_compare' then comparemiq
    when 'host_delete' then deletehosts
    when 'host_edit' then edit_record
    when 'host_introspect' then introspecthosts
    when 'host_manageable' then sethoststomanageable
    when 'host_protect' then assign_policies(Host)
    when 'host_provide' then providehosts
    when 'host_refresh' then refreshhosts
    when 'host_scan' then scanhosts
    when 'host_tag' then tag(Host)
    when 'host_toggle_maintenance' then maintenancehosts
    end
    # Handle Host power buttons
    if %w[host_shutdown host_reboot host_standby host_enter_maint_mode host_exit_maint_mode host_start
          host_stop host_reset].include?(params[:pressed])
      powerbutton_hosts(params[:pressed].split("_")[1..-1].join("_")) # Handle specific power button
    end
    perf_chart_chooser if params[:pressed] == "perf_reload"
    perf_refresh_data if params[:pressed] == "perf_refresh"
    return if ["custom_button"].include?(params[:pressed]) # custom button screen, so return, let custom_buttons method handle everything
    return if %w[host_tag host_compare common_drift host_protect perf_reload].include?(params[:pressed]) &&
              @flash_array.nil? # Another screen showing, so return
    if @flash_array.nil? && !@refresh_partial # if no button handler ran, show not implemented msg
      add_flash(_("Button not yet implemented"), :error)
      @refresh_partial = "layouts/flash_msg"
      @refresh_div = "flash_msg_div"
    elsif @flash_array && @lastaction == "show"
      @host = @record = identify_record(params[:id])
      @refresh_partial = "layouts/flash_msg"
      @refresh_div = "flash_msg_div"
    end
  end
  if @lastaction == "show" && ["custom_button"].include?(params[:pressed])
    @host = @record = identify_record(params[:id])
  end
  # NOTE: pfx is nil when a Host button was pressed, so the interpolated
  # names below ("_clone", "_migrate", ...) simply never match a host_*
  # press; only the "_edit" suffix check applies to Host buttons.
  if single_delete_test
    single_delete_redirect
  elsif params[:pressed].ends_with?("_edit") ||
        ["#{pfx}_miq_request_new", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed]) ||
        ["#{pfx}_clone", 'vm_rename'].include?(params[:pressed]) && @flash_array.nil?
    if @flash_array
      show_list
      replace_gtl_main_div
    elsif @redirect_controller
      if ["#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed])
        if flash_errors?
          javascript_flash
        else
          javascript_redirect(:controller => @redirect_controller,
                              :action => @refresh_partial,
                              :id => @redirect_id,
                              :prov_type => @prov_type,
                              :prov_id => @prov_id,
                              :org_controller => @org_controller,
                              :escape => false)
        end
      else
        render_or_redirect_partial(pfx)
      end
    else
      javascript_redirect(:action => @refresh_partial, :id => @redirect_id)
    end
  elsif @refresh_div == "main_div" && @lastaction == "show_list"
    replace_gtl_main_div
  else
    render_flash unless performed?
  end
end
# JSON backing for the Angular Host edit form: the host's current field
# values plus the per-type authentication userids (blank string when a
# given auth type is not configured on the host).
def host_form_fields
  assert_privileges("host_edit")
  host = find_record_with_rbac(Host, params[:id])

  # Keep the remembered validation target unless the form was just reset
  # (`|| nil` normalizes a falsy stored value, matching prior behavior).
  validate_against =
    unless params[:button] == "reset"
      session.fetch_path(:edit, :validate_against) || nil
    end

  # Userid for a given auth type, or "" when that type is absent.
  userid_for = lambda do |type|
    host.has_authentication_type?(type) ? host.authentication_userid(type).to_s : ""
  end

  render :json => {
    :name             => host.name,
    :hostname         => host.hostname,
    :ipmi_address     => host.ipmi_address || "",
    :custom_1         => host.custom_1 || "",
    :user_assigned_os => host.user_assigned_os,
    :operating_system => !(host.operating_system.nil? || host.operating_system.product_name.nil?),
    :mac_address      => host.mac_address || "",
    :default_userid   => host.authentication_userid.to_s,
    :remote_userid    => userid_for.call(:remote),
    :ws_userid        => userid_for.call(:ws),
    :ipmi_userid      => userid_for.call(:ipmi),
    :validate_id      => validate_against,
  }
end
private
# Model backing the current sub-display (storages / vms / templates),
# defaulting to Host itself.
def record_class
  display = params[:display] || @display
  return Storage if display == 'storages'
  return VmOrTemplate if %w[vms miq_templates].include?(display)

  Host
end
# Ordered textual summary groups for the Host summary screen: the first
# array is the left column, the second the right column.
def textual_group_list
  first_column = %i[properties relationships]
  second_column = %i[
    compliance security configuration diagnostics smart_management miq_custom_attributes
    ems_custom_attributes authentications cloud_services openstack_hardware_status openstack_service_status
  ]
  [first_column, second_column]
end
helper_method :textual_group_list
# Breadcrumb root label; the model argument is ignored for Hosts.
def breadcrumb_name(_model)
  _("Hosts")
end
# Validate the host record's numeric form fields. Populates @errors with
# any problems found and returns true when everything present is valid.
# NOTE(review): ^/$ anchor per line; \A/\z would be a stricter match —
# existing behavior preserved as-is.
def valid_record?
  @errors = []
  ws_port = params[:ws_port]
  wrapsize = params[:log_wrapsize]
  if ws_port && (ws_port !~ /^\d+$/)
    @errors.push(_("Web Services Listen Port must be numeric"))
  end
  if wrapsize && ((wrapsize !~ /^\d+$/) || wrapsize.to_i.zero?)
    @errors.push(_("Log Wrap Size must be numeric and greater than zero"))
  end
  @errors.empty?
end
# Set record variables to new values
# Copies form params onto +host+ (identity/IPMI/MAC/OS fields), then
# applies credentials. In :validate mode nothing is persisted and
# custom_1 is left untouched. Always returns true.
def set_record_vars(host, mode = nil)
  host.name = params[:name]
  host.hostname = params[:hostname].strip unless params[:hostname].nil?
  host.ipmi_address = params[:ipmi_address]
  host.mac_address = params[:mac_address]
  host.custom_1 = params[:custom_1] unless mode == :validate
  host.user_assigned_os = params[:user_assigned_os]
  set_credentials(host, mode)
  true
end

# Build the authentication hash from form params for each credential
# type (default/remote/ws/ipmi). A missing password param falls back to
# the host's stored password, so editing only a userid doesn't wipe it.
# Persists via update_authentication unless mode == :validate; returns
# the creds hash (used by Host.batch_update_authentication).
def set_credentials(host, mode)
  creds = {}
  if params[:default_userid]
    default_password = params[:default_password] || host.authentication_password
    creds[:default] = {:userid => params[:default_userid], :password => default_password}
  end
  if params[:remote_userid]
    remote_password = params[:remote_password] || host.authentication_password(:remote)
    creds[:remote] = {:userid => params[:remote_userid], :password => remote_password}
  end
  if params[:ws_userid]
    ws_password = params[:ws_password] || host.authentication_password(:ws)
    creds[:ws] = {:userid => params[:ws_userid], :password => ws_password}
  end
  if params[:ipmi_userid]
    ipmi_password = params[:ipmi_password] || host.authentication_password(:ipmi)
    creds[:ipmi] = {:userid => params[:ipmi_userid], :password => ipmi_password}
  end
  host.update_authentication(creds, :save => (mode != :validate))
  creds
end
# Singular display title used by the generic mixins.
def title
  _("Host")
end

# Restore per-controller session state; drift screens compare Hosts.
def get_session_data
  super
  @drift_db = "Host"
end

# Persist per-controller session state (only flags touched this request).
def set_session_data
  super
  session[:miq_compressed] = @compressed unless @compressed.nil?
  session[:miq_exists_mode] = @exists_mode unless @exists_mode.nil?
end

# Static Compute / Infrastructure / Hosts trail plus the current record.
def breadcrumbs_options
  {
    :breadcrumbs => [
      {:title => _("Compute")},
      {:title => _("Infrastructure")},
      {:title => _("Hosts"), :url => controller_url},
    ],
    :record_info => @host,
  }.compact
end

menu_section :inf
# RBAC: expose extra actions under existing product features.
feature_for_actions "#{controller_name}_show_list", *ADV_SEARCH_ACTIONS
feature_for_actions "#{controller_name}_show", :groups, :users, :patches
feature_for_actions "#{controller_name}_timeline", :tl_chooser
feature_for_actions "#{controller_name}_perf", :perf_top_chart
has_custom_buttons
end
Fix the tree view on the Hosts network page
# Controller for Infrastructure Hosts: list/show screens, config and
# tree sub-displays, credential editing, and toolbar button handling.
class HostController < ApplicationController
  before_action :check_privileges
  before_action :get_session_data
  after_action :cleanup_action
  after_action :set_session_data

  include Mixins::GenericFormMixin
  include Mixins::GenericSessionMixin
  include Mixins::GenericListMixin
  include Mixins::GenericShowMixin
  include Mixins::MoreShowActions
  include Mixins::BreadcrumbsMixin
# Sub-display names supported by the generic show mixin for Hosts.
def self.display_methods
  %w[hv_info os_info devices network storage_adapters performance timeline] +
    %w[storages resource_pools vms miq_templates compliance_history] +
    %w[custom_button_events cloud_networks cloud_subnets]
end
# Renders the textual config sub-screens (VM monitor info, OS info,
# devices), deriving the breadcrumb title from the active @display.
def display_config_info
  @showtype = "config"
  title = case @display
          when "hv_info" then _("VM Monitor Information")
          when "os_info" then _("OS Information")
          when "devices" then _("Devices")
          end
  drop_breadcrumb(:name => "#{@record.name} (#{title})",
                  :url => show_link(@record, :display => @display))
end
alias display_hv_info display_config_info
alias display_os_info display_config_info
alias display_devices display_config_info
# Renders the tree-backed sub-screens: Network or Storage Adapters.
# Builds the matching TreeBuilder rooted at the current record and makes
# that tree the active one in the explorer.
def display_tree_resources
  @showtype = "config"
  title, tree = if @display == "network"
                  @network_tree = TreeBuilderNetwork.new(:network_tree, @sb, true, :root => @record)
                  [_("Network"), :network_tree]
                else
                  @sa_tree = TreeBuilderStorageAdapters.new(:sa_tree, @sb, true, :root => @record)
                  [_("Storage Adapters"), :sa_tree]
                end
  drop_breadcrumb(:name => "#{@record.name} (#{title})",
                  :url => show_link(@record, :display => @display))
  self.x_active_tree = tree
end
alias display_network display_tree_resources
alias display_storage_adapters display_tree_resources
# Label and report scope for the Host filesystems list. Narrows to a
# specific HostServiceGroup's configuration files when one is requested
# via params; otherwise all files with no scope.
def filesystems_subsets
  assert_privileges('host_show')
  service_group = HostServiceGroup.where(:id => params['host_service_group']).first
  if service_group
    [_("Configuration files of nova service"), [[:host_service_group_filesystems, service_group.id]]]
  else
    [_('Files'), nil]
  end
end
# List the Host's filesystems (or a service group's config files).
def filesystems
  assert_privileges('host_show')
  label, scope = filesystems_subsets
  show_association('filesystems', label, :filesystems, Filesystem, nil, scope)
end

# Label and report scope for the Host services list. When a
# HostServiceGroup is given, params[:status] selects running / failed /
# all systemd services of that group; otherwise all services, no scope.
def host_services_subsets
  assert_privileges('host_show')
  scope = nil
  label = _('Services')
  host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
  if host_service_group
    case params[:status]
    when 'running'
      scope = [[:host_service_group_running_systemd, host_service_group.id]]
      label = _("Running system services of %{name}") % {:name => host_service_group.name}
    when 'failed'
      scope = [[:host_service_group_failed_systemd, host_service_group.id]]
      label = _("Failed system services of %{name}") % {:name => host_service_group.name}
    when 'all'
      scope = [[:host_service_group_systemd, host_service_group.id]]
      label = _("All system services of %{name}") % {:name => host_service_group.name}
    end
  end
  return label, scope
end
# List the Host's SystemServices (optionally scoped via
# host_services_subsets) and remember the active sub-display.
# Privilege check added for consistency with the sibling host_show
# association actions (filesystems, firewall_rules, guest_applications).
def host_services
  assert_privileges('host_show')
  label, scope = host_services_subsets
  show_association('host_services', label, :host_services, SystemService, nil, scope)
  session[:host_display] = "host_services"
end
# List the Host's OpenStack cloud services with a dedicated toolbar;
# checkboxes are explicitly enabled so services can be selected for
# scheduling toggles.
def host_cloud_services
  assert_privileges('host_show')
  @center_toolbar = 'host_cloud_services'
  @no_checkboxes = false
  show_association('host_cloud_services', _('Cloud Services'), :cloud_services, CloudService, nil, nil)
end
# Show the Host's Advanced Settings. Privilege check added for
# consistency with the sibling host_show association actions
# (filesystems, firewall_rules, guest_applications).
def advanced_settings
  assert_privileges('host_show')
  show_association('advanced_settings', _('Advanced Settings'), :advanced_settings, AdvancedSetting)
end
# Show the Host's firewall rules on the main display area.
def firewall_rules
  assert_privileges('host_show')
  @display = "main"
  show_association('firewall_rules', _('Firewall Rules'), :firewall_rules, FirewallRule)
end

# List installed packages discovered on the Host.
def guest_applications
  assert_privileges('host_show')
  show_association('guest_applications', _('Packages'), :guest_applications, GuestApplication)
end

# Show the main Host list view overriding method from Mixins::GenericListMixin
# Clears any lingering multi-host selection and honors the
# no_checkboxes param (string coerced to a boolean).
def show_list
  session[:host_items] = nil
  options = {:no_checkboxes => ActiveRecord::Type::Boolean.new.cast(params[:no_checkboxes])}
  process_show_list(options)
end

# Default entry action: go straight to the Hosts list.
def start
  redirect_to(:action => 'show_list')
end
# GET handler for the Host edit form. Single-host mode when
# session[:host_items] is nil; otherwise multi-host credential editing
# for the selected hosts.
def edit
  assert_privileges("host_edit")
  if session[:host_items].nil?
    @host = find_record_with_rbac(Host, params[:id])
    @in_a_form = true
    session[:changed] = false
    drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
    @title = _("Info/Settings")
  else # if editing credentials for multi host
    drop_breadcrumb(:name => _('Edit Hosts'), :url => '/host/edit/')
    @title = _("Credentials/Settings")
    # @host seeds the form fields: a chosen host's stored values, or a
    # blank Host when none was selected.
    @host = if params[:selected_host]
              find_record_with_rbac(Host, params[:selected_host])
            else
              Host.new
            end
    @changed = true
    @showlinks = true
    @in_a_form = true
    # Get the db records that are being tagged
    hostitems = Host.find(session[:host_items]).sort_by(&:name)
    @selected_hosts = {}
    hostitems.each do |h|
      @selected_hosts[h.id] = h.name
    end
    build_targets_hash(hostitems)
    @view = get_db_view(Host) # Instantiate the MIQ Report view object
  end
end
# Form handler for Host edit (single host, or multi-host credential
# editing when session[:host_items] is set). Dispatches on
# params[:button]: cancel, save, reset, or validate.
def update
  assert_privileges("host_edit")
  case params[:button]
  when "cancel"
    if session[:host_items] # canceling editing credentials for multiple Hosts
      flash_and_redirect(_("Edit of credentials for selected Hosts was cancelled by the user"))
    else
      @host = find_record_with_rbac(Host, params[:id])
      flash_and_redirect(_("Edit of Host \"%{name}\" was cancelled by the user") % {:name => @host.name})
    end
  when "save"
    if session[:host_items].nil?
      # Single-host save: dry-run the assignments on a throwaway copy
      # first, then apply and persist on @host.
      @host = find_record_with_rbac(Host, params[:id])
      old_host_attributes = @host.attributes.clone # snapshot for the audit diff
      valid_host = find_record_with_rbac(Host, params[:id])
      set_record_vars(valid_host, :validate) # Set the record variables, but don't save
      if valid_record? && set_record_vars(@host) && @host.save
        AuditEvent.success(build_saved_audit_hash_angular(old_host_attributes, @host, false))
        flash_and_redirect(_("Host \"%{name}\" was saved") % {:name => @host.name})
        nil
      else
        # Surface both controller-level (@errors) and model-level errors.
        @errors.each { |msg| add_flash(msg, :error) }
        @host.errors.each do |field, msg|
          add_flash("#{field.to_s.capitalize} #{msg}", :error)
        end
        drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
        @in_a_form = true
        javascript_flash
      end
    else
      # Multi-host save: build credentials from one representative host,
      # then push them to every selected host in one batch.
      valid_host = find_record_with_rbac(Host, params[:validate_id].presence || session[:host_items].first.to_i)
      # Set the record variables, but don't save
      creds = set_credentials(valid_host, :validate)
      if valid_record?
        @error = Host.batch_update_authentication(session[:host_items], creds)
      end
      # NOTE(review): `@error || @error.blank?` is truthy for every possible
      # value of @error (true, false, nil, or a non-empty string), so the
      # else branch below looks unreachable — confirm the intended condition.
      if @error || @error.blank?
        flash_and_redirect(_("Credentials/Settings saved successfully"))
      else
        drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
        @in_a_form = true
        javascript_flash
      end
    end
  when "reset"
    params[:edittype] = @edit[:edittype] # remember the edit type
    flash_to_session(_("All changes have been reset"), :warning)
    @in_a_form = true
    javascript_redirect(:action => 'edit', :id => @host.id.to_s)
  when "validate"
    # Credential check only — mode :validate throughout, nothing is saved.
    verify_host = find_record_with_rbac(Host, params[:validate_id] ? params[:validate_id].to_i : params[:id])
    if session[:host_items].nil?
      set_record_vars(verify_host, :validate)
    else
      set_credentials(verify_host, :validate)
    end
    @in_a_form = true
    @changed = session[:changed]
    require "net/ssh"
    begin
      verify_host.verify_credentials(params[:type], :remember_host => params.key?(:remember_host))
    rescue Net::SSH::HostKeyMismatch # Capture the Host key mismatch from the verify
      # Ask the user whether to accept the changed key; if confirmed,
      # re-run validation with :remember_host set.
      render :update do |page|
        page << javascript_prologue
        new_url = url_for_only_path(:action => "update", :button => "validate", :type => params[:type], :remember_host => "true", :escape => false)
        page << "if (confirm('#{_('The Host SSH key has changed, do you want to accept the new key?')}')) miqAjax('#{new_url}', true);"
      end
      return
    rescue StandardError => bang
      add_flash(bang.to_s, :error)
    else
      add_flash(_("Credential validation was successful"))
    end
    javascript_flash
  end
end
# handle buttons pressed on the button bar
# Routes a toolbar press either to the VM/template/storage sub-item
# handlers (params[:pressed] prefixed vm_/miq_template_/guest_/storage_)
# or to the Host-specific actions, then decides how to refresh/redirect.
def button
  @edit = session[:edit] # Restore @edit for adv search box
  params[:display] = @display if %w[vms storages].include?(@display) # Were we displaying vms/storages
  if params[:pressed].starts_with?("vm_", # Handle buttons from sub-items screen
                                   "miq_template_",
                                   "guest_",
                                   "storage_")
    pfx = pfx_for_vm_button_pressed(params[:pressed])
    process_vm_buttons(pfx)
    scanstorage if params[:pressed] == "storage_scan"
    tag(Storage) if params[:pressed] == "storage_tag"
    # Control transferred to another screen, so return
    return if ["host_drift", "#{pfx}_compare", "#{pfx}_tag", "#{pfx}_policy_sim",
               "#{pfx}_retire", "#{pfx}_protect", "#{pfx}_ownership",
               "#{pfx}_reconfigure", "#{pfx}_retire", "#{pfx}_right_size",
               "storage_tag"].include?(params[:pressed]) && @flash_array.nil?
    unless ["#{pfx}_edit", "#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish", 'vm_rename'].include?(params[:pressed])
      @refresh_div = "main_div"
      @refresh_partial = "layouts/gtl"
      show
    end
  else # Handle Host buttons
    params[:page] = @current_page unless @current_page.nil? # Save current page for list refresh
    @refresh_div = "main_div" # Default div for button.rjs to refresh
    case params[:pressed]
    when 'common_drift' then drift_analysis
    when 'custom_button' then custom_buttons
    when 'host_analyze_check_compliance' then analyze_check_compliance_hosts
    when 'host_check_compliance' then check_compliance_hosts
    when 'host_cloud_service_scheduling_toggle' then toggleservicescheduling
    when 'host_compare' then comparemiq
    when 'host_delete' then deletehosts
    when 'host_edit' then edit_record
    when 'host_introspect' then introspecthosts
    when 'host_manageable' then sethoststomanageable
    when 'host_protect' then assign_policies(Host)
    when 'host_provide' then providehosts
    when 'host_refresh' then refreshhosts
    when 'host_scan' then scanhosts
    when 'host_tag' then tag(Host)
    when 'host_toggle_maintenance' then maintenancehosts
    end
    # Handle Host power buttons
    if %w[host_shutdown host_reboot host_standby host_enter_maint_mode host_exit_maint_mode host_start
          host_stop host_reset].include?(params[:pressed])
      powerbutton_hosts(params[:pressed].split("_")[1..-1].join("_")) # Handle specific power button
    end
    perf_chart_chooser if params[:pressed] == "perf_reload"
    perf_refresh_data if params[:pressed] == "perf_refresh"
    return if ["custom_button"].include?(params[:pressed]) # custom button screen, so return, let custom_buttons method handle everything
    return if %w[host_tag host_compare common_drift host_protect perf_reload].include?(params[:pressed]) &&
              @flash_array.nil? # Another screen showing, so return
    if @flash_array.nil? && !@refresh_partial # if no button handler ran, show not implemented msg
      add_flash(_("Button not yet implemented"), :error)
      @refresh_partial = "layouts/flash_msg"
      @refresh_div = "flash_msg_div"
    elsif @flash_array && @lastaction == "show"
      @host = @record = identify_record(params[:id])
      @refresh_partial = "layouts/flash_msg"
      @refresh_div = "flash_msg_div"
    end
  end
  if @lastaction == "show" && ["custom_button"].include?(params[:pressed])
    @host = @record = identify_record(params[:id])
  end
  # NOTE: pfx is nil when a Host button was pressed, so the interpolated
  # names below ("_clone", "_migrate", ...) simply never match a host_*
  # press; only the "_edit" suffix check applies to Host buttons.
  if single_delete_test
    single_delete_redirect
  elsif params[:pressed].ends_with?("_edit") ||
        ["#{pfx}_miq_request_new", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed]) ||
        ["#{pfx}_clone", 'vm_rename'].include?(params[:pressed]) && @flash_array.nil?
    if @flash_array
      show_list
      replace_gtl_main_div
    elsif @redirect_controller
      if ["#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed])
        if flash_errors?
          javascript_flash
        else
          javascript_redirect(:controller => @redirect_controller,
                              :action => @refresh_partial,
                              :id => @redirect_id,
                              :prov_type => @prov_type,
                              :prov_id => @prov_id,
                              :org_controller => @org_controller,
                              :escape => false)
        end
      else
        render_or_redirect_partial(pfx)
      end
    else
      javascript_redirect(:action => @refresh_partial, :id => @redirect_id)
    end
  elsif @refresh_div == "main_div" && @lastaction == "show_list"
    replace_gtl_main_div
  else
    render_flash unless performed?
  end
end
# JSON backing for the Angular Host edit form: the host's current field
# values plus the per-type authentication userids (blank string when a
# given auth type is not configured on the host).
def host_form_fields
  assert_privileges("host_edit")
  host = find_record_with_rbac(Host, params[:id])
  # Keep the remembered validation target unless the form was just reset.
  validate_against = if session.fetch_path(:edit, :validate_against) && params[:button] != "reset"
                       session.fetch_path(:edit, :validate_against)
                     end
  host_hash = {
    :name => host.name,
    :hostname => host.hostname,
    :ipmi_address => host.ipmi_address || "",
    :custom_1 => host.custom_1 || "",
    :user_assigned_os => host.user_assigned_os,
    :operating_system => !(host.operating_system.nil? || host.operating_system.product_name.nil?),
    :mac_address => host.mac_address || "",
    :default_userid => host.authentication_userid.to_s,
    :remote_userid => host.has_authentication_type?(:remote) ? host.authentication_userid(:remote).to_s : "",
    :ws_userid => host.has_authentication_type?(:ws) ? host.authentication_userid(:ws).to_s : "",
    :ipmi_userid => host.has_authentication_type?(:ipmi) ? host.authentication_userid(:ipmi).to_s : "",
    :validate_id => validate_against,
  }
  render :json => host_hash
end
# Explorer accordion features for Hosts: only the Switches
# (infra_networking) tree.
def features
  switches_feature = {
    :role     => "infra_networking",
    :role_any => true,
    :name     => :infra_networking,
    :title    => _("Switches")
  }
  [ApplicationController::Feature.new_with_hash(switches_feature)]
end
private
# Model backing the current sub-display: Storage, VmOrTemplate, or Host.
def record_class
  case params[:display] || @display
  when 'storages'
    Storage
  when 'vms', 'miq_templates'
    VmOrTemplate
  else
    Host
  end
end

# Two-column layout of textual summary groups for the Host summary page.
def textual_group_list
  [
    %i[properties relationships],
    %i[
      compliance security configuration diagnostics smart_management miq_custom_attributes
      ems_custom_attributes authentications cloud_services openstack_hardware_status openstack_service_status
    ]
  ]
end
helper_method :textual_group_list

# Breadcrumb root label; the model argument is ignored for Hosts.
def breadcrumb_name(_model)
  _("Hosts")
end

# Validate the host record fields
# Numeric checks for the optional form fields; fills @errors and returns
# false when any present field fails.
# NOTE(review): ^/$ anchor per line; \A/\z would be a stricter match.
def valid_record?
  valid = true
  @errors = []
  if params[:ws_port] && (params[:ws_port] !~ /^\d+$/)
    @errors.push(_("Web Services Listen Port must be numeric"))
    valid = false
  end
  if params[:log_wrapsize] && ((params[:log_wrapsize] !~ /^\d+$/) || params[:log_wrapsize].to_i.zero?)
    @errors.push(_("Log Wrap Size must be numeric and greater than zero"))
    valid = false
  end
  valid
end
# Set record variables to new values
# Copies form params onto +host+ (identity/IPMI/MAC/OS fields), then
# applies credentials. In :validate mode nothing is persisted and
# custom_1 is left untouched. Always returns true.
def set_record_vars(host, mode = nil)
  host.name = params[:name]
  host.hostname = params[:hostname].strip unless params[:hostname].nil?
  host.ipmi_address = params[:ipmi_address]
  host.mac_address = params[:mac_address]
  host.custom_1 = params[:custom_1] unless mode == :validate
  host.user_assigned_os = params[:user_assigned_os]
  set_credentials(host, mode)
  true
end

# Build the authentication hash from form params for each credential
# type (default/remote/ws/ipmi). A missing password param falls back to
# the host's stored password, so editing only a userid doesn't wipe it.
# Persists via update_authentication unless mode == :validate; returns
# the creds hash (used by Host.batch_update_authentication).
def set_credentials(host, mode)
  creds = {}
  if params[:default_userid]
    default_password = params[:default_password] || host.authentication_password
    creds[:default] = {:userid => params[:default_userid], :password => default_password}
  end
  if params[:remote_userid]
    remote_password = params[:remote_password] || host.authentication_password(:remote)
    creds[:remote] = {:userid => params[:remote_userid], :password => remote_password}
  end
  if params[:ws_userid]
    ws_password = params[:ws_password] || host.authentication_password(:ws)
    creds[:ws] = {:userid => params[:ws_userid], :password => ws_password}
  end
  if params[:ipmi_userid]
    ipmi_password = params[:ipmi_password] || host.authentication_password(:ipmi)
    creds[:ipmi] = {:userid => params[:ipmi_userid], :password => ipmi_password}
  end
  host.update_authentication(creds, :save => (mode != :validate))
  creds
end
# Singular display title used by the generic mixins.
def title
  _("Host")
end

# Restore per-controller session state; drift screens compare Hosts.
def get_session_data
  super
  @drift_db = "Host"
end

# Persist per-controller session state (only flags touched this request).
def set_session_data
  super
  session[:miq_compressed] = @compressed unless @compressed.nil?
  session[:miq_exists_mode] = @exists_mode unless @exists_mode.nil?
end

# Static Compute / Infrastructure / Hosts trail plus the current record.
def breadcrumbs_options
  {
    :breadcrumbs => [
      {:title => _("Compute")},
      {:title => _("Infrastructure")},
      {:title => _("Hosts"), :url => controller_url},
    ],
    :record_info => @host,
  }.compact
end
menu_section :inf
# RBAC: expose extra actions under existing product features.
feature_for_actions "#{controller_name}_show_list", *ADV_SEARCH_ACTIONS
feature_for_actions "#{controller_name}_show", :groups, :users, :patches
feature_for_actions "#{controller_name}_timeline", :tl_chooser
feature_for_actions "#{controller_name}_perf", :perf_top_chart
has_custom_buttons
end
|
class HostController < ApplicationController
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
include Mixins::GenericSessionMixin
include Mixins::GenericListMixin
include Mixins::GenericShowMixin
include Mixins::MoreShowActions
include Mixins::BreadcrumbsMixin
def self.display_methods
%w[
hv_info os_info devices network storage_adapters performance timeline storages
resource_pools vms miq_templates compliance_history custom_button_events cloud_networks cloud_subnets
]
end
def display_config_info
@showtype = "config"
title = case @display
when "hv_info" then _("VM Monitor Information")
when "os_info" then _("OS Information")
when "devices" then _("Devices")
end
drop_breadcrumb(:name => "#{@record.name} (#{title})",
:url => show_link(@record, :display => @display))
end
alias display_hv_info display_config_info
alias display_os_info display_config_info
alias display_devices display_config_info
def display_tree_resources
@showtype = "config"
title, tree = if @display == "network"
@network_tree = TreeBuilderNetwork.new(:network_tree, @sb, true, :root => @record)
[_("Network"), :network_tree]
else
@sa_tree = TreeBuilderStorageAdapters.new(:sa_tree, @sb, true, :root => @record)
[_("Storage Adapters"), :sa_tree]
end
drop_breadcrumb(:name => "#{@record.name} (#{title})",
:url => show_link(@record, :display => @display))
self.x_active_tree = tree
end
alias display_network display_tree_resources
alias display_storage_adapters display_tree_resources
def filesystems_subsets
scope = nil
label = _('Files')
host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
if host_service_group
scope = [[:host_service_group_filesystems, host_service_group.id]]
label = _("Configuration files of nova service")
end
return label, scope
end
def filesystems
label, scope = filesystems_subsets
show_association('filesystems', label, :filesystems, Filesystem, nil, scope)
end
def host_services_subsets
scope = nil
label = _('Services')
host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
if host_service_group
case params[:status]
when 'running'
scope = [[:host_service_group_running_systemd, host_service_group.id]]
label = _("Running system services of %{name}") % {:name => host_service_group.name}
when 'failed'
scope = [[:host_service_group_failed_systemd, host_service_group.id]]
label = _("Failed system services of %{name}") % {:name => host_service_group.name}
when 'all'
scope = [[:host_service_group_systemd, host_service_group.id]]
label = _("All system services of %{name}") % {:name => host_service_group.name}
end
end
return label, scope
end
def host_services
label, scope = host_services_subsets
show_association('host_services', label, :host_services, SystemService, nil, scope)
session[:host_display] = "host_services"
end
def host_cloud_services
@center_toolbar = 'host_cloud_services'
@no_checkboxes = false
show_association('host_cloud_services', _('Cloud Services'), :cloud_services, CloudService, nil, nil)
end
def advanced_settings
show_association('advanced_settings', _('Advanced Settings'), :advanced_settings, AdvancedSetting)
end
def firewall_rules
@display = "main"
show_association('firewall_rules', _('Firewall Rules'), :firewall_rules, FirewallRule)
end
def guest_applications
show_association('guest_applications', _('Packages'), :guest_applications, GuestApplication)
end
# Show the main Host list view overriding method from Mixins::GenericListMixin
def show_list
session[:host_items] = nil
options = {:no_checkboxes => ActiveRecord::Type::Boolean.new.cast(params[:no_checkboxes])}
process_show_list(options)
end
def start
redirect_to(:action => 'show_list')
end
def edit
assert_privileges("host_edit")
if session[:host_items].nil?
@host = find_record_with_rbac(Host, params[:id])
@in_a_form = true
session[:changed] = false
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@title = _("Info/Settings")
else # if editing credentials for multi host
@title = _("Credentials/Settings")
@host = if params[:selected_host]
find_record_with_rbac(Host, params[:selected_host])
else
Host.new
end
@changed = true
@showlinks = true
@in_a_form = true
# Get the db records that are being tagged
hostitems = Host.find(session[:host_items]).sort_by(&:name)
@selected_hosts = {}
hostitems.each do |h|
@selected_hosts[h.id] = h.name
end
build_targets_hash(hostitems)
@view = get_db_view(Host) # Instantiate the MIQ Report view object
@view.table = ReportFormatter::Converter.records2table(hostitems, @view.cols + ['id'])
end
end
def update
assert_privileges("host_edit")
case params[:button]
when "cancel"
session[:edit] = nil # clean out the saved info
@breadcrumbs.pop if @breadcrumbs
if !session[:host_items].nil?
flash_to_session(_("Edit of credentials for selected Hosts / Nodes was cancelled by the user"))
javascript_redirect(:action => @lastaction, :display => session[:host_display])
else
@host = find_record_with_rbac(Host, params[:id])
flash_to_session(_("Edit of Host / Node \"%{name}\" was cancelled by the user") % {:name => @host.name})
javascript_redirect(:action => @lastaction, :id => @host.id, :display => session[:host_display])
end
when "save"
if session[:host_items].nil?
@host = find_record_with_rbac(Host, params[:id])
old_host_attributes = @host.attributes.clone
valid_host = find_record_with_rbac(Host, params[:id])
set_record_vars(valid_host, :validate) # Set the record variables, but don't save
if valid_record? && set_record_vars(@host) && @host.save
flash_to_session(_("Host / Node \"%{name}\" was saved") % {:name => @host.name})
@breadcrumbs.pop if @breadcrumbs
AuditEvent.success(build_saved_audit_hash_angular(old_host_attributes, @host, false))
if @lastaction == 'show_list'
javascript_redirect(:action => "show_list")
else
javascript_redirect(:action => "show", :id => @host.id.to_s)
end
return
else
@errors.each { |msg| add_flash(msg, :error) }
@host.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@in_a_form = true
javascript_flash
end
else
valid_host = find_record_with_rbac(Host, params[:validate_id].presence || session[:host_items].first.to_i)
# Set the record variables, but don't save
creds = set_credentials(valid_host, :validate)
if valid_record?
@error = Host.batch_update_authentication(session[:host_items], creds)
end
if @error || @error.blank?
flash_to_session(_("Credentials/Settings saved successfully"))
javascript_redirect(:action => 'show_list')
else
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@in_a_form = true
javascript_flash
end
end
when "reset"
params[:edittype] = @edit[:edittype] # remember the edit type
flash_to_session(_("All changes have been reset"), :warning)
@in_a_form = true
javascript_redirect(:action => 'edit', :id => @host.id.to_s)
when "validate"
verify_host = find_record_with_rbac(Host, params[:validate_id] ? params[:validate_id].to_i : params[:id])
if session[:host_items].nil?
set_record_vars(verify_host, :validate)
else
set_credentials(verify_host, :validate)
end
@in_a_form = true
@changed = session[:changed]
require "net/ssh"
begin
verify_host.verify_credentials(params[:type], :remember_host => params.key?(:remember_host))
rescue MiqException::MiqSshUtilHostKeyMismatch # Capture the Host key mismatch from the verify
render :update do |page|
page << javascript_prologue
new_url = url_for_only_path(:action => "update", :button => "validate", :type => params[:type], :remember_host => "true", :escape => false)
page << "if (confirm('The Host SSH key has changed, do you want to accept the new key?')) miqAjax('#{new_url}', true);"
end
return
rescue => bang
add_flash(bang.to_s, :error)
else
add_flash(_("Credential validation was successful"))
end
javascript_flash
end
end
# handle buttons pressed on the button bar
# Dispatches toolbar button presses on the Host screens. Buttons belonging to
# sub-item lists (VMs, templates, guests, storages) are routed to the shared
# VM-button handlers; all host_* buttons are handled in the else branch.
def button
@edit = session[:edit] # Restore @edit for adv search box
params[:display] = @display if %w[vms storages].include?(@display) # Were we displaying vms/storages
if params[:pressed].starts_with?("vm_", # Handle buttons from sub-items screen
"miq_template_",
"guest_",
"storage_")
pfx = pfx_for_vm_button_pressed(params[:pressed])
process_vm_buttons(pfx)
scanstorage if params[:pressed] == "storage_scan"
tag(Storage) if params[:pressed] == "storage_tag"
# Control transferred to another screen, so return
return if ["host_drift", "#{pfx}_compare", "#{pfx}_tag", "#{pfx}_policy_sim",
"#{pfx}_retire", "#{pfx}_protect", "#{pfx}_ownership",
"#{pfx}_reconfigure", "#{pfx}_retire", "#{pfx}_right_size",
"storage_tag"].include?(params[:pressed]) && @flash_array.nil?
unless ["#{pfx}_edit", "#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish", 'vm_rename'].include?(params[:pressed])
@refresh_div = "main_div"
@refresh_partial = "layouts/gtl"
show
end
else # Handle Host buttons
params[:page] = @current_page unless @current_page.nil? # Save current page for list refresh
@refresh_div = "main_div" # Default div for button.rjs to refresh
# Map each host_* button id to its handler method.
case params[:pressed]
when 'common_drift' then drift_analysis
when 'custom_button' then custom_buttons
when 'host_analyze_check_compliance' then analyze_check_compliance_hosts
when 'host_check_compliance' then check_compliance_hosts
when 'host_cloud_service_scheduling_toggle' then toggleservicescheduling
when 'host_compare' then comparemiq
when 'host_delete' then deletehosts
when 'host_edit' then edit_record
when 'host_introspect' then introspecthosts
when 'host_manageable' then sethoststomanageable
when 'host_protect' then assign_policies(Host)
when 'host_provide' then providehosts
when 'host_refresh' then refreshhosts
when 'host_scan' then scanhosts
when 'host_tag' then tag(Host)
when 'host_toggle_maintenance' then maintenancehosts
end
# Handle Host power buttons
if %w[host_shutdown host_reboot host_standby host_enter_maint_mode host_exit_maint_mode host_start
host_stop host_reset].include?(params[:pressed])
powerbutton_hosts(params[:pressed].split("_")[1..-1].join("_")) # Handle specific power button
end
perf_chart_chooser if params[:pressed] == "perf_reload"
perf_refresh_data if params[:pressed] == "perf_refresh"
return if ["custom_button"].include?(params[:pressed]) # custom button screen, so return, let custom_buttons method handle everything
return if %w[host_tag host_compare common_drift host_protect perf_reload].include?(params[:pressed]) &&
@flash_array.nil? # Another screen showing, so return
if @flash_array.nil? && !@refresh_partial # if no button handler ran, show not implemented msg
add_flash(_("Button not yet implemented"), :error)
@refresh_partial = "layouts/flash_msg"
@refresh_div = "flash_msg_div"
elsif @flash_array && @lastaction == "show"
@host = @record = identify_record(params[:id])
@refresh_partial = "layouts/flash_msg"
@refresh_div = "flash_msg_div"
end
end
if @lastaction == "show" && ["custom_button"].include?(params[:pressed])
@host = @record = identify_record(params[:id])
end
# Render phase: redirect after a single delete, redirect into edit/provision
# flows, or refresh the list / flash area in place.
if single_delete_test
single_delete_redirect
elsif params[:pressed].ends_with?("_edit") ||
["#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed]) ||
params[:pressed] == 'vm_rename' && @flash_array.nil?
if @flash_array
show_list
replace_gtl_main_div
elsif @redirect_controller
if ["#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed])
if flash_errors?
javascript_flash
else
javascript_redirect(:controller => @redirect_controller,
:action => @refresh_partial,
:id => @redirect_id,
:prov_type => @prov_type,
:prov_id => @prov_id,
:org_controller => @org_controller,
:escape => false)
end
else
render_or_redirect_partial(pfx)
end
else
javascript_redirect(:action => @refresh_partial, :id => @redirect_id)
end
elsif @refresh_div == "main_div" && @lastaction == "show_list"
replace_gtl_main_div
else
render_flash
end
end
# Returns the editable attributes of a Host as JSON for the edit form.
# Blank-string fallbacks keep the form fields from rendering "nil".
def host_form_fields
  assert_privileges("host_edit")
  host = find_record_with_rbac(Host, params[:id])
  # Carry the previously validated host id forward unless the form was reset.
  remembered = session.fetch_path(:edit, :validate_against)
  validate_against = remembered if remembered && params[:button] != "reset"
  render :json => {
    :name             => host.name,
    :hostname         => host.hostname,
    :ipmi_address     => host.ipmi_address || "",
    :custom_1         => host.custom_1 || "",
    :user_assigned_os => host.user_assigned_os,
    :operating_system => !(host.operating_system.nil? || host.operating_system.product_name.nil?),
    :mac_address      => host.mac_address || "",
    :default_userid   => host.authentication_userid.to_s,
    :remote_userid    => host.has_authentication_type?(:remote) ? host.authentication_userid(:remote).to_s : "",
    :ws_userid        => host.has_authentication_type?(:ws) ? host.authentication_userid(:ws).to_s : "",
    :ipmi_userid      => host.has_authentication_type?(:ipmi) ? host.authentication_userid(:ipmi).to_s : "",
    :validate_id      => validate_against,
  }
end
private
# Textual summary groups for the Host summary screen: the primary column
# (properties/relationships) followed by the secondary column of info boxes.
def textual_group_list
  primary   = %i[properties relationships]
  secondary = %i[
    compliance security configuration diagnostics smart_management miq_custom_attributes
    ems_custom_attributes authentications cloud_services openstack_hardware_status openstack_service_status
  ]
  [primary, secondary]
end
helper_method :textual_group_list
# Breadcrumb label for this controller; delegates to the Hosts/Nodes
# title helper regardless of the model passed in.
def breadcrumb_name(_model)
title_for_hosts
end
# Validate the host record fields
# Validate the numeric form fields before a Host record is saved.
# Fills @errors with translated messages and returns true when all pass.
def valid_record?
  @errors = []
  if params[:ws_port] && (params[:ws_port] !~ /^\d+$/)
    @errors.push(_("Web Services Listen Port must be numeric"))
  end
  if params[:log_wrapsize] && ((params[:log_wrapsize] !~ /^\d+$/) || params[:log_wrapsize].to_i.zero?)
    @errors.push(_("Log Wrap Size must be numeric and greater than zero"))
  end
  @errors.empty?
end
# Set record variables to new values
# Copy host attributes from the edit-form params onto +host+ and delegate
# credential handling to set_credentials. When mode is :validate the
# credentials are built but not persisted. Always returns true.
def set_record_vars(host, mode = nil)
host.name = params[:name]
host.hostname = params[:hostname].strip unless params[:hostname].nil?
host.ipmi_address = params[:ipmi_address]
host.mac_address = params[:mac_address]
# custom_1 is intentionally left untouched during validation-only runs
host.custom_1 = params[:custom_1] unless mode == :validate
host.user_assigned_os = params[:user_assigned_os]
set_credentials(host, mode)
true
end
# Assemble the credentials hash from form params, falling back to the host's
# stored password for each auth type when no new password was entered.
# Persists the authentications unless mode is :validate; returns the hash.
def set_credentials(host, mode)
  creds = {}
  if params[:default_userid]
    password = params[:default_password] || host.authentication_password
    creds[:default] = {:userid => params[:default_userid], :password => password}
  end
  if params[:remote_userid]
    password = params[:remote_password] || host.authentication_password(:remote)
    creds[:remote] = {:userid => params[:remote_userid], :password => password}
  end
  if params[:ws_userid]
    password = params[:ws_password] || host.authentication_password(:ws)
    creds[:ws] = {:userid => params[:ws_userid], :password => password}
  end
  if params[:ipmi_userid]
    password = params[:ipmi_password] || host.authentication_password(:ipmi)
    creds[:ipmi] = {:userid => params[:ipmi_userid], :password => password}
  end
  host.update_authentication(creds, :save => (mode != :validate))
  creds
end
# Gettext-translated model title used in headings and flash messages.
def title
_("Host")
end
# Restore controller state from the session (via the mixin's super) and
# select the drift-comparison database for Hosts.
def get_session_data
super
@drift_db = "Host"
end
# Persist controller state back into the session; compressed/exists-mode
# flags are only written when they were set during this request.
def set_session_data
super
session[:miq_compressed] = @compressed unless @compressed.nil?
session[:miq_exists_mode] = @exists_mode unless @exists_mode.nil?
end
# Static breadcrumb trail (Compute / Infrastructure / Hosts) plus the
# currently shown host record, consumed by Mixins::BreadcrumbsMixin.
def breadcrumbs_options
{
:breadcrumbs => [
{:title => _("Compute")},
{:title => _("Infrastructure")},
{:title => title_for_hosts, :url => controller_url},
],
:record_info => @host,
}.compact
end
menu_section :inf
has_custom_buttons
end
Re-add the gettext `_()` wrapper that was removed from the SSH host-key confirmation prompt, so the message is translatable again.
# Controller for Host / Node (infrastructure) screens: list/show views,
# per-host sub-displays (OS info, devices, network, services, filesystems),
# single- and multi-host credential editing, and toolbar button handling.
class HostController < ApplicationController
before_action :check_privileges
before_action :get_session_data
after_action :cleanup_action
after_action :set_session_data
include Mixins::GenericSessionMixin
include Mixins::GenericListMixin
include Mixins::GenericShowMixin
include Mixins::MoreShowActions
include Mixins::BreadcrumbsMixin
# Sub-collections that can be rendered from a Host's show screen.
def self.display_methods
%w[
hv_info os_info devices network storage_adapters performance timeline storages
resource_pools vms miq_templates compliance_history custom_button_events cloud_networks cloud_subnets
]
end
# Shared renderer for the simple "config"-type displays (aliased below).
def display_config_info
@showtype = "config"
title = case @display
when "hv_info" then _("VM Monitor Information")
when "os_info" then _("OS Information")
when "devices" then _("Devices")
end
drop_breadcrumb(:name => "#{@record.name} (#{title})",
:url => show_link(@record, :display => @display))
end
alias display_hv_info display_config_info
alias display_os_info display_config_info
alias display_devices display_config_info
# Shared renderer for the tree-based displays (network / storage adapters);
# builds the appropriate tree and activates it.
def display_tree_resources
@showtype = "config"
title, tree = if @display == "network"
@network_tree = TreeBuilderNetwork.new(:network_tree, @sb, true, :root => @record)
[_("Network"), :network_tree]
else
@sa_tree = TreeBuilderStorageAdapters.new(:sa_tree, @sb, true, :root => @record)
[_("Storage Adapters"), :sa_tree]
end
drop_breadcrumb(:name => "#{@record.name} (#{title})",
:url => show_link(@record, :display => @display))
self.x_active_tree = tree
end
alias display_network display_tree_resources
alias display_storage_adapters display_tree_resources
# Label/scope pair for the filesystems list, optionally narrowed to a
# HostServiceGroup given via params.
def filesystems_subsets
scope = nil
label = _('Files')
host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
if host_service_group
scope = [[:host_service_group_filesystems, host_service_group.id]]
label = _("Configuration files of nova service")
end
return label, scope
end
def filesystems
label, scope = filesystems_subsets
show_association('filesystems', label, :filesystems, Filesystem, nil, scope)
end
# Label/scope pair for the system-services list, optionally narrowed to a
# HostServiceGroup and a running/failed/all status filter.
def host_services_subsets
scope = nil
label = _('Services')
host_service_group = HostServiceGroup.where(:id => params['host_service_group']).first
if host_service_group
case params[:status]
when 'running'
scope = [[:host_service_group_running_systemd, host_service_group.id]]
label = _("Running system services of %{name}") % {:name => host_service_group.name}
when 'failed'
scope = [[:host_service_group_failed_systemd, host_service_group.id]]
label = _("Failed system services of %{name}") % {:name => host_service_group.name}
when 'all'
scope = [[:host_service_group_systemd, host_service_group.id]]
label = _("All system services of %{name}") % {:name => host_service_group.name}
end
end
return label, scope
end
def host_services
label, scope = host_services_subsets
show_association('host_services', label, :host_services, SystemService, nil, scope)
session[:host_display] = "host_services"
end
def host_cloud_services
@center_toolbar = 'host_cloud_services'
@no_checkboxes = false
show_association('host_cloud_services', _('Cloud Services'), :cloud_services, CloudService, nil, nil)
end
def advanced_settings
show_association('advanced_settings', _('Advanced Settings'), :advanced_settings, AdvancedSetting)
end
def firewall_rules
@display = "main"
show_association('firewall_rules', _('Firewall Rules'), :firewall_rules, FirewallRule)
end
def guest_applications
show_association('guest_applications', _('Packages'), :guest_applications, GuestApplication)
end
# Show the main Host list view overriding method from Mixins::GenericListMixin
def show_list
session[:host_items] = nil
options = {:no_checkboxes => ActiveRecord::Type::Boolean.new.cast(params[:no_checkboxes])}
process_show_list(options)
end
def start
redirect_to(:action => 'show_list')
end
# Edit form: single host when session[:host_items] is nil, otherwise the
# multi-host credentials/settings form for the selected hosts.
def edit
assert_privileges("host_edit")
if session[:host_items].nil?
@host = find_record_with_rbac(Host, params[:id])
@in_a_form = true
session[:changed] = false
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@title = _("Info/Settings")
else # if editing credentials for multi host
@title = _("Credentials/Settings")
@host = if params[:selected_host]
find_record_with_rbac(Host, params[:selected_host])
else
Host.new
end
@changed = true
@showlinks = true
@in_a_form = true
# Get the db records that are being tagged
hostitems = Host.find(session[:host_items]).sort_by(&:name)
@selected_hosts = {}
hostitems.each do |h|
@selected_hosts[h.id] = h.name
end
build_targets_hash(hostitems)
@view = get_db_view(Host) # Instantiate the MIQ Report view object
@view.table = ReportFormatter::Converter.records2table(hostitems, @view.cols + ['id'])
end
end
# Handles the edit form's cancel/save/reset/validate buttons for both the
# single-host and multi-host flows.
def update
assert_privileges("host_edit")
case params[:button]
when "cancel"
session[:edit] = nil # clean out the saved info
@breadcrumbs.pop if @breadcrumbs
if !session[:host_items].nil?
flash_to_session(_("Edit of credentials for selected Hosts / Nodes was cancelled by the user"))
javascript_redirect(:action => @lastaction, :display => session[:host_display])
else
@host = find_record_with_rbac(Host, params[:id])
flash_to_session(_("Edit of Host / Node \"%{name}\" was cancelled by the user") % {:name => @host.name})
javascript_redirect(:action => @lastaction, :id => @host.id, :display => session[:host_display])
end
when "save"
if session[:host_items].nil?
@host = find_record_with_rbac(Host, params[:id])
old_host_attributes = @host.attributes.clone
# Dry-run the assignment on a second copy so validation failures do not
# mutate the instance we report errors from.
valid_host = find_record_with_rbac(Host, params[:id])
set_record_vars(valid_host, :validate) # Set the record variables, but don't save
if valid_record? && set_record_vars(@host) && @host.save
flash_to_session(_("Host / Node \"%{name}\" was saved") % {:name => @host.name})
@breadcrumbs.pop if @breadcrumbs
AuditEvent.success(build_saved_audit_hash_angular(old_host_attributes, @host, false))
if @lastaction == 'show_list'
javascript_redirect(:action => "show_list")
else
javascript_redirect(:action => "show", :id => @host.id.to_s)
end
return
else
@errors.each { |msg| add_flash(msg, :error) }
@host.errors.each do |field, msg|
add_flash("#{field.to_s.capitalize} #{msg}", :error)
end
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@in_a_form = true
javascript_flash
end
else
valid_host = find_record_with_rbac(Host, params[:validate_id].presence || session[:host_items].first.to_i)
# Set the record variables, but don't save
creds = set_credentials(valid_host, :validate)
if valid_record?
@error = Host.batch_update_authentication(session[:host_items], creds)
end
# NOTE(review): `@error || @error.blank?` is truthy for every value except
# false, so this branch always reports success — confirm intended logic.
if @error || @error.blank?
flash_to_session(_("Credentials/Settings saved successfully"))
javascript_redirect(:action => 'show_list')
else
drop_breadcrumb(:name => _("Edit Host '%{name}'") % {:name => @host.name}, :url => "/host/edit/#{@host.id}")
@in_a_form = true
javascript_flash
end
end
when "reset"
params[:edittype] = @edit[:edittype] # remember the edit type
flash_to_session(_("All changes have been reset"), :warning)
@in_a_form = true
javascript_redirect(:action => 'edit', :id => @host.id.to_s)
when "validate"
verify_host = find_record_with_rbac(Host, params[:validate_id] ? params[:validate_id].to_i : params[:id])
if session[:host_items].nil?
set_record_vars(verify_host, :validate)
else
set_credentials(verify_host, :validate)
end
@in_a_form = true
@changed = session[:changed]
require "net/ssh"
begin
verify_host.verify_credentials(params[:type], :remember_host => params.key?(:remember_host))
rescue MiqException::MiqSshUtilHostKeyMismatch # Capture the Host key mismatch from the verify
# Ask the user to accept the new SSH host key, then retry with remember_host.
render :update do |page|
page << javascript_prologue
new_url = url_for_only_path(:action => "update", :button => "validate", :type => params[:type], :remember_host => "true", :escape => false)
page << "if (confirm('#{_('The Host SSH key has changed, do you want to accept the new key?')}')) miqAjax('#{new_url}', true);"
end
return
rescue => bang
add_flash(bang.to_s, :error)
else
add_flash(_("Credential validation was successful"))
end
javascript_flash
end
end
# handle buttons pressed on the button bar
# Sub-item buttons (vm_/miq_template_/guest_/storage_) go to the shared VM
# handlers; host_* buttons are dispatched in the else branch.
def button
@edit = session[:edit] # Restore @edit for adv search box
params[:display] = @display if %w[vms storages].include?(@display) # Were we displaying vms/storages
if params[:pressed].starts_with?("vm_", # Handle buttons from sub-items screen
"miq_template_",
"guest_",
"storage_")
pfx = pfx_for_vm_button_pressed(params[:pressed])
process_vm_buttons(pfx)
scanstorage if params[:pressed] == "storage_scan"
tag(Storage) if params[:pressed] == "storage_tag"
# Control transferred to another screen, so return
return if ["host_drift", "#{pfx}_compare", "#{pfx}_tag", "#{pfx}_policy_sim",
"#{pfx}_retire", "#{pfx}_protect", "#{pfx}_ownership",
"#{pfx}_reconfigure", "#{pfx}_retire", "#{pfx}_right_size",
"storage_tag"].include?(params[:pressed]) && @flash_array.nil?
unless ["#{pfx}_edit", "#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish", 'vm_rename'].include?(params[:pressed])
@refresh_div = "main_div"
@refresh_partial = "layouts/gtl"
show
end
else # Handle Host buttons
params[:page] = @current_page unless @current_page.nil? # Save current page for list refresh
@refresh_div = "main_div" # Default div for button.rjs to refresh
case params[:pressed]
when 'common_drift' then drift_analysis
when 'custom_button' then custom_buttons
when 'host_analyze_check_compliance' then analyze_check_compliance_hosts
when 'host_check_compliance' then check_compliance_hosts
when 'host_cloud_service_scheduling_toggle' then toggleservicescheduling
when 'host_compare' then comparemiq
when 'host_delete' then deletehosts
when 'host_edit' then edit_record
when 'host_introspect' then introspecthosts
when 'host_manageable' then sethoststomanageable
when 'host_protect' then assign_policies(Host)
when 'host_provide' then providehosts
when 'host_refresh' then refreshhosts
when 'host_scan' then scanhosts
when 'host_tag' then tag(Host)
when 'host_toggle_maintenance' then maintenancehosts
end
# Handle Host power buttons
if %w[host_shutdown host_reboot host_standby host_enter_maint_mode host_exit_maint_mode host_start
host_stop host_reset].include?(params[:pressed])
powerbutton_hosts(params[:pressed].split("_")[1..-1].join("_")) # Handle specific power button
end
perf_chart_chooser if params[:pressed] == "perf_reload"
perf_refresh_data if params[:pressed] == "perf_refresh"
return if ["custom_button"].include?(params[:pressed]) # custom button screen, so return, let custom_buttons method handle everything
return if %w[host_tag host_compare common_drift host_protect perf_reload].include?(params[:pressed]) &&
@flash_array.nil? # Another screen showing, so return
if @flash_array.nil? && !@refresh_partial # if no button handler ran, show not implemented msg
add_flash(_("Button not yet implemented"), :error)
@refresh_partial = "layouts/flash_msg"
@refresh_div = "flash_msg_div"
elsif @flash_array && @lastaction == "show"
@host = @record = identify_record(params[:id])
@refresh_partial = "layouts/flash_msg"
@refresh_div = "flash_msg_div"
end
end
if @lastaction == "show" && ["custom_button"].include?(params[:pressed])
@host = @record = identify_record(params[:id])
end
# Render phase: single-delete redirect, edit/provision redirects, or an
# in-place refresh of the list / flash area.
if single_delete_test
single_delete_redirect
elsif params[:pressed].ends_with?("_edit") ||
["#{pfx}_miq_request_new", "#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed]) ||
params[:pressed] == 'vm_rename' && @flash_array.nil?
if @flash_array
show_list
replace_gtl_main_div
elsif @redirect_controller
if ["#{pfx}_clone", "#{pfx}_migrate", "#{pfx}_publish"].include?(params[:pressed])
if flash_errors?
javascript_flash
else
javascript_redirect(:controller => @redirect_controller,
:action => @refresh_partial,
:id => @redirect_id,
:prov_type => @prov_type,
:prov_id => @prov_id,
:org_controller => @org_controller,
:escape => false)
end
else
render_or_redirect_partial(pfx)
end
else
javascript_redirect(:action => @refresh_partial, :id => @redirect_id)
end
elsif @refresh_div == "main_div" && @lastaction == "show_list"
replace_gtl_main_div
else
render_flash
end
end
# Returns the editable attributes of a Host as JSON for the edit form.
def host_form_fields
assert_privileges("host_edit")
host = find_record_with_rbac(Host, params[:id])
validate_against = if session.fetch_path(:edit, :validate_against) && params[:button] != "reset"
session.fetch_path(:edit, :validate_against)
end
host_hash = {
:name => host.name,
:hostname => host.hostname,
:ipmi_address => host.ipmi_address ? host.ipmi_address : "",
:custom_1 => host.custom_1 ? host.custom_1 : "",
:user_assigned_os => host.user_assigned_os,
:operating_system => !(host.operating_system.nil? || host.operating_system.product_name.nil?),
:mac_address => host.mac_address ? host.mac_address : "",
:default_userid => host.authentication_userid.to_s,
:remote_userid => host.has_authentication_type?(:remote) ? host.authentication_userid(:remote).to_s : "",
:ws_userid => host.has_authentication_type?(:ws) ? host.authentication_userid(:ws).to_s : "",
:ipmi_userid => host.has_authentication_type?(:ipmi) ? host.authentication_userid(:ipmi).to_s : "",
:validate_id => validate_against,
}
render :json => host_hash
end
private
# Textual summary groups for the Host summary screen.
def textual_group_list
[
%i[properties relationships],
%i[
compliance security configuration diagnostics smart_management miq_custom_attributes
ems_custom_attributes authentications cloud_services openstack_hardware_status openstack_service_status
]
]
end
helper_method :textual_group_list
# Breadcrumb label; delegates to the Hosts/Nodes title helper.
def breadcrumb_name(_model)
title_for_hosts
end
# Validate the host record fields
def valid_record?
valid = true
@errors = []
if params[:ws_port] && (params[:ws_port] !~ /^\d+$/)
@errors.push(_("Web Services Listen Port must be numeric"))
valid = false
end
if params[:log_wrapsize] && ((params[:log_wrapsize] !~ /^\d+$/) || params[:log_wrapsize].to_i.zero?)
@errors.push(_("Log Wrap Size must be numeric and greater than zero"))
valid = false
end
valid
end
# Set record variables to new values
def set_record_vars(host, mode = nil)
host.name = params[:name]
host.hostname = params[:hostname].strip unless params[:hostname].nil?
host.ipmi_address = params[:ipmi_address]
host.mac_address = params[:mac_address]
host.custom_1 = params[:custom_1] unless mode == :validate
host.user_assigned_os = params[:user_assigned_os]
set_credentials(host, mode)
true
end
# Build the per-type credentials hash from params, keeping the stored
# password when none was entered; persists unless mode is :validate.
def set_credentials(host, mode)
creds = {}
if params[:default_userid]
default_password = params[:default_password] ? params[:default_password] : host.authentication_password
creds[:default] = {:userid => params[:default_userid], :password => default_password}
end
if params[:remote_userid]
remote_password = params[:remote_password] ? params[:remote_password] : host.authentication_password(:remote)
creds[:remote] = {:userid => params[:remote_userid], :password => remote_password}
end
if params[:ws_userid]
ws_password = params[:ws_password] ? params[:ws_password] : host.authentication_password(:ws)
creds[:ws] = {:userid => params[:ws_userid], :password => ws_password}
end
if params[:ipmi_userid]
ipmi_password = params[:ipmi_password] ? params[:ipmi_password] : host.authentication_password(:ipmi)
creds[:ipmi] = {:userid => params[:ipmi_userid], :password => ipmi_password}
end
host.update_authentication(creds, :save => (mode != :validate))
creds
end
# Translated model title used in headings and flash messages.
def title
_("Host")
end
def get_session_data
super
@drift_db = "Host"
end
def set_session_data
super
session[:miq_compressed] = @compressed unless @compressed.nil?
session[:miq_exists_mode] = @exists_mode unless @exists_mode.nil?
end
# Breadcrumb trail (Compute / Infrastructure / Hosts) plus current record.
def breadcrumbs_options
{
:breadcrumbs => [
{:title => _("Compute")},
{:title => _("Infrastructure")},
{:title => title_for_hosts, :url => controller_url},
],
:record_info => @host,
}.compact
end
menu_section :inf
has_custom_buttons
end
|
# Photo-voting app: tablet clients see the voting UI for the latest photo,
# everything else gets the default page.
class MainController < ApplicationController
  before_filter :check_for_mobile

  # Landing page. Tablet clients additionally get the current photo and
  # whether this IP has already voted.
  def index
    if @user_agent == :tablet
      @voted = Vote.already_voted?(request.remote_ip)
      @image = Photo.new
      @current_image = Photo.last
    end
  end

  # Upload a new photo; a successful save resets the running vote tally.
  def create
    @photo = Photo.new(photo_params)
    @photo.ip = request.remote_ip
    if params[:photo][:image]
      # Normalize blank captions to NULL before saving.
      @photo.caption = nil if @photo.caption == ""
      Vote.clear_votes if @photo.save
    end
    redirect_to '/'
  end

  # Record a "keep" vote from this client.
  def keep
    Vote.place_vote(true, request.remote_ip)
    redirect_to '/'
  end

  # Record a "kill" vote from this client.
  def kill
    Vote.place_vote(false, request.remote_ip)
    redirect_to '/'
  end

  private

  # Strong parameters for photo uploads.
  def photo_params
    params.require(:photo).permit(:caption, :image)
  end

  # BUG FIX: this previously matched the regex against @user_agent (always nil
  # at this point) and assigned request.variant, so @user_agent was never set
  # and the tablet branch in #index could never run. Classify the client from
  # the real User-Agent header instead.
  def check_for_mobile
    @user_agent = request.user_agent =~ /Mobile|webOS/ ? :tablet : :desktop
  end
end
Fix the bug in the check_for_mobile filter — it matched the regex against the unset @user_agent instead of request.user_agent, so @user_agent was never assigned and the tablet-specific view in index never rendered; also restrict the filter to the index action.
# Photo-voting app: tablet clients see the voting UI for the latest photo,
# everything else gets the default page.
class MainController < ApplicationController
  # BUG FIX: :only expects a symbol (or array of symbols). The bare `index`
  # was evaluated as a class-level method call while the class loaded and
  # raised NoMethodError, so the controller could not be defined at all.
  before_filter :check_for_mobile, :only => :index

  # Landing page. Tablet clients additionally get the current photo and
  # whether this IP has already voted.
  def index
    if @user_agent == :tablet
      @voted = Vote.already_voted?(request.remote_ip)
      @image = Photo.new
      @current_image = Photo.last
    end
  end

  # Upload a new photo; a successful save resets the running vote tally.
  def create
    @photo = Photo.new(photo_params)
    @photo.ip = request.remote_ip
    if params[:photo][:image]
      # Normalize blank captions to NULL before saving.
      @photo.caption = nil if @photo.caption == ""
      Vote.clear_votes if @photo.save
    end
    redirect_to '/'
  end

  # Record a "keep" vote from this client.
  def keep
    Vote.place_vote(true, request.remote_ip)
    redirect_to '/'
  end

  # Record a "kill" vote from this client.
  def kill
    Vote.place_vote(false, request.remote_ip)
    redirect_to '/'
  end

  private

  # Strong parameters for photo uploads.
  def photo_params
    params.require(:photo).permit(:caption, :image)
  end

  # Classify the client from the User-Agent header; #index branches on this.
  def check_for_mobile
    request.user_agent =~ /Mobile|webOS/ ? (@user_agent = :tablet) : (@user_agent = :desktop)
  end
end
|
# Maps: browsing, geographic search, georectification (warp), cropping (clip),
# alignment and export of scanned historical maps.
class MapsController < ApplicationController
layout 'mapdetail', :only => [:show, :edit, :preview, :warp, :clip, :align, :activity, :warped, :export, :metadata, :comments]
before_filter :store_location, :only => [:warp, :align, :clip, :export, :edit, :comments ]
before_filter :authenticate_user!, :only => [:new, :create, :edit, :update, :destroy, :delete, :warp, :rectify, :clip, :align, :warp_align, :mask_map, :delete_mask, :save_mask, :save_mask_and_warp, :set_rough_state, :set_rough_centroid, :publish, :trace, :id, :map_type, :create_inset]
before_filter :check_administrator_role, :only => [:publish, :map_type, :create_inset]
before_filter :find_map_if_available,
:except => [:show, :index, :wms, :tile, :mapserver_wms, :warp_aligned, :status, :new, :create, :update, :edit, :tag, :geosearch, :map_type, :create_inset]
before_filter :check_link_back, :only => [:show, :warp, :clip, :align, :warped, :export, :activity]
# Read-only endpoints stay available when the site is in read-only mode.
skip_before_filter :check_site_read_only, :only => [:show, :index, :geosearch, :wms, :tile, :status, :warped, :comments, :export, :metadata, :activity, :preview, :thumb, :inset_maps]
rescue_from ActiveRecord::RecordNotFound, :with => :bad_record
helper :sort
include SortHelper
###############
#
# Collection actions
#
###############
# Browsable, searchable, paginated map list. Supports a whitelisted search
# field, an issue-year range filter, sortable columns with NULL ordering, and
# HTML/XML/JSON responses.
def index
sort_init('updated_at', {:default_order => "desc"})
sort_update
@show_warped = params[:show_warped]
request.query_string.length > 0 ? qstring = "?" + request.query_string : qstring = ""
set_session_link_back url_for(:controller=> 'maps', :action => 'index',:skip_relative_url_root => false, :only_path => false )+ qstring
@query = params[:query]
# Whitelist the search column: only values from this list ever reach SQL,
# so interpolating where_col below is injection-safe; the user query itself
# is bound as a parameter.
@field = %w(text title description status catnyp nypl_digital_id uuid).detect{|f| f == (params[:field])}
@field = "text" if @field.nil?
where_col = @field
if @field == "text"
where_col = "(title || ' ' || description)"
end
#we'll use POSIX regular expression for searches ~*'( |^)robinson([^A-z]|$)' and to strip out brakets etc ~*'(:punct:|^|)plate 6([^A-z]|$)';
if @query && @query.strip.length > 0 && @field
conditions = ["#{where_col} ~* ?", '(:punct:|^|)'+@query+'([^A-z]|$)']
else
conditions = nil
end
# Year slider bounds derived from the data, with fallbacks for an empty DB.
@year_min = Map.minimum(:issue_year).to_i - 1
@year_max = Map.maximum(:issue_year).to_i + 1
@year_min = 1600 if @year_min == 0
@year_max = 2015 if @year_max == 0
year_conditions = nil
if params[:from] && params[:to] && !(@year_min == params[:from].to_i && @year_max == params[:to].to_i)
year_conditions = {:issue_year => params[:from].to_i..params[:to].to_i}
end
@from = params[:from]
@to = params[:to]
# Push NULL sort keys to the end of whichever direction was requested.
if params[:sort_order] && params[:sort_order] == "desc"
sort_nulls = " NULLS LAST"
else
sort_nulls = " NULLS FIRST"
end
@per_page = params[:per_page] || 50
paginate_params = {
:page => params[:page],
:per_page => @per_page
}
order_options = sort_clause + sort_nulls
where_options = conditions
#order('name').where('name LIKE ?', "%#{search}%").paginate(page: page, per_page: 10)
if @show_warped == "1"
@maps = Map.warped.where(where_options).where(year_conditions).order(order_options).paginate(paginate_params)
else
@maps = Map.where(where_options).where(year_conditions).order(order_options).paginate(paginate_params)
end
@html_title = "Browse Maps"
if request.xhr?
render :action => 'index.rjs'
else
respond_to do |format|
format.html{ render :layout =>'application' } # index.html.erb
format.xml { render :xml => @maps.to_xml(:root => "maps", :except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]) {|xml|
xml.tag!'stat', "ok"
xml.tag!'total-entries', @maps.total_entries
xml.tag!'per-page', @maps.per_page
xml.tag!'current-page',@maps.current_page} }
format.json { render :json => {:stat => "ok",
:current_page => @maps.current_page,
:per_page => @maps.per_page,
:total_entries => @maps.total_entries,
:total_pages => @maps.total_pages,
:items => @maps.to_a}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid], :methods => :depicts_year) , :callback => params[:callback]
}
end
end
end
# Geographic search over warped maps. A ?place= request geocodes via Yahoo
# GeoPlanet and returns a bbox as JSON; otherwise the given (or default NYC)
# bbox is turned into a PostGIS within/intersects query over map footprints.
def geosearch
require 'geoplanet'
sort_init 'updated_at'
sort_update
extents = [-74.1710,40.5883,-73.4809,40.8485] #NYC
#TODO change to straight javascript call.
# Geocoding branch: resolve a place name to a bounding box and return early.
if params[:place] && !params[:place].blank?
place_query = params[:place]
GeoPlanet.appid = APP_CONFIG['yahoo_app_id']
geoplanet_result = GeoPlanet::Place.search(place_query, :count => 2)
if geoplanet_result[0]
g_bbox = geoplanet_result[0].bounding_box.map!{|x| x.reverse}
extents = g_bbox[1] + g_bbox[0]
render :json => extents.to_json
return
else
render :json => extents.to_json
return
end
end
# Parse a user-supplied bbox; fall back to the NYC default on bad floats.
if params[:bbox] && params[:bbox].split(',').size == 4
begin
extents = params[:bbox].split(',').collect {|i| Float(i)}
rescue ArgumentError
logger.debug "arg error with bbox, setting extent to defaults"
end
end
@bbox = extents.join(',')
if extents
# Closed ring of the bbox corners for the PostGIS polygon.
bbox_poly_ary = [
[ extents[0], extents[1] ],
[ extents[2], extents[1] ],
[ extents[2], extents[3] ],
[ extents[0], extents[3] ],
[ extents[0], extents[1] ]
]
map_srid = 0
map_srid = Map.warped.first.bbox_geom.srid if Map.warped.first && Map.warped.first.bbox_geom
if map_srid == 0
bbox_polygon = GeoRuby::SimpleFeatures::Polygon.from_coordinates([bbox_poly_ary]).as_wkt
else
bbox_polygon = GeoRuby::SimpleFeatures::Polygon.from_coordinates([bbox_poly_ary]).as_ewkt
end
if params[:operation] == "within"
conditions = ["ST_Within(bbox_geom, ST_GeomFromText('#{bbox_polygon}'))"]
else
conditions = ["ST_Intersects(bbox_geom, ST_GeomFromText('#{bbox_polygon}'))"]
end
else
conditions = nil
end
if params[:sort_order] && params[:sort_order] == "desc"
sort_nulls = " NULLS LAST"
else
sort_nulls = " NULLS FIRST"
end
@operation = params[:operation]
# Intersect results are ranked by how closely the map's area matches the
# search box; within results simply favour larger maps first.
if @operation == "intersect"
sort_geo = "ABS(ST_Area(bbox_geom) - ST_Area(ST_GeomFromText('#{bbox_polygon}'))) ASC, "
else
sort_geo ="ST_Area(bbox_geom) DESC ,"
end
@year_min = Map.minimum(:issue_year).to_i - 1
@year_max = Map.maximum(:issue_year).to_i + 1
@year_min = 1600 if @year_min == 0
@year_max = 2015 if @year_max == 0
year_conditions = nil
if params[:from] && params[:to] && !(@year_min == params[:from].to_i && @year_max == params[:to].to_i)
year_conditions = {:issue_year => params[:from].to_i..params[:to].to_i}
end
status_conditions = {:status => [Map.status(:warped), Map.status(:published), Map.status(:publishing)]}
paginate_params = {
:page => params[:page],
:per_page => 20
}
order_params = sort_geo + sort_clause + sort_nulls
@maps = Map.select("bbox, title, description, updated_at, id, nypl_digital_id, uuid, issue_year, status").warped.where(conditions).where(year_conditions).where(status_conditions).order(order_params).paginate(paginate_params)
@jsonmaps = @maps.to_json # (:only => [:bbox, :title, :id, :nypl_digital_id])
respond_to do |format|
format.html{ render :layout =>'application' }
format.json { render :json => {:stat => "ok",
:current_page => @maps.current_page,
:per_page => @maps.per_page,
:total_entries => @maps.total_entries,
:total_pages => @maps.total_pages,
:items => @maps.to_a}.to_json(:methods => :depicts_year) , :callback => params[:callback]}
end
end
###############
#
# Tab actions
#
###############
# Show tab (GET /maps/:id): the main map detail page.
# Branches on authentication:
#  - anonymous users get the read-only preview with editing tabs disabled;
#  - signed-in users additionally get PaperTrail version info, and viewing
#    an unloaded map kicks off a background fetch from the image server.
def show
@current_tab = "show"
@selected_tab = 0
@disabled_tabs =[]
@map = Map.find(params[:id])
@html_title = "Viewing Map "+@map.id.to_s
if @map.status.nil? || @map.status == :unloaded
@mapstatus = "unloaded"
else
@mapstatus = @map.status.to_s
end
#
# Not Logged in users
#
if !user_signed_in?
@disabled_tabs = ["warp", "clip", "align", "activity"]
if @map.status.nil? or @map.status == :unloaded or @map.status == :loading
@disabled_tabs += ["warped"]
end
# %s is substituted with the login link (notice_item) by the view helper.
flash.now[:notice] = "You may need to %s to start editing the map"
flash.now[:notice_item] = ["log in", :new_user_session]
session[:user_return_to] = request.url
if request.xhr?
@xhr_flag = "xhr"
render :action => "preview", :layout => "tab_container"
else
respond_to do |format|
format.html {render :action => "preview"}
format.kml {render :action => "show_kml", :layout => false}
format.rss {render :action=> 'show'}
# format.xml {render :xml => @map.to_xml(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]) }
format.json {render :json =>{:stat => "ok", :items => @map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback] }
end
end
return #stop doing anything more
end
#End doing stuff for not logged in users.
#
# Logged in users
#
# PaperTrail version info for the "last edited by" display.
if @map.versions.last
@current_version_number = @map.versions.last.index
if User.exists?(@map.versions.last.whodunnit.to_i)
@current_version_user = User.find_by_id(@map.versions.last.whodunnit.to_i)
else
@current_version_user = nil
end
else
@current_version_number = 1
@current_version_user = nil
end
# Up to 7 other distinct editors of this map, dropping deleted accounts.
version_users = PaperTrail::Version.where({:item_type => 'Map', :item_id => @map.id}).where.not(:whodunnit => nil).where.not(:whodunnit => @current_version_user).select(:whodunnit).distinct.limit(7)
@version_users = version_users.to_a.delete_if{|v| !User.exists?(v.whodunnit) }
unless user_signed_in? and current_user.has_role?("administrator")
if @map.status == :publishing or @map.status == :published
@disabled_tabs += ["warp", "clip", "align"] #dont show any others unless you're an editor
end
end
#note, trying to view an image that hasnt been requested, will cause it to be requested
if @map.status.nil? or @map.status == :unloaded
@disabled_tabs = ["warp", "clip", "align", "warped", "preview","activity", "export"]
@title = "Viewing unwarped map."
logger.debug("starting spawn fetch iamge")
# Fetch the image in a background process so this request returns quickly.
Spawnling.new do
logger.info "starting fetch from image server"
@map.fetch_from_image_server
logger.info "finished fetch from image server. Status = "+@map.status.to_s
end
return
end
@title = "Viewing original map. "
if !@map.warped_or_published?
@title += "This map has not been warped yet."
end
if request.xhr?
choose_layout_if_ajax
else
respond_to do |format|
format.html
format.kml {render :action => "show_kml", :layout => false}
format.json {render :json =>{:stat => "ok", :items => @map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback] }
end
end
end
# Comments tab: loads the comments attached to the current map (@map is
# set by the find_map_if_available filter) and renders the tab.
def comments
  @current_tab  = "comments"
  @selected_tab = 9
  @html_title   = "comments"
  @comments     = @map.comments
  choose_layout_if_ajax
  respond_to do |format|
    format.html {}
  end
end
# Export tab: offers the map for download in several raster formats.
# ?unwarped=1 serves the original scan; otherwise the rectified GeoTIFF.
def export
  @current_tab = "export"
  @selected_tab = 5
  @html_title = "Export Map" + @map.id.to_s
  choose_layout_if_ajax
  tif_filename = params[:unwarped] ? @map.filename : @map.warped_filename
  respond_to do |format|
    format.html {}
    format.tif     { send_file tif_filename, :x_sendfile => true }
    format.png     { send_file @map.warped_png, :x_sendfile => true }
    format.aux_xml { send_file @map.warped_png_aux_xml, :x_sendfile => true }
  end
end
# Crop tab: shows the clipping/masking UI for the current map.
# @gml_exists is the string "true"/"false" (consumed by the JS view) and
# reflects whether a saved OpenLayers mask file is present on disk.
def clip
  # TODO delete current_tab
  @current_tab = "clip"
  @selected_tab = 3
  @html_title = "Cropping Map " + @map.id.to_s
  @gml_exists = "false"
  # File.exists? is a deprecated alias (removed in Ruby 3.2) — use File.exist?
  if File.exist?(@map.masking_file_gml + ".ol")
    @gml_exists = "true"
  end
  choose_layout_if_ajax
end
# Warped tab: shows the rectified map preview when it has been warped (or
# is publishing) and has at least 3 hard control points; otherwise shows a
# notice asking the user to rectify first.
def warped
  @current_tab = "warped"
  @selected_tab = 5
  # Fixed typo in the page title: "Rectfied" -> "Rectified".
  @html_title = "Viewing Rectified Map " + @map.id.to_s
  if (@map.warped_or_published? || @map.status == :publishing) && @map.gcps.hard.size > 2
    # IDs of visible layers this map belongs to, for the overlay picker.
    @other_layers = @map.layers.visible.map(&:id)
  else
    flash.now[:notice] = "Whoops, the map needs to be rectified before you can view it"
  end
  choose_layout_if_ajax
end
# Align tab: renders the side-by-side alignment UI for the current map.
def align
  @current_tab  = "align"
  @selected_tab = 3
  @html_title   = "Align Maps "
  choose_layout_if_ajax
end
# Rectify tab: loads control points (with error) plus visible-layer ids
# for the overlay picker; suggests places when no hard GCPs exist yet.
def warp
  @current_tab = "warp"
  @selected_tab = 2
  @html_title = "Rectifying Map " + @map.id.to_s
  @bestguess_places = @map.find_bestguess_places if @map.gcps.hard.empty?
  @other_layers = @map.layers.visible.map(&:id)
  @gcps = @map.gcps_with_error
  choose_layout_if_ajax
end
# Metadata tab: just renders the metadata view (tab layout when AJAX).
def metadata
choose_layout_if_ajax
end
# Trace view (vector-tracing overlay); available for published maps only.
def trace
redirect_to map_path unless @map.published?
@overlay = @map
end
# iD editor embed; published maps only. Rendered without the app layout.
def id
redirect_to map_path unless @map.published?
@overlay = @map
render "id", :layout => false
end
# called by id JS oauth
# OAuth landing page for the iD editor popup; layout-less render.
def idland
render "idland", :layout => false
end
#view the inset maps from a given map
# Also computes PaperTrail version info for the header display, and
# bounces back to the parent map when it has no insets.
def inset_maps
@html_title = "Inset Maps for "+ @map.id.to_s
@inset_maps = @map.inset_maps
if @map.versions.last
@current_version_number = @map.versions.last.index
if User.exists?(@map.versions.last.whodunnit.to_i)
@current_version_user = User.find_by_id(@map.versions.last.whodunnit.to_i)
else
@current_version_user = nil
end
else
# No versions recorded yet: treat as version 1 by an unknown user.
@current_version_number = 1
@current_version_user = nil
end
if @inset_maps.empty?
flash[:notice] = "No inset maps found for this map"
redirect_to @map and return
end
end
###############
#
# Other / API actions
#
###############
# post create inset, admin only (enforced by the check_administrator_role filter)
# POST: creates an inset map from the given map when its status allows it.
def create_inset
  @map = Map.find(params[:id])
  # Inverted the original `unless ... else` to the clearer `if ... else`;
  # also fixed the grammar in the error flash ("It's status" -> "Its status").
  if [:available, :warping, :warped, :published].include?(@map.status)
    @inset_map = @map.create_inset
    if @inset_map && @inset_map.save
      flash[:notice] = "Successfully created inset map!"
      redirect_to map_path(@inset_map) and return
    else
      flash[:error] = "Sorry, there was a problem creating this inset map"
    end
  else
    flash[:error] = "Sorry, this map is not ready to create an inset map from. Its status is " + @map.status.to_s
    redirect_to map_path(@map) and return
  end
end
# Redirects to the NYPL image server's thumbnail for this map.
def thumb
  digital_id = Map.find(params[:id]).nypl_digital_id
  redirect_to "http://images.nypl.org/?t=t&id=#{digital_id}"
end
# POST: updates the map's map_type (admin only — see before_filter).
# When a valid layer id is supplied, also reloads that layer's maps for the view.
def map_type
@map = Map.find(params[:id])
map_type = params[:map][:map_type]
# Only accept values from the Map::MAP_TYPE whitelist; otherwise no-op.
if Map::MAP_TYPE.include? map_type.to_sym
@map.update_map_type(map_type)
end
if Layer.exists?(params[:layerid].to_i)
@layer = Layer.find(params[:layerid].to_i)
@maps = @layer.maps.paginate(:per_page => 30, :page => 1, :order => :map_type)
end
render :text => "Map has changed. Map type: "+@map.map_type.to_s
end
#pass in soft true to get soft gcps
# Returns this map's ground control points (with computed :error) as
# JSON (html/json formats) or XML. params[:soft] selects the soft point set.
def gcps
@map = Map.find(params[:id])
gcps = @map.gcps_with_error(params[:soft])
respond_to do |format|
format.html { render :json => {:stat => "ok", :items => gcps.to_a}.to_json(:methods => :error), :callback => params[:callback]}
format.json { render :json => {:stat => "ok", :items => gcps.to_a}.to_json(:methods => :error), :callback => params[:callback]}
format.xml { render :xml => gcps.to_xml(:methods => :error)}
end
end
# JSON endpoint returning the map record minus bulky/internal columns
# (note: unlike other endpoints here, rough_centroid IS serialized).
def get_rough_centroid
  map = Map.find(params[:id])
  hidden_columns = [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail]
  respond_to do |format|
    format.json { render :json => { :stat => "ok", :items => map }.to_json(:except => hidden_columns), :callback => params[:callback] }
  end
end
# POST: stores a rough lon/lat/zoom hint for a map and saves the centroid.
# NOTE(review): update_attributes runs before the lat/lon presence check in
# the same condition, so rough_* fields may already be persisted even when
# the "fail" branch renders — confirm whether that ordering is intended.
def set_rough_centroid
map = Map.find(params[:id])
lon = params[:lon]
lat = params[:lat]
zoom = params[:zoom]
respond_to do |format|
if map.update_attributes(:rough_lon => lon, :rough_lat => lat, :rough_zoom => zoom ) && lat && lon
map.save_rough_centroid(lon, lat)
format.json {render :json =>{:stat => "ok", :items => map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback]
}
else
format.json { render :json => {:stat => "fail", :message => "Rough centroid not set", :items => [], :errors => map.errors.to_a}.to_json, :callback => params[:callback]}
end
end
end
# JSON endpoint: returns the map's rough georeferencing state, or a fail
# payload when it has never been set.
def get_rough_state
  map = Map.find(params[:id])
  respond_to do |format|
    if map.rough_state
      payload = { :stat => "ok", :items => ["id" => map.id, "rough_state" => map.rough_state] }
    else
      payload = { :stat => "fail", :message => "Rough state is null", :items => map.rough_state }
    end
    format.json { render :json => payload.to_json, :callback => params[:callback] }
  end
end
# POST: stores the rough georeferencing state for a map.
# NOTE(review): the Map::ROUGH_STATE whitelist is checked AFTER
# update_attributes, so an off-list value may already be saved when the
# fail response renders — confirm whether this ordering is intended.
def set_rough_state
map = Map.find(params[:id])
respond_to do |format|
if map.update_attributes(:rough_state => params[:rough_state]) && Map::ROUGH_STATE.include?(params[:rough_state].to_sym)
format.json { render :json => {:stat => "ok", :items => ["id" => map.id, "rough_state" => map.rough_state]}.to_json, :callback => params[:callback] }
else
format.json { render :json => {:stat => "fail", :message =>"Could not update state", :errors => map.errors.to_a, :items => []}.to_json , :callback => params[:callback]}
end
end
end
# Plain-text status probe, polled by the client while a map image loads.
# A nil status is reported as "loading".
def status
  map = Map.find(params[:id])
  render :text => (map.status.nil? ? "loading" : map.status.to_s)
end
#should check for admin only
# Toggles publishing. params[:to] is "publish" (only from :warped; tiles
# are pushed via tilestache) or "unpublish" (only from :published).
# Re-entrant publishes are rejected with a notice while :publishing.
def publish
if @map.status == :publishing
flash[:notice] = "Map currently publishing. Please try again later."
return redirect_to @map
end
if params[:to] == "publish" && @map.status == :warped
@map.publish
flash[:notice] = "Map publishing. Please wait as the map will be published and tiles transfered via tilestache. Status: " + @map.status.to_s
elsif params[:to] == "unpublish" && @map.status == :published
@map.unpublish
flash[:notice] = "Map unpublished. Status: " + @map.status.to_s
end
redirect_to @map
end
# Persists the submitted mask polygon (params[:output]) for the map and
# echoes the model's status message back in the requested format.
def save_mask
  msg = @map.save_mask(params[:output])
  respond_to do |format|
    format.html { render :text => msg }
    format.js { render :text => msg } if request.xhr?
    format.json { render :json => { :stat => "ok", :message => msg }.to_json, :callback => params[:callback] }
  end
end
# Removes the saved mask for the current map and reports the outcome.
def delete_mask
  msg = @map.delete_mask
  respond_to do |format|
    format.html { render :text => msg }
    format.js { render :text => msg } #if request.xhr?
    format.json { render :json => { :stat => "ok", :message => msg }.to_json, :callback => params[:callback] }
  end
end
# Applies the saved GML mask to the map image (crop). Fails with a
# "Mask file not found" message when no mask has been saved yet.
def mask_map
  respond_to do |format|
    # File.exists? is a deprecated alias (removed in Ruby 3.2) — use File.exist?
    if File.exist?(@map.masking_file_gml)
      message = @map.mask!
      format.html { render :text => message }
      format.js { render :text => message } #if request.xhr?
      format.json { render :json => {:stat => "ok", :message => message}.to_json, :callback => params[:callback] }
    else
      message = "Mask file not found"
      format.html { render :text => message }
      format.js { render :text => message } #if request.xhr?
      format.json { render :json => {:stat => "fail", :message => message}.to_json, :callback => params[:callback] }
    end
  end
end
# Saves the submitted mask, applies it, then rectifies — all in one request.
# Fails without warping when the map is published/being published, is
# already being rectified elsewhere, or has fewer than 3 hard control points.
def save_mask_and_warp
logger.debug "save mask and warp"
if @map.status == :publishing or @map.status == :published
stat = "fail"
msg = "Mask not applied. Map is published so is unable to mask."
elsif @map.status == :warping
stat = "fail"
msg = "Mask not saved as the map is currently being rectified somewhere else, please try again later."
else
@map.save_mask(params[:output])
@map.mask!
stat = "ok"
if @map.gcps.hard.size.nil? || @map.gcps.hard.size < 3
msg = "Map masked, but it needs more control points to rectify. Click the Rectify tab to add some."
stat = "fail"
else
# Force rectify_main to warp with the freshly applied mask.
params[:use_mask] = "true"
rectify_main
msg = "Map masked and rectified."
end
end
respond_to do |format|
format.json {render :json => {:stat => stat, :message => msg}.to_json , :callback => params[:callback]}
format.js { render :text => msg } if request.xhr?
end
end
#just works with NSEW directions at the moment.
# Aligns a destination map against a source map edge (params[:align]) by
# copying control points, using either the original or the warped source
# geometry depending on params[:align_type].
def warp_aligned
align = params[:align]
append = params[:append]
destmap = Map.find(params[:destmap])
if destmap.status.nil? or destmap.status == :unloaded or destmap.status == :loading
flash.now[:notice] = "Sorry the destination map is not available to be aligned."
redirect_to :action => "show", :id=> params[:destmap]
elsif align != "other"
if params[:align_type] == "original"
destmap.align_with_original(params[:srcmap], align, append )
else
destmap.align_with_warped(params[:srcmap], align, append )
end
flash.now[:notice] = "Map aligned. You can now rectify it!"
redirect_to :action => "show", :id => destmap.id, :anchor => "Rectify_tab"
else
# "other" (diagonal) alignments are not implemented.
flash.now[:notice] = "Sorry, only horizontal and vertical alignment are available at the moment."
redirect_to :action => "align", :id=> params[:srcmap], :anchor => "Align_tab"
end
end
# Rectifies (warps) the current map using its control points. The heavy
# lifting is in rectify_main, which sets @too_few / @fail / @notice_text.
def rectify
  rectify_main
  # Inverted the original `unless ... else` to the clearer `if ... else`.
  respond_to do |format|
    if @too_few || @fail
      format.js
      format.html { render :text => @notice_text }
      format.json { render :json => { :stat => "fail", :message => @notice_text }.to_json, :callback => params[:callback] }
    else
      format.js
      format.html { render :text => @notice_text }
      format.json { render :json => { :stat => "ok", :message => @notice_text }.to_json, :callback => params[:callback] }
    end
  end
end
require 'mapscript'
include Mapscript
# WMS endpoint backed by MapServer's Ruby mapscript bindings.
# Serves either the original scan (STATUS=unwarped) or the rectified file
# as a single raster layer named "image", dispatching the OWS request and
# streaming MapServer's rendered output back inline.
def wms
@map = Map.find(params[:id])
#status is additional query param to show the unwarped wms
# NOTE(review): `|| "unwarped"` is dead code — .to_s.downcase never returns
# nil, so a missing STATUS param yields "" and serves the warped file.
status = params["STATUS"].to_s.downcase || "unwarped"
ows = Mapscript::OWSRequest.new
ok_params = Hash.new
# params.each {|k,v| k.upcase! } frozen string error
# Normalise query keys to upper case so the lookups below are case-insensitive.
params.each {|k,v| ok_params[k.upcase] = v }
[:request, :version, :transparency, :service, :srs, :width, :height, :bbox, :format, :srs].each do |key|
ows.setParameter(key.to_s, ok_params[key.to_s.upcase]) unless ok_params[key.to_s.upcase].nil?
end
ows.setParameter("VeRsIoN","1.1.1")
ows.setParameter("STYLES", "")
ows.setParameter("LAYERS", "image")
ows.setParameter("COVERAGE", "image")
mapsv = Mapscript::MapObj.new(File.join(Rails.root, '/lib/mapserver/wms.map'))
projfile = File.join(Rails.root, '/lib/proj')
mapsv.setConfigOption("PROJ_LIB", projfile)
#map.setProjection("init=epsg:900913")
mapsv.applyConfigOptions
rel_url_root = (ActionController::Base.relative_url_root.blank?)? '' : ActionController::Base.relative_url_root
mapsv.setMetaData("wms_onlineresource",
"http://" + request.host_with_port + rel_url_root + "/maps/wms/#{@map.id}")
raster = Mapscript::LayerObj.new(mapsv)
raster.name = "image"
raster.type = Mapscript::MS_LAYER_RASTER
raster.addProcessing("RESAMPLE=BILINEAR")
if status == "unwarped"
raster.data = @map.unwarped_filename
else #show the warped map
raster.data = @map.warped_filename
end
raster.status = Mapscript::MS_ON
raster.dump = Mapscript::MS_TRUE
raster.metadata.set('wcs_formats', 'GEOTIFF')
raster.metadata.set('wms_title', @map.title)
raster.metadata.set('wms_srs', 'EPSG:4326 EPSG:3857 EPSG:4269 EPSG:900913')
#raster.debug = Mapscript::MS_TRUE
raster.setProcessingKey("CLOSE_CONNECTION", "ALWAYS")
# Capture MapServer's stdout into a buffer instead of the real stdout.
Mapscript::msIO_installStdoutToBuffer
result = mapsv.OWSDispatch(ows)
content_type = Mapscript::msIO_stripStdoutBufferContentType || "text/plain"
result_data = Mapscript::msIO_getStdoutBufferBytes
send_data result_data, :type => content_type, :disposition => "inline"
Mapscript::msIO_resetHandlers
end
# Slippy-map tile endpoint (z/x/y). Converts Google/OSM tile coords to
# TMS, computes the mercator bbox, then fills in the remaining WMS params
# and delegates rendering to #wms.
def tile
x = params[:x].to_i
y = params[:y].to_i
z = params[:z].to_i
#for Google/OSM tile scheme we need to alter the y:
y = ((2**z)-y-1)
#calculate the bbox
params[:bbox] = get_tile_bbox(x,y,z)
#build up the other params
params[:status] = "warped"
params[:format] = "image/png"
params[:service] = "WMS"
params[:version] = "1.1.1"
params[:request] = "GetMap"
params[:srs] = "EPSG:900913"
params[:width] = "256"
params[:height] = "256"
#call the wms thing
wms
end
private
# Shared implementation for rectify and save_mask_and_warp.
# Translates the request params into GDAL warp flags, validates
# preconditions, then calls @map.warp!. Sets @too_few / @fail,
# @notice_text and @output for the callers.
def rectify_main
  # GDAL transform flag per requested transform type ("auto" and anything
  # unrecognised both mean no explicit flag).
  transform_option = {
    "p1"  => " -order 1 ",
    "p2"  => " -order 2 ",
    "p3"  => " -order 3 ",
    "tps" => " -tps "
  }.fetch(params[:transform_options], "")
  # GDAL resampling flag; "lanczos" deliberately falls back to nearest
  # neighbour because true lanczos is very very slow.
  resample_option = {
    "near"        => " -rn ",
    "bilinear"    => " -rb ",
    "cubic"       => " -rc ",
    "cubicspline" => " -rcs ",
    "lanczos"     => " -rn "
  }.fetch(params[:resample_options], " -rn")
  use_mask = params[:use_mask]
  @too_few = false
  if @map.gcps.hard.size.nil? || @map.gcps.hard.size < 3
    @too_few = true
    @notice_text = "Sorry, the map needs at least three control points to be able to rectify it"
    @output = @notice_text
  elsif @map.status == :warping
    @fail = true
    @notice_text = "Sorry, the map is currently being rectified somewhere else, please try again later."
    @output = @notice_text
  elsif @map.status == :publishing or @map.status == :published
    @fail = true
    @notice_text = "Sorry, this map is published, and cannot be rectified."
    @output = @notice_text
  else
    # Record that this user worked on the map.
    if user_signed_in?
      um = current_user.my_maps.new(:map => @map)
      um.save
    end
    @output = @map.warp! transform_option, resample_option, use_mask #,masking_option
    @notice_text = "Map rectified."
  end
end
# Tile utility: bounding box (spherical-mercator metres) for a TMS tile.
# Based on http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
# GDAL2Tiles, Google Summer of Code 2007 & 2008
# by Klokan Petr Pridal
def get_tile_bbox(x, y, z)
  west, south = get_merc_coords(x * 256, y * 256, z)
  east, north = get_merc_coords((x + 1) * 256, (y + 1) * 256, z)
  "#{west},#{south},#{east},#{north}"
end
# Converts TMS pixel coordinates at zoom z into spherical-mercator metres,
# returning [merc_x, merc_y]. 6378137 is the WGS84 equatorial radius.
def get_merc_coords(x, y, z)
  resolution = (2 * Math::PI * 6378137 / 256) / (2 ** z)
  origin_shift = 2 * Math::PI * 6378137 / 2.0
  [x * resolution - origin_shift, y * resolution - origin_shift]
end
# Remembers the given URL in the session as the "back" link target.
def set_session_link_back link_url
session[:link_back] = link_url
end
# before_filter: ensures session[:link_back] is populated (and exposed as
# @link_back), falling back to the maps index URL.
def check_link_back
  @link_back = session[:link_back]
  @link_back = url_for(:action => 'index') if @link_back.nil?
  session[:link_back] = @link_back
end
#only allow deleting by a user if the user owns it
# before_filter: loads @map for deletion only when the current user owns
# the map or has the editor role; everyone else is bounced with a notice.
def check_if_map_can_be_deleted
if user_signed_in? and (current_user.own_this_map?(params[:id]) or current_user.has_role?("editor"))
@map = Map.find(params[:id])
else
flash[:notice] = "Sorry, you cannot delete other people's maps!"
redirect_to map_path
end
end
# rescue_from ActiveRecord::RecordNotFound handler: HTML clients get a
# flash + redirect to the index; JSON clients get a 404 payload.
def bad_record
#logger.error("not found #{params[:id]}")
respond_to do | format |
format.html do
flash[:notice] = "Map not found"
redirect_to :action => :index
end
format.json {render :json => {:stat => "not found", :items =>[]}.to_json, :status => 404}
end
end
#only allow editing by a user if the user owns it, or if and editor tries to edit it
# def check_if_map_is_editable
# if user_signed_in? and (current_user.own_this_map?(params[:id]) or current_user.has_role?("editor"))
# @map = Map.find(params[:id])
# elsif Map.find(params[:id]).owner.nil?
# @map = Map.find(params[:id])
# else
# flash[:notice] = "Sorry, you cannot edit other people's maps"
# redirect_to map_path
# end
# end
# before_filter: loads @map and bounces to the maps index while the image
# is still unloaded/loading (nil status counts as unavailable too).
def find_map_if_available
  @map = Map.find(params[:id])
  redirect_to map_path if [nil, :unloaded, :loading].include?(@map.status)
end
# Strong-parameters whitelist for map create/update. The "(3i)/(2i)/(1i)"
# keys are Rails multiparameter date fields (day/month/year selects).
def map_params
params.require(:map).permit(:title, :description, :tag_list, :map_type, :subject_area, :unique_id,
:source_uri, :call_number, :publisher, :publication_place, :authors, :date_depicted, :scale,
:metadata_projection, :metadata_lat, :metadata_lon, :public,
"published_date(3i)", "published_date(2i)", "published_date(1i)", "reprint_date(3i)",
"reprint_date(2i)", "reprint_date(1i)", :upload_url, :upload )
end
# For XHR requests, render inside the bare tab_container layout and flag
# the view via @xhr_flag; normal requests keep the default layout.
def choose_layout_if_ajax
if request.xhr?
@xhr_flag = "xhr"
render :layout => "tab_container"
end
end
# before_filter: remembers where to send the user back to after login,
# pointing at the specific tab they were viewing.
def store_location
  # Map the current action to its tab anchor; unknown actions yield "".
  anchor = {
    "warp"     => "Rectify_tab",
    "clip"     => "Crop_tab",
    "align"    => "Align_tab",
    "export"   => "Export_tab",
    "comments" => "Comments_tab"
  }[request.parameters[:action]].to_s
  return if anchor.blank?
  if request.parameters[:action] && request.parameters[:id]
    session[:user_return_to] = map_path(:id => request.parameters[:id], :anchor => anchor)
  else
    session[:user_return_to] = request.url
  end
end
end
exposing bbox_geom and uuid in json/xml endpoints
class MapsController < ApplicationController
# Detail-tab actions share the mapdetail layout.
layout 'mapdetail', :only => [:show, :edit, :preview, :warp, :clip, :align, :activity, :warped, :export, :metadata, :comments]
# Remember the current tab so login can return the user to it.
before_filter :store_location, :only => [:warp, :align, :clip, :export, :edit, :comments ]
before_filter :authenticate_user!, :only => [:new, :create, :edit, :update, :destroy, :delete, :warp, :rectify, :clip, :align, :warp_align, :mask_map, :delete_mask, :save_mask, :save_mask_and_warp, :set_rough_state, :set_rough_centroid, :publish, :trace, :id, :map_type, :create_inset]
before_filter :check_administrator_role, :only => [:publish, :map_type, :create_inset]
# Loads @map and redirects away while the map image is still loading.
before_filter :find_map_if_available,
:except => [:show, :index, :wms, :tile, :mapserver_wms, :warp_aligned, :status, :new, :create, :update, :edit, :tag, :geosearch, :map_type, :create_inset]
before_filter :check_link_back, :only => [:show, :warp, :clip, :align, :warped, :export, :activity]
# Read-only endpoints stay available when the site is in read-only mode.
skip_before_filter :check_site_read_only, :only => [:show, :index, :geosearch, :wms, :tile, :status, :warped, :comments, :export, :metadata, :activity, :preview, :thumb, :inset_maps]
rescue_from ActiveRecord::RecordNotFound, :with => :bad_record
helper :sort
include SortHelper
###############
#
# Collection actions
#
###############
# GET /maps — paginated, sortable map listing with an optional
# regex-based search (over title+description or a whitelisted field) and
# an issue-year range filter; ?show_warped=1 limits to rectified maps.
def index
sort_init('updated_at', {:default_order => "desc"})
sort_update
@show_warped = params[:show_warped]
request.query_string.length > 0 ? qstring = "?" + request.query_string : qstring = ""
set_session_link_back url_for(:controller=> 'maps', :action => 'index',:skip_relative_url_root => false, :only_path => false )+ qstring
@query = params[:query]
# Whitelist the searchable field; anything unrecognised becomes "text".
@field = %w(text title description status catnyp nypl_digital_id uuid).detect{|f| f == (params[:field])}
@field = "text" if @field.nil?
where_col = @field
if @field == "text"
where_col = "(title || ' ' || description)"
end
#we'll use POSIX regular expression for searches ~*'( |^)robinson([^A-z]|$)' and to strip out brakets etc ~*'(:punct:|^|)plate 6([^A-z]|$)';
if @query && @query.strip.length > 0 && @field
conditions = ["#{where_col} ~* ?", '(:punct:|^|)'+@query+'([^A-z]|$)']
else
conditions = nil
end
@year_min = Map.minimum(:issue_year).to_i - 1
@year_max = Map.maximum(:issue_year).to_i + 1
@year_min = 1600 if @year_min == 0
@year_max = 2015 if @year_max == 0
year_conditions = nil
# Only constrain by year when the requested range differs from the full range.
if params[:from] && params[:to] && !(@year_min == params[:from].to_i && @year_max == params[:to].to_i)
year_conditions = {:issue_year => params[:from].to_i..params[:to].to_i}
end
@from = params[:from]
@to = params[:to]
if params[:sort_order] && params[:sort_order] == "desc"
sort_nulls = " NULLS LAST"
else
sort_nulls = " NULLS FIRST"
end
@per_page = params[:per_page] || 50
paginate_params = {
:page => params[:page],
:per_page => @per_page
}
order_options = sort_clause + sort_nulls
where_options = conditions
#order('name').where('name LIKE ?', "%#{search}%").paginate(page: page, per_page: 10)
if @show_warped == "1"
@maps = Map.warped.where(where_options).where(year_conditions).order(order_options).paginate(paginate_params)
else
@maps = Map.where(where_options).where(year_conditions).order(order_options).paginate(paginate_params)
end
@html_title = "Browse Maps"
if request.xhr?
render :action => 'index.rjs'
else
respond_to do |format|
format.html{ render :layout =>'application' } # index.html.erb
format.xml { render :xml => @maps.to_xml(:root => "maps", :except => [:content_type, :size, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]) {|xml|
xml.tag!'stat', "ok"
xml.tag!'total-entries', @maps.total_entries
xml.tag!'per-page', @maps.per_page
xml.tag!'current-page',@maps.current_page} }
format.json { render :json => {:stat => "ok",
:current_page => @maps.current_page,
:per_page => @maps.per_page,
:total_entries => @maps.total_entries,
:total_pages => @maps.total_pages,
:items => @maps.to_a}.to_json(:except => [:content_type, :size, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid], :methods => :depicts_year) , :callback => params[:callback]
}
end
end
end
def geosearch
require 'geoplanet'
sort_init 'updated_at'
sort_update
extents = [-74.1710,40.5883,-73.4809,40.8485] #NYC
#TODO change to straight javascript call.
if params[:place] && !params[:place].blank?
place_query = params[:place]
GeoPlanet.appid = APP_CONFIG['yahoo_app_id']
geoplanet_result = GeoPlanet::Place.search(place_query, :count => 2)
if geoplanet_result[0]
g_bbox = geoplanet_result[0].bounding_box.map!{|x| x.reverse}
extents = g_bbox[1] + g_bbox[0]
render :json => extents.to_json
return
else
render :json => extents.to_json
return
end
end
if params[:bbox] && params[:bbox].split(',').size == 4
begin
extents = params[:bbox].split(',').collect {|i| Float(i)}
rescue ArgumentError
logger.debug "arg error with bbox, setting extent to defaults"
end
end
@bbox = extents.join(',')
if extents
bbox_poly_ary = [
[ extents[0], extents[1] ],
[ extents[2], extents[1] ],
[ extents[2], extents[3] ],
[ extents[0], extents[3] ],
[ extents[0], extents[1] ]
]
map_srid = 0
map_srid = Map.warped.first.bbox_geom.srid if Map.warped.first && Map.warped.first.bbox_geom
if map_srid == 0
bbox_polygon = GeoRuby::SimpleFeatures::Polygon.from_coordinates([bbox_poly_ary]).as_wkt
else
bbox_polygon = GeoRuby::SimpleFeatures::Polygon.from_coordinates([bbox_poly_ary]).as_ewkt
end
if params[:operation] == "within"
conditions = ["ST_Within(bbox_geom, ST_GeomFromText('#{bbox_polygon}'))"]
else
conditions = ["ST_Intersects(bbox_geom, ST_GeomFromText('#{bbox_polygon}'))"]
end
else
conditions = nil
end
if params[:sort_order] && params[:sort_order] == "desc"
sort_nulls = " NULLS LAST"
else
sort_nulls = " NULLS FIRST"
end
@operation = params[:operation]
if @operation == "intersect"
sort_geo = "ABS(ST_Area(bbox_geom) - ST_Area(ST_GeomFromText('#{bbox_polygon}'))) ASC, "
else
sort_geo ="ST_Area(bbox_geom) DESC ,"
end
@year_min = Map.minimum(:issue_year).to_i - 1
@year_max = Map.maximum(:issue_year).to_i + 1
@year_min = 1600 if @year_min == 0
@year_max = 2015 if @year_max == 0
year_conditions = nil
if params[:from] && params[:to] && !(@year_min == params[:from].to_i && @year_max == params[:to].to_i)
year_conditions = {:issue_year => params[:from].to_i..params[:to].to_i}
end
status_conditions = {:status => [Map.status(:warped), Map.status(:published), Map.status(:publishing)]}
paginate_params = {
:page => params[:page],
:per_page => 20
}
order_params = sort_geo + sort_clause + sort_nulls
@maps = Map.select("bbox, title, description, updated_at, id, nypl_digital_id, uuid, issue_year, status").warped.where(conditions).where(year_conditions).where(status_conditions).order(order_params).paginate(paginate_params)
@jsonmaps = @maps.to_json # (:only => [:bbox, :title, :id, :nypl_digital_id])
respond_to do |format|
format.html{ render :layout =>'application' }
format.json { render :json => {:stat => "ok",
:current_page => @maps.current_page,
:per_page => @maps.per_page,
:total_entries => @maps.total_entries,
:total_pages => @maps.total_pages,
:items => @maps.to_a}.to_json(:methods => :depicts_year) , :callback => params[:callback]}
end
end
###############
#
# Tab actions
#
###############
def show
@current_tab = "show"
@selected_tab = 0
@disabled_tabs =[]
@map = Map.find(params[:id])
@html_title = "Viewing Map "+@map.id.to_s
if @map.status.nil? || @map.status == :unloaded
@mapstatus = "unloaded"
else
@mapstatus = @map.status.to_s
end
#
# Not Logged in users
#
if !user_signed_in?
@disabled_tabs = ["warp", "clip", "align", "activity"]
if @map.status.nil? or @map.status == :unloaded or @map.status == :loading
@disabled_tabs += ["warped"]
end
flash.now[:notice] = "You may need to %s to start editing the map"
flash.now[:notice_item] = ["log in", :new_user_session]
session[:user_return_to] = request.url
if request.xhr?
@xhr_flag = "xhr"
render :action => "preview", :layout => "tab_container"
else
respond_to do |format|
format.html {render :action => "preview"}
format.kml {render :action => "show_kml", :layout => false}
format.rss {render :action=> 'show'}
# format.xml {render :xml => @map.to_xml(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]) }
format.json {render :json =>{:stat => "ok", :items => @map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback] }
end
end
return #stop doing anything more
end
#End doing stuff for not logged in users.
#
# Logged in users
#
if @map.versions.last
@current_version_number = @map.versions.last.index
if User.exists?(@map.versions.last.whodunnit.to_i)
@current_version_user = User.find_by_id(@map.versions.last.whodunnit.to_i)
else
@current_version_user = nil
end
else
@current_version_number = 1
@current_version_user = nil
end
version_users = PaperTrail::Version.where({:item_type => 'Map', :item_id => @map.id}).where.not(:whodunnit => nil).where.not(:whodunnit => @current_version_user).select(:whodunnit).distinct.limit(7)
@version_users = version_users.to_a.delete_if{|v| !User.exists?(v.whodunnit) }
unless user_signed_in? and current_user.has_role?("administrator")
if @map.status == :publishing or @map.status == :published
@disabled_tabs += ["warp", "clip", "align"] #dont show any others unless you're an editor
end
end
#note, trying to view an image that hasnt been requested, will cause it to be requested
if @map.status.nil? or @map.status == :unloaded
@disabled_tabs = ["warp", "clip", "align", "warped", "preview","activity", "export"]
@title = "Viewing unwarped map."
logger.debug("starting spawn fetch iamge")
Spawnling.new do
logger.info "starting fetch from image server"
@map.fetch_from_image_server
logger.info "finished fetch from image server. Status = "+@map.status.to_s
end
return
end
@title = "Viewing original map. "
if !@map.warped_or_published?
@title += "This map has not been warped yet."
end
if request.xhr?
choose_layout_if_ajax
else
respond_to do |format|
format.html
format.kml {render :action => "show_kml", :layout => false}
format.json {render :json =>{:stat => "ok", :items => @map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback] }
end
end
end
def comments
@html_title = "comments"
@selected_tab = 9
@current_tab = "comments"
@comments = @map.comments
choose_layout_if_ajax
respond_to do | format |
format.html {}
end
end
# Export tab: offers the map for download. The tif format serves either the
# original or the warped GeoTIFF depending on the "unwarped" param; png and
# aux_xml always come from the warped output.
def export
  @current_tab = "export"
  @selected_tab = 5
  @html_title = "Export Map" + @map.id.to_s
  choose_layout_if_ajax
  tif_filename = params[:unwarped] ? @map.filename : @map.warped_filename
  respond_to do |format|
    format.html {}
    format.tif     { send_file tif_filename, :x_sendfile => true }
    format.png     { send_file @map.warped_png, :x_sendfile => true }
    format.aux_xml { send_file @map.warped_png_aux_xml, :x_sendfile => true }
  end
end
# Crop tab: shows the masking (clipping) UI. @gml_exists tells the view,
# as the strings "true"/"false", whether a previously saved OpenLayers
# mask file is on disk.
def clip
  #TODO delete current_tab
  @current_tab = "clip"
  @selected_tab = 3
  @html_title = "Cropping Map " + @map.id.to_s
  # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
  @gml_exists = File.exist?(@map.masking_file_gml + ".ol") ? "true" : "false"
  choose_layout_if_ajax
end
# Warped tab: preview of the rectified map. Requires the map to be warped
# (or mid-publish) with more than two hard ground control points; otherwise
# shows a notice. @other_layers lists ids of visible layers for overlay.
def warped
  @current_tab = "warped"
  @selected_tab = 5
  # Fixed typo: "Rectfied" -> "Rectified".
  @html_title = "Viewing Rectified Map " + @map.id.to_s
  if (@map.warped_or_published? || @map.status == :publishing) && @map.gcps.hard.size > 2
    @other_layers = @map.layers.visible.map(&:id)
  else
    flash.now[:notice] = "Whoops, the map needs to be rectified before you can view it"
  end
  choose_layout_if_ajax
end
# Align tab: UI for lining this map up against another map.
def align
  @current_tab = "align"
  @selected_tab = 3
  @html_title = "Align Maps "
  choose_layout_if_ajax
end
# Rectify tab: loads GCPs (with their computed error), visible overlay
# layers, and -- only when no hard GCPs exist yet -- a best-guess list of
# places to help the user get started.
def warp
  @current_tab = "warp"
  @selected_tab = 2
  @html_title = "Rectifying Map " + @map.id.to_s
  @bestguess_places = @map.find_bestguess_places if @map.gcps.hard.empty?
  # Idiom: build the id list with map instead of each/push.
  @other_layers = @map.layers.visible.map(&:id)
  @gcps = @map.gcps_with_error
  choose_layout_if_ajax
end
# Metadata tab: relies entirely on the view; just picks the tab layout.
def metadata
choose_layout_if_ajax
end
# Tracing view over a published map; unpublished maps are redirected away.
# NOTE(review): map_path is called with no id here -- confirm the route
# helper resolves correctly inside this member action.
def trace
redirect_to map_path unless @map.published?
@overlay = @map
end
# "id" editor view (presumably the OSM iD editor -- confirm) for a
# published map, rendered without the application layout.
def id
redirect_to map_path unless @map.published?
@overlay = @map
render "id", :layout => false
end
# OAuth landing page called back by the id editor's JS; bare page, no layout.
def idland
render "idland", :layout => false
end
# Lists the inset maps extracted from this map. Also exposes the latest
# PaperTrail version number and its author for the view; maps with no
# versions yet report version 1 and no author.
def inset_maps
@html_title = "Inset Maps for "+ @map.id.to_s
@inset_maps = @map.inset_maps
if @map.versions.last
@current_version_number = @map.versions.last.index
# whodunnit is stored as a string user id; only expose it when the
# user still exists.
if User.exists?(@map.versions.last.whodunnit.to_i)
@current_version_user = User.find_by_id(@map.versions.last.whodunnit.to_i)
else
@current_version_user = nil
end
else
@current_version_number = 1
@current_version_user = nil
end
if @inset_maps.empty?
flash[:notice] = "No inset maps found for this map"
redirect_to @map and return
end
end
###############
#
# Other / API actions
#
###############
# post create inset, admin only
# Admin-only POST: creates an inset map from this map. The source map must
# be in a ready state; on success redirects to the new inset, on failure
# sets a flash error and falls through to the default render.
def create_inset
  @map = Map.find(params[:id])
  unless [:available, :warping, :warped, :published].include?(@map.status)
    flash[:error] = "Sorry, this map is not ready to create an inset map from. It's status is " + @map.status.to_s
    redirect_to map_path(@map) and return
  end
  @inset_map = @map.create_inset
  if @inset_map && @inset_map.save
    flash[:notice] = "Successfully created inset map!"
    redirect_to map_path(@inset_map) and return
  end
  flash[:error] = "Sorry, there was a problem creating this inset map"
end
# Redirects to the NYPL image server's thumbnail for this map's digital id.
def thumb
  digital_id = Map.find(params[:id]).nypl_digital_id
  redirect_to "http://images.nypl.org/?t=t&id=#{digital_id}"
end
# Sets the map's type (only when the requested type is a known
# Map::MAP_TYPE) and, when a valid layer id is supplied, reloads that
# layer's paginated map list for the view.
def map_type
  @map = Map.find(params[:id])
  requested_type = params[:map][:map_type]
  @map.update_map_type(requested_type) if Map::MAP_TYPE.include?(requested_type.to_sym)
  layer_id = params[:layerid].to_i
  if Layer.exists?(layer_id)
    @layer = Layer.find(layer_id)
    @maps = @layer.maps.paginate(:per_page => 30, :page => 1, :order => :map_type)
  end
  render :text => "Map has changed. Map type: "+@map.map_type.to_s
end
# Returns this map's ground control points with their computed error.
# Pass param "soft" (truthy) to include soft/suggested GCPs too.
# NOTE(review): the html format also renders JSON -- presumably intentional
# for JSONP callers; confirm before changing.
def gcps
@map = Map.find(params[:id])
gcps = @map.gcps_with_error(params[:soft])
respond_to do |format|
format.html { render :json => {:stat => "ok", :items => gcps.to_a}.to_json(:methods => :error), :callback => params[:callback]}
format.json { render :json => {:stat => "ok", :items => gcps.to_a}.to_json(:methods => :error), :callback => params[:callback]}
format.xml { render :xml => gcps.to_xml(:methods => :error)}
end
end
# JSON-only: returns the map (minus bulky internal columns) so clients can
# read its rough centroid and zoom. Supports JSONP via the callback param.
def get_rough_centroid
map = Map.find(params[:id])
respond_to do |format|
format.json {render :json =>{:stat => "ok", :items => map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail]), :callback => params[:callback] }
end
end
# Stores a user-supplied rough centroid (lon/lat) and zoom for the map,
# replying with the updated map as JSON (JSONP-capable).
# BUGFIX: previously update_attributes ran BEFORE the lat/lon presence
# check, so requests missing coordinates still wrote partial data and then
# reported failure. Validate presence first; responses are unchanged.
def set_rough_centroid
  map = Map.find(params[:id])
  lon = params[:lon]
  lat = params[:lat]
  zoom = params[:zoom]
  respond_to do |format|
    if lat && lon && map.update_attributes(:rough_lon => lon, :rough_lat => lat, :rough_zoom => zoom)
      map.save_rough_centroid(lon, lat)
      format.json {render :json =>{:stat => "ok", :items => map}.to_json(:except => [:content_type, :size, :bbox_geom, :uuid, :parent_uuid, :filename, :parent_id, :map, :thumbnail, :rough_centroid]), :callback => params[:callback]
      }
    else
      format.json { render :json => {:stat => "fail", :message => "Rough centroid not set", :items => [], :errors => map.errors.to_a}.to_json, :callback => params[:callback]}
    end
  end
end
# JSON-only: reports the map's rough georeferencing state; "fail" when the
# state has never been set (null).
def get_rough_state
  map = Map.find(params[:id])
  respond_to do |format|
    format.json do
      if map.rough_state
        render :json => { :stat => "ok", :items => ["id" => map.id, "rough_state" => map.rough_state] }.to_json, :callback => params[:callback]
      else
        render :json => { :stat => "fail", :message => "Rough state is null", :items => map.rough_state }.to_json, :callback => params[:callback]
      end
    end
  end
end
# Sets the map's rough georeferencing state, which must be one of
# Map::ROUGH_STATE. JSON-only, JSONP-capable.
# BUGFIX: previously update_attributes ran BEFORE the ROUGH_STATE inclusion
# check, so an invalid state was persisted and the response still said
# "fail". Validate first; the JSON responses are unchanged.
def set_rough_state
  map = Map.find(params[:id])
  respond_to do |format|
    if Map::ROUGH_STATE.include?(params[:rough_state].to_sym) && map.update_attributes(:rough_state => params[:rough_state])
      format.json { render :json => {:stat => "ok", :items => ["id" => map.id, "rough_state" => map.rough_state]}.to_json, :callback => params[:callback] }
    else
      format.json { render :json => {:stat => "fail", :message =>"Could not update state", :errors => map.errors.to_a, :items => []}.to_json , :callback => params[:callback]}
    end
  end
end
# Plain-text status probe; a nil status is reported as "loading".
def status
  map = Map.find(params[:id])
  render :text => (map.status.nil? ? "loading" : map.status.to_s)
end
#should check for admin only
# Toggles publication. Publishing is asynchronous: a map already mid-publish
# is bounced back with a notice. Only warped->publish and
# published->unpublish transitions act; anything else just redirects.
def publish
if @map.status == :publishing
flash[:notice] = "Map currently publishing. Please try again later."
return redirect_to @map
end
if params[:to] == "publish" && @map.status == :warped
@map.publish
flash[:notice] = "Map publishing. Please wait as the map will be published and tiles transfered via tilestache. Status: " + @map.status.to_s
elsif params[:to] == "unpublish" && @map.status == :published
@map.unpublish
flash[:notice] = "Map unpublished. Status: " + @map.status.to_s
end
redirect_to @map
end
# Persists the drawn clipping mask and echoes the model's status message
# back in whichever format was requested.
def save_mask
  message = @map.save_mask(params[:output])
  respond_to do |wants|
    wants.html { render :text => message }
    wants.js { render :text => message } if request.xhr?
    wants.json { render :json => { :stat => "ok", :message => message }.to_json, :callback => params[:callback] }
  end
end
# Removes the map's clipping mask and reports the model's status message.
def delete_mask
  message = @map.delete_mask
  respond_to do |wants|
    wants.html { render :text => message }
    wants.js { render :text => message } #if request.xhr?
    wants.json { render :json => { :stat => "ok", :message => message }.to_json, :callback => params[:callback] }
  end
end
# Applies the saved mask (cropping the image) when the mask GML file exists
# on disk; otherwise reports failure. Responds in html/js/json.
def mask_map
  respond_to do |format|
    # File.exists? is deprecated (removed in Ruby 3.2); use File.exist?.
    if File.exist?(@map.masking_file_gml)
      message = @map.mask!
      format.html { render :text => message }
      format.js { render :text => message } #if request.xhr?
      format.json { render :json => {:stat =>"ok", :message => message}.to_json , :callback => params[:callback]}
    else
      message = "Mask file not found"
      format.html { render :text => message }
      format.js { render :text => message } #if request.xhr?
      format.json { render :json => {:stat =>"fail", :message => message}.to_json , :callback => params[:callback]}
    end
  end
end
# Saves the drawn mask, applies it, and immediately rectifies the map.
# Refuses when the map is published/publishing (immutable) or currently
# being warped elsewhere. Rectification itself needs 3+ hard GCPs;
# with fewer, the mask is still applied but stat is "fail" with a hint.
def save_mask_and_warp
logger.debug "save mask and warp"
if @map.status == :publishing or @map.status == :published
stat = "fail"
msg = "Mask not applied. Map is published so is unable to mask."
elsif @map.status == :warping
stat = "fail"
msg = "Mask not saved as the map is currently being rectified somewhere else, please try again later."
else
@map.save_mask(params[:output])
@map.mask!
stat = "ok"
if @map.gcps.hard.size.nil? || @map.gcps.hard.size < 3
msg = "Map masked, but it needs more control points to rectify. Click the Rectify tab to add some."
stat = "fail"
else
# Force rectify_main to honour the freshly applied mask.
params[:use_mask] = "true"
rectify_main
msg = "Map masked and rectified."
end
end
respond_to do |format|
format.json {render :json => {:stat => stat, :message => msg}.to_json , :callback => params[:callback]}
format.js { render :text => msg } if request.xhr?
end
end
#just works with NSEW directions at the moment.
# Aligns destmap against srcmap in the given compass direction, using either
# the original or warped image (align_type), then points the user at the
# Rectify tab. Unloaded destination maps and non-NSEW alignments ("other")
# are rejected with a notice.
def warp_aligned
align = params[:align]
append = params[:append]
destmap = Map.find(params[:destmap])
if destmap.status.nil? or destmap.status == :unloaded or destmap.status == :loading
flash.now[:notice] = "Sorry the destination map is not available to be aligned."
redirect_to :action => "show", :id=> params[:destmap]
elsif align != "other"
if params[:align_type] == "original"
destmap.align_with_original(params[:srcmap], align, append )
else
destmap.align_with_warped(params[:srcmap], align, append )
end
flash.now[:notice] = "Map aligned. You can now rectify it!"
redirect_to :action => "show", :id => destmap.id, :anchor => "Rectify_tab"
else
flash.now[:notice] = "Sorry, only horizontal and vertical alignment are available at the moment."
redirect_to :action => "align", :id=> params[:srcmap], :anchor => "Align_tab"
end
end
# Runs the rectification worker (rectify_main) and reports its outcome.
# rectify_main sets @notice_text plus @too_few/@fail; only the JSON stat
# differs between success and failure, so the duplicated respond_to
# branches (and the unless/else anti-pattern) are collapsed.
def rectify
  rectify_main
  stat = (@too_few || @fail) ? "fail" : "ok"
  respond_to do |format|
    format.js
    format.html { render :text => @notice_text }
    format.json { render :json => { :stat => stat, :message => @notice_text }.to_json, :callback => params[:callback] }
  end
end
require 'mapscript'
include Mapscript
# WMS endpoint for one map, built on MapServer's mapscript bindings.
# Constructs a raster layer around the map's image (unwarped when the
# STATUS query param says so, warped otherwise), dispatches the OWS
# request, and streams MapServer's rendered output back inline.
def wms
@map = Map.find(params[:id])
#status is additional query param to show the unwarped wms
# NOTE(review): to_s.downcase never returns nil, so the "|| 'unwarped'"
# arm is dead; a missing STATUS yields "" and hits the warped branch below.
status = params["STATUS"].to_s.downcase || "unwarped"
ows = Mapscript::OWSRequest.new
ok_params = Hash.new
# params.each {|k,v| k.upcase! } frozen string error
# Re-key the request params upper-case so lookups are case-insensitive.
params.each {|k,v| ok_params[k.upcase] = v }
# Pass through the standard WMS parameters the client supplied.
[:request, :version, :transparency, :service, :srs, :width, :height, :bbox, :format, :srs].each do |key|
ows.setParameter(key.to_s, ok_params[key.to_s.upcase]) unless ok_params[key.to_s.upcase].nil?
end
# Fixed server-side parameters: single layer "image", WMS 1.1.1.
ows.setParameter("VeRsIoN","1.1.1")
ows.setParameter("STYLES", "")
ows.setParameter("LAYERS", "image")
ows.setParameter("COVERAGE", "image")
mapsv = Mapscript::MapObj.new(File.join(Rails.root, '/lib/mapserver/wms.map'))
projfile = File.join(Rails.root, '/lib/proj')
mapsv.setConfigOption("PROJ_LIB", projfile)
#map.setProjection("init=epsg:900913")
mapsv.applyConfigOptions
rel_url_root = (ActionController::Base.relative_url_root.blank?)? '' : ActionController::Base.relative_url_root
mapsv.setMetaData("wms_onlineresource",
"http://" + request.host_with_port + rel_url_root + "/maps/wms/#{@map.id}")
raster = Mapscript::LayerObj.new(mapsv)
raster.name = "image"
raster.type = Mapscript::MS_LAYER_RASTER
raster.addProcessing("RESAMPLE=BILINEAR")
if status == "unwarped"
raster.data = @map.unwarped_filename
else #show the warped map
raster.data = @map.warped_filename
end
raster.status = Mapscript::MS_ON
raster.dump = Mapscript::MS_TRUE
raster.metadata.set('wcs_formats', 'GEOTIFF')
raster.metadata.set('wms_title', @map.title)
raster.metadata.set('wms_srs', 'EPSG:4326 EPSG:3857 EPSG:4269 EPSG:900913')
#raster.debug = Mapscript::MS_TRUE
raster.setProcessingKey("CLOSE_CONNECTION", "ALWAYS")
# Capture MapServer's stdout into a buffer and relay it as the response body.
Mapscript::msIO_installStdoutToBuffer
result = mapsv.OWSDispatch(ows)
content_type = Mapscript::msIO_stripStdoutBufferContentType || "text/plain"
result_data = Mapscript::msIO_getStdoutBufferBytes
send_data result_data, :type => content_type, :disposition => "inline"
Mapscript::msIO_resetHandlers
end
# XYZ tile endpoint implemented on top of #wms: flips the Google/OSM y to
# TMS, computes the mercator bounding box for the tile, fakes the matching
# WMS GetMap parameters, and delegates to #wms to render a 256px PNG tile.
def tile
x = params[:x].to_i
y = params[:y].to_i
z = params[:z].to_i
#for Google/OSM tile scheme we need to alter the y:
y = ((2**z)-y-1)
#calculate the bbox
params[:bbox] = get_tile_bbox(x,y,z)
#build up the other params
params[:status] = "warped"
params[:format] = "image/png"
params[:service] = "WMS"
params[:version] = "1.1.1"
params[:request] = "GetMap"
params[:srs] = "EPSG:900913"
params[:width] = "256"
params[:height] = "256"
#call the wms thing
wms
end
private
# Shared worker for #rectify and #save_mask_and_warp. Translates the UI's
# transform/resample choices into gdalwarp-style option strings, validates
# that the map is warpable, then runs @map.warp!.
# Sets @output and @notice_text, plus the @too_few / @fail flags callers
# turn into responses. Interface and option strings unchanged, except:
# BUGFIX: the fallback resample option was " -rn" with no trailing space,
# inconsistent with every other option string; normalized to " -rn ".
# The unused masking_option local (the warp! argument it fed is commented
# out) has been removed.
def rectify_main
  # Lookup tables replace the original case/when ladders.
  transform_options = {
    "auto" => "",
    "p1"   => " -order 1 ",
    "p2"   => " -order 2 ",
    "p3"   => " -order 3 ",
    "tps"  => " -tps "
  }
  resample_options = {
    "near"        => " -rn ",
    "bilinear"    => " -rb ",
    "cubic"       => " -rc ",
    "cubicspline" => " -rcs ",
    "lanczos"     => " -rn " # lanczos is very very slow; fall back to nearest
  }
  transform_option = transform_options.fetch(params[:transform_options], "")
  resample_option = resample_options.fetch(params[:resample_options], " -rn ")
  use_mask = params[:use_mask]
  @too_few = false
  if @map.gcps.hard.size.nil? || @map.gcps.hard.size < 3
    @too_few = true
    @notice_text = "Sorry, the map needs at least three control points to be able to rectify it"
    @output = @notice_text
  elsif @map.status == :warping
    @fail = true
    @notice_text = "Sorry, the map is currently being rectified somewhere else, please try again later."
    @output = @notice_text
  elsif @map.status == :publishing or @map.status == :published
    @fail = true
    @notice_text = "Sorry, this map is published, and cannot be rectified."
    @output = @notice_text
  else
    # Record that this user worked on this map.
    if user_signed_in?
      um = current_user.my_maps.new(:map => @map)
      um.save
    end
    @output = @map.warp! transform_option, resample_option, use_mask
    @notice_text = "Map rectified."
  end
end
# tile utility methods. calculates the bounding box for a given TMS tile.
# Based on http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
# GDAL2Tiles, Google Summer of Code 2007 & 2008
# by Klokan Petr Pridal
# Bounding box (as "minx,miny,maxx,maxy" in mercator metres) for a TMS tile.
def get_tile_bbox(x, y, z)
  west, south = get_merc_coords(x * 256, y * 256, z)
  east, north = get_merc_coords((x + 1) * 256, (y + 1) * 256, z)
  "#{west},#{south},#{east},#{north}"
end
# Converts global pixel coordinates at zoom z into spherical-mercator
# metres. The world is 2*PI*R metres wide and rendered on 256px tiles at
# zoom 0; the origin shift recentres (0,0) on the map's centre.
def get_merc_coords(x, y, z)
  metres_per_pixel = (2 * Math::PI * 6378137 / 256) / (2**z)
  origin_shift = Math::PI * 6378137
  [x * metres_per_pixel - origin_shift, y * metres_per_pixel - origin_shift]
end
# Remembers an explicit "back" URL in the session for check_link_back.
def set_session_link_back link_url
session[:link_back] = link_url
end
# Ensures @link_back (and the session copy) always holds a URL, defaulting
# to the index action when none was stored.
def check_link_back
  @link_back = session[:link_back] || url_for(:action => 'index')
  session[:link_back] = @link_back
end
#only allow deleting by a user if the user owns it
# before_action guard for destroy: map owners and editors proceed (with
# @map loaded); everyone else gets a notice and a redirect.
# NOTE(review): map_path is called with no id here -- confirm the helper
# resolves in this context.
def check_if_map_can_be_deleted
if user_signed_in? and (current_user.own_this_map?(params[:id]) or current_user.has_role?("editor"))
@map = Map.find(params[:id])
else
flash[:notice] = "Sorry, you cannot delete other people's maps!"
redirect_to map_path
end
end
# Rescue handler for missing maps: HTML users get a flash + redirect to the
# index, JSON clients get a 404 payload.
def bad_record
  respond_to do |format|
    format.html do
      flash[:notice] = "Map not found"
      redirect_to :action => :index
    end
    format.json { render :json => { :stat => "not found", :items => [] }.to_json, :status => 404 }
  end
end
#only allow editing by a user if the user owns it, or if and editor tries to edit it
# def check_if_map_is_editable
# if user_signed_in? and (current_user.own_this_map?(params[:id]) or current_user.has_role?("editor"))
# @map = Map.find(params[:id])
# elsif Map.find(params[:id]).owner.nil?
# @map = Map.find(params[:id])
# else
# flash[:notice] = "Sorry, you cannot edit other people's maps"
# redirect_to map_path
# end
# end
# before_action: loads @map and bounces back to the map page when the image
# has not been loaded yet (nil / :unloaded / :loading status).
def find_map_if_available
  @map = Map.find(params[:id])
  redirect_to map_path if @map.status.nil? || [:unloaded, :loading].include?(@map.status)
end
# Strong-parameters whitelist for map create/update, including the
# multi-part date fields Rails generates for published/reprint dates.
def map_params
params.require(:map).permit(:title, :description, :tag_list, :map_type, :subject_area, :unique_id,
:source_uri, :call_number, :publisher, :publication_place, :authors, :date_depicted, :scale,
:metadata_projection, :metadata_lat, :metadata_lon, :public,
"published_date(3i)", "published_date(2i)", "published_date(1i)", "reprint_date(3i)",
"reprint_date(2i)", "reprint_date(1i)", :upload_url, :upload )
end
# For Ajax requests, renders the current action inside the tab container
# layout and flags the view via @xhr_flag; normal requests are untouched.
def choose_layout_if_ajax
  return unless request.xhr?
  @xhr_flag = "xhr"
  render :layout => "tab_container"
end
# Remembers where to send the user back to after e.g. login: the map page
# anchored at the tab matching the current action. Actions without a tab
# anchor are ignored.
def store_location
  tab_anchors = {
    "warp"     => "Rectify_tab",
    "clip"     => "Crop_tab",
    "align"    => "Align_tab",
    "export"   => "Export_tab",
    "comments" => "Comments_tab"
  }
  anchor = tab_anchors.fetch(request.parameters[:action], "")
  return if anchor.blank?
  if request.parameters[:action] && request.parameters[:id]
    session[:user_return_to] = map_path(:id => request.parameters[:id], :anchor => anchor)
  else
    session[:user_return_to] = request.url
  end
end
end
|
# Attaches free-text notes to machines looked up by serial number.
class NoteController < ApplicationController
  def index
    @machine = Machine.new
  end

  # BUGFIX: the original did not parse -- a stray ")" after the
  # serial_number assignment and the illegal ".!empty?" call. Behaviour as
  # intended: update the note on a known machine, otherwise report an error.
  def notate
    notes = params[:machine][:notes]
    serial_number = params[:machine][:serial_number]
    machine_array = Machine.where(serial_number: serial_number)
    if !machine_array.empty?
      existing_machine = machine_array[0]
      existing_machine.update(notes: notes)
      flash[:notice] = "Note was attached to machine"
      flash[:type] = "success"
    else
      flash[:notice] = "Serial number has not been logged"
      flash[:type] = "error"
    end
    redirect_to action: 'index'
  end
end
Whoops! Typo!
# Attaches free-text notes to machines looked up by serial number.
class NoteController < ApplicationController
  def index
    @machine = Machine.new
  end

  # BUGFIX: "lenght" is a typo (NoMethodError at runtime), and
  # `if machine_array = ... != 0` binds as `machine_array = (... != 0)`,
  # leaving a boolean in machine_array so machine_array[0] would crash.
  # Assign the relation first, then test emptiness.
  def notate
    notes = params[:machine][:notes]
    serial_number = params[:machine][:serial_number]
    machine_array = Machine.where(serial_number: serial_number)
    if machine_array.length != 0
      existing_machine = machine_array[0]
      existing_machine.update(notes: notes)
      flash[:notice] = "Note was attached to machine"
      flash[:type] = "success"
    else
      flash[:notice] = "Serial number has not been logged"
      flash[:type] = "error"
    end
    redirect_to action: 'index'
  end
end
|
# Public OPAC (online catalogue) front end; uses its own layout.
class OpacController < ApplicationController
  layout "opac"
  #include Devise::Controllers::InternalHelpers

  # Landing page: the five most recent events, newest first.
  def index
    @events = Event.order('start_at DESC').limit(5)
  end

  def search
  end

  def manifestations_index
  end

  # Post-login hook. The original left raw debug `puts` calls writing to
  # stdout; route that through the Rails logger instead so it lands in the
  # log file and can be filtered by level.
  def signed_in
    logger.info "OpacController login"
    logger.debug { "current_user: #{@current_user.inspect}" }
  end
end
refs 4007 使っていないメソッドが残っていたので削除 (refs 4007: removed a leftover unused method)
# Public OPAC (online catalogue) front end; uses its own layout.
class OpacController < ApplicationController
layout "opac"
# Landing page: the five most recent events, newest first.
def index
@events = Event.order('start_at DESC').limit(5)
end
# View-only actions; all logic lives in the templates.
def search
end
def manifestations_index
end
end
|
class PoolController < ApplicationController
layout "default"
before_action :member_only, :only => [:destroy, :update, :add_post, :remove_post, :import, :zip]
before_action :post_member_only, :only => [:create]
before_action :contributor_only, :only => [:copy, :transfer_metadata]
helper :post
# Pool listing with a mini query language: plain tokens are full-text
# search terms; "order:", "limit:" and "posts:" tokens tweak sort order,
# page size (capped at 100) and post-count range. Default order is by
# creation date, or by name when searching.
def index
@pools = Pool.all
options = {
:per_page => 20,
:page => page_number
}
order = params[:order]
search_tokens = []
if params[:query]
begin
query = params[:query].shellsplit
# Will raise error if not a valid shell-quoted string (unbalanced quotes).
# Use plain split instead.
rescue ArgumentError
query = params[:query].split
end
query.each do |token|
if token =~ /^(order|limit|posts):(.+)$/
if Regexp.last_match[1] == "order"
order = Regexp.last_match[2]
elsif Regexp.last_match[1] == "limit"
options[:per_page] = Regexp.last_match[2].to_i
options[:per_page] = [options[:per_page], 100].min
elsif Regexp.last_match[1] == "posts"
@pools = @pools.where(*Post.sql_range_for_where(Tag.parse_helper(Regexp.last_match[2]), "post_count"))
end
else
search_tokens << token
end
end
end
# Full-text search over name/description via the search_index tsvector.
if search_tokens.any?
value_index_query = "(#{Array.wrap(search_tokens).map(&:to_escaped_for_tsquery).join(" & ")})"
@pools = @pools.where("search_index @@ to_tsquery('pg_catalog.english', ?)", value_index_query)
# If a search keyword contains spaces, then it was quoted in the search query
# and we should only match adjacent words. tsquery won't do this for us; we need
# to filter results where the words aren't adjacent.
#
# This has a side-effect: any stopwords, stemming, parsing, etc. rules performed
# by to_tsquery won't be done here. We need to perform the same processing as
# is used to generate search_index. We don't perform all of the stemming rules, so
# although "jump" may match "jumping", "jump beans" won't match "jumping beans" because
# we'll filter it out.
#
# This also doesn't perform tokenization, so some obscure cases won't match perfectly;
# for example, "abc def" will match "xxxabc def abc" when it probably shouldn't. Doing
# this more correctly requires Postgresql support that doesn't exist right now.
query.each do |q|
# Don't do this if there are no spaces in the query, so we don't turn off tsquery
# parsing when we don't need to.
next unless q.include?(" ")
@pools = @pools.where("position(LOWER(?) IN LOWER(replace_underscores(name))) > 0 OR position(LOWER(?) IN LOWER(description)) > 0", q, q)
end
end
if order.nil?
if search_tokens.empty?
order = "date"
else
order = "name"
end
end
order = case order
when "name" then "nat_sort(name) ASC"
when "date" then "created_at DESC"
when "updated" then "updated_at DESC"
when "id" then "id DESC"
else "created_at DESC"
end
@pools = @pools.order(order).paginate options
# One sample post per pool for the thumbnails.
@samples = {}
@pools.each do |p|
post = p.get_sample
unless post then next end
@samples[p] = post
end
respond_to_list("pools", :atom => true)
end
# Shows a pool with a paginated page of its posts (24 per page, or 1000 in
# "large" browse mode). Unknown pool ids flash a notice and bounce to the
# index. XML embeds the posts inside the pool element.
def show
if params[:samples] == "0" then params.delete(:samples) end
begin
@pool = Pool.includes(:pool_posts => :post).find(params[:id])
rescue
flash[:notice] = t("c.pool.not_found", :id => params[:id].to_i)
redirect_to :action => :index
return
end
@browse_mode = @current_user.pool_browse_mode
# Build an empty tag query restricted to this pool.
q = Tag.parse_query("")
q[:pool] = params[:id].to_i
q[:show_deleted_only] = false
if @browse_mode == 1
q[:limit] = 1000
else
q[:limit] = 24
end
count = Post.count_by_sql(Post.generate_sql(q, :from_api => true, :count => true))
@posts = WillPaginate::Collection.new(page_number, q[:limit], count)
sql = Post.generate_sql(q, :from_api => true, :offset => @posts.offset, :limit => @posts.per_page)
@posts.replace(Post.find_by_sql(sql))
respond_to do |fmt|
fmt.html
fmt.xml do
builder = Builder::XmlMarkup.new(:indent => 2)
builder.instruct!
xml = @pool.to_xml(:builder => builder, :skip_instruct => true) do
builder.posts do
@posts.each do |post|
post.to_xml(:builder => builder, :skip_instruct => true)
end
end
end
render :xml => xml
end
fmt.json
end
end
# Updates a pool's attributes. Only users allowed by can_be_updated_by?
# may proceed; GET requests fall through to the edit form render.
def update
  @pool = Pool.find(params[:id])
  unless @pool.can_be_updated_by?(@current_user)
    access_denied
    return
  end
  return unless request.post?
  @pool.update_attributes(pool_params)
  respond_to_success("Pool updated", :action => "show", :id => params[:id])
end
# POST creates a pool owned by the current user; GET just sets up a blank
# pool for the form.
def create
  unless request.post?
    @pool = Pool.new(:user_id => @current_user.id)
    return
  end
  @pool = Pool.create(pool_params.merge(:user_id => @current_user.id))
  if @pool.errors.empty?
    respond_to_success("Pool created", :action => "show", :id => @pool.id)
  else
    respond_to_error(@pool, :action => "index")
  end
end
# Contributor-only: duplicates a pool (name, description, post list and
# sequence numbers) for the current user. GET renders a confirmation form;
# POST performs the copy.
# NOTE(review): find_by_id returns nil on a bad id, and the next line would
# then raise NoMethodError -- confirm callers always pass a valid id.
def copy
@old_pool = Pool.find_by_id(params[:id])
name = params[:name] || "#{@old_pool.name} (copy)"
@new_pool = Pool.new(:user_id => @current_user.id, :name => name, :description => @old_pool.description)
if request.post?
@new_pool.save
unless @new_pool.errors.empty?
respond_to_error(@new_pool, :action => "index")
return
end
@old_pool.pool_posts.each do |pp|
@new_pool.add_post(pp.post_id, :sequence => pp.sequence)
end
respond_to_success("Pool created", :action => "show", :id => @new_pool.id)
end
end
# POST deletes the pool if the current user may update it; GET falls
# through to the confirmation render.
def destroy
  @pool = Pool.find(params[:id])
  return unless request.post?
  unless @pool.can_be_updated_by?(@current_user)
    access_denied
    return
  end
  @pool.destroy
  respond_to_success("Pool deleted", :action => "index")
end
# POST adds a post to a pool (optional explicit sequence); duplicates are
# rejected with HTTP 423, permission failures with access_denied. GET lists
# the pools the current user could add the post to.
def add_post
if request.post?
@pool = Pool.find(params[:pool_id])
# Remembered so the UI can preselect the last-used pool.
session[:last_pool_id] = @pool.id
if params[:pool] && !params[:pool][:sequence].blank?
sequence = params[:pool][:sequence]
else
sequence = nil
end
begin
@pool.add_post(params[:post_id], :sequence => sequence, :user => @current_user)
respond_to_success("Post added", :controller => "post", :action => "show", :id => params[:post_id])
rescue Pool::PostAlreadyExistsError
respond_to_error("Post already exists", { :controller => "post", :action => "show", :id => params[:post_id] }, :status => 423)
rescue Pool::AccessDeniedError
access_denied
rescue => x
respond_to_error(x.class, :controller => "post", :action => "show", :id => params[:post_id])
end
else
# Anonymous users only see public pools; members also see their own.
@pools = Pool.where(:is_active => true)
if @current_user.is_anonymous?
@pools = @pools.where(:is_public => true)
else
@pools = @pools.where("is_public = TRUE OR user_id = ?", @current_user.id)
end
@pools = @pools.order(:name)
@post = Post.find(params[:post_id])
end
end
# POST removes a post from a pool (access-checked), returning the post's
# refreshed API data and an X-Post-Id header for the JS caller. GET loads
# the pool and post for the confirmation view.
def remove_post
if request.post?
@pool = Pool.find(params[:pool_id])
post = Post.find(params[:post_id])
begin
@pool.remove_post(params[:post_id], :user => @current_user)
rescue Pool::AccessDeniedError
access_denied
return
end
api_data = Post.batch_api_data([post])
response.headers["X-Post-Id"] = params[:post_id]
respond_to_success("Post removed", { :controller => "post", :action => "show", :id => params[:post_id] }, :api => api_data)
else
@pool = Pool.find(params[:pool_id])
@post = Post.find(params[:post_id])
end
end
# Reorders the posts inside a pool. POST applies the submitted
# pool_post_sequence map ({pool_post_id => sequence}) in one transaction
# and refreshes the pool's prev/next links; GET loads the posts for the
# drag-and-drop ordering view.
def order
@pool = Pool.find(params[:id])
unless @pool.can_be_updated_by?(@current_user)
access_denied
return
end
if request.post?
PoolPost.transaction do
params.fetch(:pool_post_sequence, []).each do |i, seq|
PoolPost.update(i, :sequence => seq)
end
@pool.reload
@pool.update_pool_links
end
flash[:notice] = "Ordering updated"
redirect_to :action => "show", :id => params[:id]
else
@pool_posts = @pool.pool_posts
end
end
# Bulk-adds posts to a pool. POST takes a {post_id => position} hash,
# adds the posts in position order (silently skipping duplicates) in one
# transaction, then rebuilds the pool links. GET/JS runs the tag search
# (capped at 500) that feeds the post picker.
def import
@pool = Pool.find(params[:id])
unless @pool.can_be_updated_by?(@current_user)
access_denied
return
end
if request.post?
if params[:posts].is_a?(Hash)
ordered_posts = params[:posts].sort { |a, b| a[1] <=> b[1] }.map { |a| a[0] }
PoolPost.transaction do
ordered_posts.each do |post_id|
begin
# Defer link rebuilding until all posts are in.
@pool.add_post(post_id, :skip_update_pool_links => true)
rescue Pool::PostAlreadyExistsError
# ignore
end
end
@pool.update_pool_links
end
end
redirect_to :action => "show", :id => @pool.id
else
respond_to do |fmt|
fmt.html
fmt.js do
@posts = Post.find_by_tags(params[:query], :limit => 500)
@posts = @posts.select { |x| x.can_be_seen_by?(@current_user) }
end
end
end
end
# Layout-less pool picker for a given post: active pools the current user
# may add to (public only for anonymous users), ordered by name.
def select
  @post_id = params[:post_id].to_i
  candidates = Pool.where(:is_active => true)
  candidates = if @current_user.is_anonymous?
    candidates.where(:is_public => true)
  else
    candidates.where("is_public = TRUE OR user_id = ?", @current_user.id)
  end
  @pools = candidates.order(:name)
  render :layout => false
end
# Generate a ZIP control file for nginx, and redirect to the ZIP.
# The action only exists when pool ZIP downloads are enabled in config;
# the X-Archive-Files header tells nginx's mod_zip to stream the archive
# described by the rendered control file.
if CONFIG["pool_zips"]
def zip
# FIXME: should use the correct mime type instead of this hackery.
Rack::MiniProfiler.deauthorize_request if Rails.env.development?
pool = Pool.includes(:pool_posts => :post).find(params[:id])
@pool_zip = pool.get_zip_data(params)
headers["X-Archive-Files"] = "zip"
render :layout => false
end
end
# Contributor tool: pairs up the posts of two pools position-by-position
# and, for each pair, builds the tag edits needed to copy metadata from
# the "from" post onto the "to" post (tags, rating, index visibility,
# parent link). When the pools differ in length the longer one is
# truncated and @truncated flags it for the view. Without a "from" param
# only the picker is shown.
def transfer_metadata
@to = Pool.find(params[:to])
unless params[:from]
@from = nil
return
end
@from = Pool.find(params[:from])
from_posts = @from.pool_posts
to_posts = @to.pool_posts
if from_posts.length == to_posts.length
@truncated = false
else
@truncated = true
min_posts = [from_posts.length, to_posts.length].min
from_posts = from_posts.slice(0, min_posts)
to_posts = to_posts.slice(0, min_posts)
end
@posts = []
from_posts.each_index do |idx|
data = {}
from = from_posts[idx].post
to = to_posts[idx].post
data[:from] = from
data[:to] = to
tags = from.tags.split(" ")
if from.rating != to.rating
tags << "rating:%s" % to.rating
end
if from.is_shown_in_index != to.is_shown_in_index
tags << (from.is_shown_in_index ? "show" : "hide")
end
# NOTE(review): this compares from's PARENT id with to's own id and tags
# "child:<from.id>" -- looks intentional (make "to" the parent of "from")
# but the asymmetry is worth confirming.
if from.parent_id != to.id
tags << "child:%i" % from.id
end
data[:tags] = tags.join(" ")
@posts << data
end
end
private
# Strong-parameters whitelist for pool create/update.
def pool_params
params.require(:pool).permit(:name, :description, :is_public, :is_active)
end
end
Only return html format for pool#order
class PoolController < ApplicationController
layout "default"
before_action :member_only, :only => [:destroy, :update, :add_post, :remove_post, :import, :zip]
before_action :post_member_only, :only => [:create]
before_action :contributor_only, :only => [:copy, :transfer_metadata]
helper :post
def index
@pools = Pool.all
options = {
:per_page => 20,
:page => page_number
}
order = params[:order]
search_tokens = []
if params[:query]
begin
query = params[:query].shellsplit
# Will raise error if not a valid shell-quoted string (unbalanced quotes).
# Use plain split instead.
rescue ArgumentError
query = params[:query].split
end
query.each do |token|
if token =~ /^(order|limit|posts):(.+)$/
if Regexp.last_match[1] == "order"
order = Regexp.last_match[2]
elsif Regexp.last_match[1] == "limit"
options[:per_page] = Regexp.last_match[2].to_i
options[:per_page] = [options[:per_page], 100].min
elsif Regexp.last_match[1] == "posts"
@pools = @pools.where(*Post.sql_range_for_where(Tag.parse_helper(Regexp.last_match[2]), "post_count"))
end
else
search_tokens << token
end
end
end
if search_tokens.any?
value_index_query = "(#{Array.wrap(search_tokens).map(&:to_escaped_for_tsquery).join(" & ")})"
@pools = @pools.where("search_index @@ to_tsquery('pg_catalog.english', ?)", value_index_query)
# If a search keyword contains spaces, then it was quoted in the search query
# and we should only match adjacent words. tsquery won't do this for us; we need
# to filter results where the words aren't adjacent.
#
# This has a side-effect: any stopwords, stemming, parsing, etc. rules performed
# by to_tsquery won't be done here. We need to perform the same processing as
# is used to generate search_index. We don't perform all of the stemming rules, so
# although "jump" may match "jumping", "jump beans" won't match "jumping beans" because
# we'll filter it out.
#
# This also doesn't perform tokenization, so some obscure cases won't match perfectly;
# for example, "abc def" will match "xxxabc def abc" when it probably shouldn't. Doing
# this more correctly requires Postgresql support that doesn't exist right now.
query.each do |q|
# Don't do this if there are no spaces in the query, so we don't turn off tsquery
# parsing when we don't need to.
next unless q.include?(" ")
@pools = @pools.where("position(LOWER(?) IN LOWER(replace_underscores(name))) > 0 OR position(LOWER(?) IN LOWER(description)) > 0", q, q)
end
end
if order.nil?
if search_tokens.empty?
order = "date"
else
order = "name"
end
end
order = case order
when "name" then "nat_sort(name) ASC"
when "date" then "created_at DESC"
when "updated" then "updated_at DESC"
when "id" then "id DESC"
else "created_at DESC"
end
@pools = @pools.order(order).paginate options
@samples = {}
@pools.each do |p|
post = p.get_sample
unless post then next end
@samples[p] = post
end
respond_to_list("pools", :atom => true)
end
def show
if params[:samples] == "0" then params.delete(:samples) end
begin
@pool = Pool.includes(:pool_posts => :post).find(params[:id])
rescue
flash[:notice] = t("c.pool.not_found", :id => params[:id].to_i)
redirect_to :action => :index
return
end
@browse_mode = @current_user.pool_browse_mode
q = Tag.parse_query("")
q[:pool] = params[:id].to_i
q[:show_deleted_only] = false
if @browse_mode == 1
q[:limit] = 1000
else
q[:limit] = 24
end
count = Post.count_by_sql(Post.generate_sql(q, :from_api => true, :count => true))
@posts = WillPaginate::Collection.new(page_number, q[:limit], count)
sql = Post.generate_sql(q, :from_api => true, :offset => @posts.offset, :limit => @posts.per_page)
@posts.replace(Post.find_by_sql(sql))
respond_to do |fmt|
fmt.html
fmt.xml do
builder = Builder::XmlMarkup.new(:indent => 2)
builder.instruct!
xml = @pool.to_xml(:builder => builder, :skip_instruct => true) do
builder.posts do
@posts.each do |post|
post.to_xml(:builder => builder, :skip_instruct => true)
end
end
end
render :xml => xml
end
fmt.json
end
end
# POST /pool/update/:id
# Applies submitted edits to an existing pool; a plain GET simply
# renders the edit form. Denies access unless the current user may
# modify the pool.
def update
  @pool = Pool.find(params[:id])
  return access_denied unless @pool.can_be_updated_by?(@current_user)
  return unless request.post?
  @pool.update_attributes(pool_params)
  respond_to_success("Pool updated", :action => "show", :id => params[:id])
end
# POST /pool/create
# Creates a pool owned by the current user; a plain GET renders the
# new-pool form with an unsaved record.
def create
  unless request.post?
    @pool = Pool.new(:user_id => @current_user.id)
    return
  end
  @pool = Pool.create(pool_params.merge(:user_id => @current_user.id))
  if @pool.errors.empty?
    respond_to_success("Pool created", :action => "show", :id => @pool.id)
  else
    respond_to_error(@pool, :action => "index")
  end
end
# POST /pool/copy/:id
# Duplicates an existing pool (its posts and their sequence numbers)
# into a new pool owned by the current user; GET renders the
# confirmation form.
#
# Fix: uses Pool.find instead of Pool.find_by_id so a missing source
# pool raises RecordNotFound (handled as 404, consistent with
# show/update/destroy) instead of a NoMethodError on `@old_pool.name`.
def copy
  @old_pool = Pool.find(params[:id])
  name = params[:name] || "#{@old_pool.name} (copy)"
  @new_pool = Pool.new(:user_id => @current_user.id, :name => name, :description => @old_pool.description)
  if request.post?
    @new_pool.save
    unless @new_pool.errors.empty?
      respond_to_error(@new_pool, :action => "index")
      return
    end
    # Copy membership, preserving each post's position within the pool.
    @old_pool.pool_posts.each do |pp|
      @new_pool.add_post(pp.post_id, :sequence => pp.sequence)
    end
    respond_to_success("Pool created", :action => "show", :id => @new_pool.id)
  end
end
# POST /pool/destroy/:id
# Deletes a pool when the requester is allowed to edit it; a plain GET
# renders the confirmation page.
def destroy
  @pool = Pool.find(params[:id])
  return unless request.post?
  if @pool.can_be_updated_by?(@current_user)
    @pool.destroy
    respond_to_success("Pool deleted", :action => "index")
  else
    access_denied
  end
end
# POST: adds a post to a pool, honouring an optional explicit sequence
# number. GET: renders the pool-selection form for the post.
def add_post
  if request.post?
    @pool = Pool.find(params[:pool_id])
    # Remember the chosen pool so the UI can preselect it next time.
    session[:last_pool_id] = @pool.id
    if params[:pool] && !params[:pool][:sequence].blank?
      sequence = params[:pool][:sequence]
    else
      sequence = nil
    end
    begin
      @pool.add_post(params[:post_id], :sequence => sequence, :user => @current_user)
      respond_to_success("Post added", :controller => "post", :action => "show", :id => params[:post_id])
    rescue Pool::PostAlreadyExistsError
      # 423 Locked signals the duplicate to API clients.
      respond_to_error("Post already exists", { :controller => "post", :action => "show", :id => params[:post_id] }, :status => 423)
    rescue Pool::AccessDeniedError
      access_denied
    rescue => x
      # Any other failure is reported by its exception class name.
      respond_to_error(x.class, :controller => "post", :action => "show", :id => params[:post_id])
    end
  else
    # Anonymous users may only target public pools; logged-in users also
    # see their own private ones.
    @pools = Pool.where(:is_active => true)
    if @current_user.is_anonymous?
      @pools = @pools.where(:is_public => true)
    else
      @pools = @pools.where("is_public = TRUE OR user_id = ?", @current_user.id)
    end
    @pools = @pools.order(:name)
    @post = Post.find(params[:post_id])
  end
end
# POST: removes a post from a pool and responds with refreshed API data
# for that post. GET: renders the confirmation form.
def remove_post
  if request.post?
    @pool = Pool.find(params[:pool_id])
    post = Post.find(params[:post_id])
    begin
      @pool.remove_post(params[:post_id], :user => @current_user)
    rescue Pool::AccessDeniedError
      access_denied
      return
    end
    api_data = Post.batch_api_data([post])
    # Header lets the JS client know which post row to update.
    response.headers["X-Post-Id"] = params[:post_id]
    respond_to_success("Post removed", { :controller => "post", :action => "show", :id => params[:post_id] }, :api => api_data)
  else
    @pool = Pool.find(params[:pool_id])
    @post = Post.find(params[:post_id])
  end
end
# Reorders the posts in a pool. POST applies the submitted
# pool-post-id => sequence mapping inside a single transaction; GET
# renders the ordering UI.
def order
  @pool = Pool.find(params[:id])
  unless @pool.can_be_updated_by?(@current_user)
    access_denied
    return
  end
  if request.post?
    PoolPost.transaction do
      params.fetch(:pool_post_sequence, []).each do |i, seq|
        PoolPost.update(i, :sequence => seq)
      end
      # Reload so update_pool_links sees the freshly written sequences.
      @pool.reload
      @pool.update_pool_links
    end
    flash[:notice] = "Ordering updated"
    redirect_to :action => "show", :id => params[:id]
  else
    @pool_posts = @pool.pool_posts
  end
  respond_to do |format|
    format.html
  end
end
# Bulk-adds posts to a pool. POST takes a post_id => position hash and
# inserts the posts in position order; the GET js variant runs the
# search used to pick candidate posts.
def import
  @pool = Pool.find(params[:id])
  unless @pool.can_be_updated_by?(@current_user)
    access_denied
    return
  end
  if request.post?
    if params[:posts].is_a?(Hash)
      # Sort by the submitted position value, then keep just the ids.
      ordered_posts = params[:posts].sort { |a, b| a[1] <=> b[1] }.map { |a| a[0] }
      PoolPost.transaction do
        ordered_posts.each do |post_id|
          begin
            @pool.add_post(post_id, :skip_update_pool_links => true)
          rescue Pool::PostAlreadyExistsError
            # ignore — importing an already-present post is a no-op
          end
        end
        # Rebuild prev/next links once, after all inserts.
        @pool.update_pool_links
      end
    end
    redirect_to :action => "show", :id => @pool.id
  else
    respond_to do |fmt|
      fmt.html
      fmt.js do
        # Candidate posts for import, filtered by visibility.
        @posts = Post.find_by_tags(params[:query], :limit => 500)
        @posts = @posts.select { |x| x.can_be_seen_by?(@current_user) }
      end
    end
  end
end
# Renders (without layout) the list of pools the current user may add a
# post to, for the pool-selection popup.
def select
  @post_id = params[:post_id].to_i
  scope = Pool.where(:is_active => true)
  scope = if @current_user.is_anonymous?
            scope.where(:is_public => true)
          else
            scope.where("is_public = TRUE OR user_id = ?", @current_user.id)
          end
  @pools = scope.order(:name)
  render :layout => false
end
# Generate a ZIP control file for nginx, and redirect to the ZIP.
# The action is only defined when pool ZIP downloads are enabled in the
# site configuration.
if CONFIG["pool_zips"]
  def zip
    # FIXME: should use the correct mime type instead of this hackery.
    # NOTE(review): presumably disabled because the profiler interferes
    # with the streamed archive response — confirm.
    Rack::MiniProfiler.deauthorize_request if Rails.env.development?
    pool = Pool.includes(:pool_posts => :post).find(params[:id])
    @pool_zip = pool.get_zip_data(params)
    # Tells nginx's mod_zip to stream the listed files as a ZIP archive.
    headers["X-Archive-Files"] = "zip"
    render :layout => false
  end
end
# Compares two pools post-by-post (matched by position) and builds, for
# each pair, the tag edits needed to carry rating/visibility/parentage
# metadata over. The view presents the proposed edits for confirmation.
def transfer_metadata
  @to = Pool.find(params[:to])
  unless params[:from]
    @from = nil
    return
  end
  @from = Pool.find(params[:from])
  from_posts = @from.pool_posts
  to_posts = @to.pool_posts
  # When the pools differ in size, only compare the common prefix and
  # flag the truncation so the view can warn about it.
  if from_posts.length == to_posts.length
    @truncated = false
  else
    @truncated = true
    min_posts = [from_posts.length, to_posts.length].min
    from_posts = from_posts.slice(0, min_posts)
    to_posts = to_posts.slice(0, min_posts)
  end
  @posts = []
  from_posts.each_index do |idx|
    data = {}
    from = from_posts[idx].post
    to = to_posts[idx].post
    data[:from] = from
    data[:to] = to
    tags = from.tags.split(" ")
    if from.rating != to.rating
      tags << "rating:%s" % to.rating
    end
    if from.is_shown_in_index != to.is_shown_in_index
      tags << (from.is_shown_in_index ? "show" : "hide")
    end
    # NOTE(review): this compares from.parent_id against to.id —
    # presumably to re-parent the copy; confirm it isn't meant to be
    # to.parent_id.
    if from.parent_id != to.id
      tags << "child:%i" % from.id
    end
    data[:tags] = tags.join(" ")
    @posts << data
  end
end
private
# Strong-parameters whitelist for pool create/update.
def pool_params
  params.require(:pool).permit(:name, :description, :is_public, :is_active)
end
end
|
require "slimmer/headers"
# Raised when a requested publication/record cannot be found; rescued in
# RootController#publication and rendered as a 404.
#
# Fix: subclass StandardError rather than Exception — application-level
# errors should never derive from Exception, so that a bare `rescue`
# (which catches StandardError) can still handle an unexpected escape.
class RecordNotFound < StandardError
end
# Frontend controller: serves the homepage and renders publications
# fetched from the publisher/content APIs, including geo-driven place
# lookups and local-transaction council resolution.
class RootController < ApplicationController
  include Rack::Geo::Utils
  include RootHelper
  include ActionView::Helpers::TextHelper
  include ArtefactHelpers

  # Upstream API failures surface as 503s rather than generic 500s.
  rescue_from GdsApi::TimedOutException, with: :error_503
  rescue_from GdsApi::EndpointNotFound, with: :error_503

  # Homepage: publicly cacheable, rendered in the "homepage" template.
  def index
    set_expiry
    set_slimmer_headers(template: "homepage")
    # Only needed for Analytics
    set_slimmer_dummy_artefact(:section_name => "homepage", :section_url => "/")
  end

  # Renders a publication by slug in html/video/print/json. Handles
  # "done" slug rewriting, part redirects, place options and
  # local-transaction council lookup. Unknown records become 404s via
  # RecordNotFound.
  def publication
    error_406 and return if request.format.nil?
    # "done" transaction-complete pages address their sub-page via the
    # slug itself rather than a part.
    if params[:slug] == 'done' and !params[:part].blank?
      params[:slug] += '/' + params[:part]
      params[:part] = nil
    end
    @publication = fetch_publication(params)
    assert_found(@publication)
    @artefact = fetch_artefact
    set_slimmer_artefact_headers(@artefact)
    case @publication.type
    when "place"
      # Only cache plain GETs that aren't previewing an edition.
      set_expiry if params.exclude?('edition') and request.get?
      @options = load_place_options(@publication)
      @publication.places = @options
    when "local_transaction"
      @council = load_council(@publication, params[:edition])
      @publication.council = @council
    when "programme"
      # Programmes default to their first part.
      params[:part] ||= @publication.parts.first.slug
    else
      set_expiry if params.exclude?('edition')
    end
    if @publication.parts
      @part = @publication.find_part(params[:part])
    end
    if video_requested_but_not_found? || part_requested_but_not_found? || empty_part_list?
      raise RecordNotFound
    elsif @publication.parts && treat_as_standard_html_request? && @part.nil?
      # No part requested: redirect to the canonical URL of part one.
      params.delete(:slug)
      params.delete(:part)
      redirect_to publication_url(@publication.slug, @publication.parts.first.slug, params) and return
    end
    @edition = params[:edition]
    # Expose e.g. @guide / @answer so type-specific views keep working.
    instance_variable_set("@#{@publication.type}".to_sym, @publication)
    respond_to do |format|
      format.html do
        if @publication.type == "local_transaction"
          @not_found = false
          if @council.present? && @council[:url]
            redirect_to @council[:url] and return
          elsif council_from_geostack.any?
            # Councils were found but none provides this transaction.
            @not_found = true
          end
        end
        render @publication.type
      end
      format.video do
        render @publication.type, layout: "application.html.erb"
      end
      format.print do
        set_slimmer_headers template: "print"
        render @publication.type
      end
      format.json do
        render :json => @publication.to_json
      end
    end
  rescue RecordNotFound
    set_expiry
    error 404
  end

  def settings
    respond_to do |format|
      format.html {}
      format.raw {
        # Raw variant skips the slimmer template wrapping entirely.
        set_slimmer_headers skip: "true"
        render 'settings.html.erb'
      }
    end
  end

  protected

  # Fetches the artefact for the current slug from the Content API;
  # always returns something — a placeholder artefact on any failure.
  def fetch_artefact
    artefact = content_api.artefact(params[:slug])
    unless artefact
      logger.warn("Failed to fetch artefact #{params[:slug]} from Content API. Response code: 404")
    end
  rescue GdsApi::HTTPErrorResponse => e
    logger.warn("Failed to fetch artefact from Content API. Response code: #{e.code}")
  ensure
    return artefact || artefact_unavailable
  end

  def empty_part_list?
    @publication.parts and @publication.parts.empty?
  end

  def part_requested_but_not_found?
    params[:part] && @publication.parts.blank?
  end

  def video_requested_but_not_found?
    request.format.video? && @publication.video_url.blank?
  end

  # request.format.html? returns 5 when the request format is video.
  def treat_as_standard_html_request?
    !request.format.json? and !request.format.print? and !request.format.video?
  end

  # Looks up nearby places for a place-type publication, but only when
  # the geo stack knows the user's location to at least ward accuracy.
  def load_place_options(publication)
    if geo_known_to_at_least?('ward')
      places = imminence_api.places(publication.place_type, geo_header['fuzzy_point']['lat'], geo_header['fuzzy_point']['lon'])
      # Shorten displayed URLs so they fit the listing layout.
      places.each_with_index {|place,i| places[i]['text'] = places[i]['url'].truncate(36) if places[i]['url'].present? }
      places
    else
      []
    end
  end

  # Resolves which council provides a local transaction: prefers the
  # first council exposing a URL, then falls back to one with a name.
  # Returns false when the geo stack yields no councils at all.
  def load_council(local_transaction, edition = nil)
    councils = council_from_geostack
    basic_params = {slug: local_transaction.slug}
    basic_params[:edition] = edition if edition
    unless councils.any?
      return false
    else
      providers = councils.map do |council_ons_code|
        local_transaction = fetch_publication(basic_params.merge(snac: council_ons_code))
        build_local_transaction_information(local_transaction) if local_transaction
      end
      providers.compact!
      provider = providers.select {|council| council[:url] }.first
      if provider
        provider
      else
        providers.select {|council| council[:name]}.first
      end
    end
  end

  # Extracts the interaction URL and authority contact details from a
  # council-scoped local transaction publication.
  def build_local_transaction_information(local_transaction)
    result = {url: nil}
    if local_transaction.interaction
      result[:url] = local_transaction.interaction.url
      # DEPRECATED: authority is not located inside the interaction in the latest version
      # of publisher. This is here for backwards compatibility.
      if local_transaction.interaction.authority
        result.merge!(build_authority_contact_information(local_transaction.interaction.authority))
      end
      # END DEPRECATED SECTION
    end
    if local_transaction.authority
      result.merge!(build_authority_contact_information(local_transaction.authority))
    end
    result
  end

  def build_authority_contact_information(authority)
    {
      name: authority.name,
      contact_address: authority.contact_address,
      contact_url: authority.contact_url,
      contact_phone: authority.contact_phone,
      contact_email: authority.contact_email
    }
  end

  # Fetches a publication from the publisher API, passing through any
  # edition/snac scoping. Returns false for malformed slugs.
  def fetch_publication(params)
    options = {
      edition: params[:edition],
      snac: params[:snac]
    }.reject { |k, v| v.blank? }
    publisher_api.publication_for_slug(params[:slug], options)
  rescue ArgumentError
    logger.error "invalid UTF-8 byte sequence with slug `#{params[:slug]}`"
    return false
  rescue URI::InvalidURIError
    logger.error "Invalid URI formed with slug `#{params[:slug]}`"
    return false
  end

  # Returns the ONS codes of the user's councils: an explicit
  # council_ons_codes param wins, otherwise the encoded geo header.
  def council_from_geostack
    if params['council_ons_codes']
      return params['council_ons_codes']
    end
    if !request.env['HTTP_X_GOVGEO_STACK']
      return []
    end
    location_data = decode_stack(request.env['HTTP_X_GOVGEO_STACK'])
    if location_data['council']
      location_data['council'].compact.map {|c| c['ons']}.compact
    else
      return []
    end
  end

  def assert_found(obj)
    raise RecordNotFound unless obj
  end

  def set_slimmer_artefact_headers(artefact)
    set_slimmer_headers(format: artefact["format"])
    set_slimmer_artefact(artefact)
  end

  # Allow an hour of public caching everywhere except development.
  def set_expiry
    unless Rails.env.development?
      expires_in(60.minutes, :public => true)
    end
  end
end
Make done logic more readable
require "slimmer/headers"
# Raised when a requested publication/record cannot be found; rescued in
# RootController#publication and rendered as a 404.
#
# Fix: subclass StandardError rather than Exception — application-level
# errors should never derive from Exception, so that a bare `rescue`
# (which catches StandardError) can still handle an unexpected escape.
class RecordNotFound < StandardError
end
# Frontend controller: serves the homepage and renders publications
# fetched from the publisher/content APIs, including geo-driven place
# lookups and local-transaction council resolution.
class RootController < ApplicationController
  include Rack::Geo::Utils
  include RootHelper
  include ActionView::Helpers::TextHelper
  include ArtefactHelpers

  # Upstream API failures surface as 503s rather than generic 500s.
  rescue_from GdsApi::TimedOutException, with: :error_503
  rescue_from GdsApi::EndpointNotFound, with: :error_503

  # Homepage: publicly cacheable, rendered in the "homepage" template.
  def index
    set_expiry
    set_slimmer_headers(template: "homepage")
    # Only needed for Analytics
    set_slimmer_dummy_artefact(:section_name => "homepage", :section_url => "/")
  end

  # Renders a publication by slug in html/video/print/json. Handles
  # "done" slug rewriting, part redirects, place options and
  # local-transaction council lookup. Unknown records become 404s via
  # RecordNotFound.
  def publication
    error_406 and return if request.format.nil?
    # "done" transaction-complete pages address their sub-page via the
    # slug itself rather than a part.
    if params[:slug] == 'done' and params[:part].present?
      params[:slug] += "/#{params[:part]}"
      params[:part] = nil
    end
    @publication = fetch_publication(params)
    assert_found(@publication)
    @artefact = fetch_artefact
    set_slimmer_artefact_headers(@artefact)
    case @publication.type
    when "place"
      # Only cache plain GETs that aren't previewing an edition.
      set_expiry if params.exclude?('edition') and request.get?
      @options = load_place_options(@publication)
      @publication.places = @options
    when "local_transaction"
      @council = load_council(@publication, params[:edition])
      @publication.council = @council
    when "programme"
      # Programmes default to their first part.
      params[:part] ||= @publication.parts.first.slug
    else
      set_expiry if params.exclude?('edition')
    end
    if @publication.parts
      @part = @publication.find_part(params[:part])
    end
    if video_requested_but_not_found? || part_requested_but_not_found? || empty_part_list?
      raise RecordNotFound
    elsif @publication.parts && treat_as_standard_html_request? && @part.nil?
      # No part requested: redirect to the canonical URL of part one.
      params.delete(:slug)
      params.delete(:part)
      redirect_to publication_url(@publication.slug, @publication.parts.first.slug, params) and return
    end
    @edition = params[:edition]
    # Expose e.g. @guide / @answer so type-specific views keep working.
    instance_variable_set("@#{@publication.type}".to_sym, @publication)
    respond_to do |format|
      format.html do
        if @publication.type == "local_transaction"
          @not_found = false
          if @council.present? && @council[:url]
            redirect_to @council[:url] and return
          elsif council_from_geostack.any?
            # Councils were found but none provides this transaction.
            @not_found = true
          end
        end
        render @publication.type
      end
      format.video do
        render @publication.type, layout: "application.html.erb"
      end
      format.print do
        set_slimmer_headers template: "print"
        render @publication.type
      end
      format.json do
        render :json => @publication.to_json
      end
    end
  rescue RecordNotFound
    set_expiry
    error 404
  end

  def settings
    respond_to do |format|
      format.html {}
      format.raw {
        # Raw variant skips the slimmer template wrapping entirely.
        set_slimmer_headers skip: "true"
        render 'settings.html.erb'
      }
    end
  end

  protected

  # Fetches the artefact for the current slug from the Content API;
  # always returns something — a placeholder artefact on any failure.
  def fetch_artefact
    artefact = content_api.artefact(params[:slug])
    unless artefact
      logger.warn("Failed to fetch artefact #{params[:slug]} from Content API. Response code: 404")
    end
  rescue GdsApi::HTTPErrorResponse => e
    logger.warn("Failed to fetch artefact from Content API. Response code: #{e.code}")
  ensure
    return artefact || artefact_unavailable
  end

  def empty_part_list?
    @publication.parts and @publication.parts.empty?
  end

  def part_requested_but_not_found?
    params[:part] && @publication.parts.blank?
  end

  def video_requested_but_not_found?
    request.format.video? && @publication.video_url.blank?
  end

  # request.format.html? returns 5 when the request format is video.
  def treat_as_standard_html_request?
    !request.format.json? and !request.format.print? and !request.format.video?
  end

  # Looks up nearby places for a place-type publication, but only when
  # the geo stack knows the user's location to at least ward accuracy.
  def load_place_options(publication)
    if geo_known_to_at_least?('ward')
      places = imminence_api.places(publication.place_type, geo_header['fuzzy_point']['lat'], geo_header['fuzzy_point']['lon'])
      # Shorten displayed URLs so they fit the listing layout.
      places.each_with_index {|place,i| places[i]['text'] = places[i]['url'].truncate(36) if places[i]['url'].present? }
      places
    else
      []
    end
  end

  # Resolves which council provides a local transaction: prefers the
  # first council exposing a URL, then falls back to one with a name.
  # Returns false when the geo stack yields no councils at all.
  def load_council(local_transaction, edition = nil)
    councils = council_from_geostack
    basic_params = {slug: local_transaction.slug}
    basic_params[:edition] = edition if edition
    unless councils.any?
      return false
    else
      providers = councils.map do |council_ons_code|
        local_transaction = fetch_publication(basic_params.merge(snac: council_ons_code))
        build_local_transaction_information(local_transaction) if local_transaction
      end
      providers.compact!
      provider = providers.select {|council| council[:url] }.first
      if provider
        provider
      else
        providers.select {|council| council[:name]}.first
      end
    end
  end

  # Extracts the interaction URL and authority contact details from a
  # council-scoped local transaction publication.
  def build_local_transaction_information(local_transaction)
    result = {url: nil}
    if local_transaction.interaction
      result[:url] = local_transaction.interaction.url
      # DEPRECATED: authority is not located inside the interaction in the latest version
      # of publisher. This is here for backwards compatibility.
      if local_transaction.interaction.authority
        result.merge!(build_authority_contact_information(local_transaction.interaction.authority))
      end
      # END DEPRECATED SECTION
    end
    if local_transaction.authority
      result.merge!(build_authority_contact_information(local_transaction.authority))
    end
    result
  end

  def build_authority_contact_information(authority)
    {
      name: authority.name,
      contact_address: authority.contact_address,
      contact_url: authority.contact_url,
      contact_phone: authority.contact_phone,
      contact_email: authority.contact_email
    }
  end

  # Fetches a publication from the publisher API, passing through any
  # edition/snac scoping. Returns false for malformed slugs.
  def fetch_publication(params)
    options = {
      edition: params[:edition],
      snac: params[:snac]
    }.reject { |k, v| v.blank? }
    publisher_api.publication_for_slug(params[:slug], options)
  rescue ArgumentError
    logger.error "invalid UTF-8 byte sequence with slug `#{params[:slug]}`"
    return false
  rescue URI::InvalidURIError
    logger.error "Invalid URI formed with slug `#{params[:slug]}`"
    return false
  end

  # Returns the ONS codes of the user's councils: an explicit
  # council_ons_codes param wins, otherwise the encoded geo header.
  def council_from_geostack
    if params['council_ons_codes']
      return params['council_ons_codes']
    end
    if !request.env['HTTP_X_GOVGEO_STACK']
      return []
    end
    location_data = decode_stack(request.env['HTTP_X_GOVGEO_STACK'])
    if location_data['council']
      location_data['council'].compact.map {|c| c['ons']}.compact
    else
      return []
    end
  end

  def assert_found(obj)
    raise RecordNotFound unless obj
  end

  def set_slimmer_artefact_headers(artefact)
    set_slimmer_headers(format: artefact["format"])
    set_slimmer_artefact(artefact)
  end

  # Allow an hour of public caching everywhere except development.
  def set_expiry
    unless Rails.env.development?
      expires_in(60.minutes, :public => true)
    end
  end
end
|
# Standard Rails scaffold CRUD controller for Run records.
#
# Fix: the original create action closed the `if @run.save` before the
# format calls, leaving a dangling `else` inside the respond_to block —
# a SyntaxError. Restored the conventional scaffold structure (success
# formats inside the if branch, failure formats in the else branch).
class RunsController < ApplicationController
  before_action :set_run, only: [:show, :edit, :update, :destroy, :delete]

  # GET /runs
  # GET /runs.json
  def index
    @runs = Run.all
  end

  # GET /runs/1
  # GET /runs/1.json
  def show
  end

  # GET /runs/new
  def new
    @run = Run.new
  end

  # GET /runs/1/edit
  def edit
  end

  # POST /runs
  # POST /runs.json
  def create
    @run = Run.new(run_params)
    respond_to do |format|
      if @run.save
        # createAds  # TODO: re-enable ad generation (draft kept below)
        format.html { redirect_to runs_path, notice: 'Run was successfully created.' }
        format.json { render :index, status: :created, location: @run }
      else
        format.html { render :new }
        format.json { render json: @run.errors, status: :unprocessable_entity }
      end
    end
  end

=begin
def createAds
# creating your own ads for that run
@run.ownads.times
Ad.create(:company_id => @run.campaign.company, :run_id => @run.id, :selfpromotion => true)
end
# creating other people ads for that run
(@run.runprintnumber - @run.ownads).times do
campaigns = Campaign.where.not(:company_id => run_params.company_id).rewhere(:campaigntype_id => @run.campaign.campaigntype_id).order(:created_at)
# you calculated the visitratio, use it to order the campaigns
campaigns.order(:visitratio).reverse_order
Ad.create(:company_id => , :run_id => @run.id, :selfpromotion => false)
# add field for taking count of this number of adds in the campaing.adsreceived
end
end
=end

  # PATCH/PUT /runs/1
  # PATCH/PUT /runs/1.json
  def update
    respond_to do |format|
      if @run.update(run_params)
        format.html { redirect_to @run, notice: 'Run was successfully updated.' }
        format.json { render :show, status: :ok, location: @run }
      else
        format.html { render :edit }
        format.json { render json: @run.errors, status: :unprocessable_entity }
      end
    end
  end

  # GET /runs/1/delete — renders the delete-confirmation view only.
  def delete
  end

  # DELETE /runs/1
  # DELETE /runs/1.json
  def destroy
    @run.destroy
    respond_to do |format|
      format.html { redirect_to runs_url, notice: 'Run was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_run
    @run = Run.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def run_params
    params.require(:run).permit(:campaign_id, :runprintnumber, :ownads)
  end
end
heroku
# Standard Rails scaffold CRUD controller for Run records.
class RunsController < ApplicationController
  before_action :set_run, only: [:show, :edit, :update, :destroy, :delete]

  # GET /runs
  # GET /runs.json
  def index
    @runs = Run.all
  end

  # GET /runs/1
  # GET /runs/1.json
  def show
  end

  # GET /runs/new
  def new
    @run = Run.new
  end

  # GET /runs/1/edit
  def edit
  end

  # POST /runs
  # POST /runs.json
  def create
    @run = Run.new(run_params)
    respond_to do |format|
      if @run.save
        # createAds — ad generation disabled; draft kept in the comment block below
        # (leftover terminator of the disabled createAds call)
        format.html { redirect_to runs_path, notice: 'Run was successfully created.' }
        format.json { render :index, status: :created, location: @run }
      else
        format.html { render :new }
        format.json { render json: @run.errors, status: :unprocessable_entity }
      end
    end
  end

=begin
def createAds
# creating your own ads for that run
@run.ownads.times
Ad.create(:company_id => @run.campaign.company, :run_id => @run.id, :selfpromotion => true)
end
# creating other people ads for that run
(@run.runprintnumber - @run.ownads).times do
campaigns = Campaign.where.not(:company_id => run_params.company_id).rewhere(:campaigntype_id => @run.campaign.campaigntype_id).order(:created_at)
# you calculated the visitratio, use it to order the campaigns
campaigns.order(:visitratio).reverse_order
Ad.create(:company_id => , :run_id => @run.id, :selfpromotion => false)
# add field for taking count of this number of adds in the campaing.adsreceived
end
end
=end

  # PATCH/PUT /runs/1
  # PATCH/PUT /runs/1.json
  def update
    respond_to do |format|
      if @run.update(run_params)
        format.html { redirect_to @run, notice: 'Run was successfully updated.' }
        format.json { render :show, status: :ok, location: @run }
      else
        format.html { render :edit }
        format.json { render json: @run.errors, status: :unprocessable_entity }
      end
    end
  end

  # GET /runs/1/delete — renders the delete-confirmation view only.
  def delete
  end

  # DELETE /runs/1
  # DELETE /runs/1.json
  def destroy
    @run.destroy
    respond_to do |format|
      format.html { redirect_to runs_url, notice: 'Run was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_run
    @run = Run.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def run_params
    params.require(:run).permit(:campaign_id, :runprintnumber, :ownads)
  end
end
|
# Controller for "coffee runs": a runner opens a run, shares a shortened
# link, and colleagues submit drink orders against it until it expires.
#
# Fixes to get_existed_run:
#  * the show branch redirected to /404 unconditionally (the empty
#    check was missing), so every show request 404'd;
#  * on success the method ended with `@run = @run.first` (truthy), so
#    show's `get_existed_run and return` bailed out before populating
#    gon. It now returns nil on success.
class RunsController < ApplicationController
  # Call back
  skip_before_action :verify_authenticity_token
  before_action :validate_api_format, only: [:create, :update, :delete]
  before_action :valid_run_params_for_create, only: [:create]
  before_action :valid_run_params_for_show, only: [:show]
  before_action :validate_order_params_for_submit, only: [:submit]

  # POST /runs/create
  # POST /runs/create.json
  # Creates a run and responds with a Bitly-shortened URL to its page.
  def create
    # Create new run
    run = Run.new(run_params)
    # Raise error if can not save
    raise CoffeeRunError.new("Can not create run") unless run.save
    # Generate shortener url
    bitly = Bitly.client.shorten(CONFIG["coffee_run_url"] + runs_show_path(no: run.no))
    raise CoffeeRunError.new("Can not generate shortener url") if bitly.nil? || bitly.short_url.blank?
    # TODO: Perform worker
    # UpdateRunningTimeWorker.perform_async(run.running_time * 60, run.no)
    render json: { "ok": true, "url": bitly.short_url }, status: :ok
  end

  # POST /runs/update
  # POST /runs/update.json
  # Not implemented yet.
  def update
  end

  # POST /runs/delete
  # POST /runs/delete.json
  # Not implemented yet.
  def delete
  end

  # GET /runs/show
  # Run page: exposes run number, expiry and ordered state to the JS
  # layer via gon. Bails out early when get_existed_run redirected.
  def show
    get_existed_run and return
    gon.no = @run.no
    get_running_time
    gon.running_time = @run.expired_at.strftime("%Y/%m/%d %H:%M:%S")
    gon.ordered = (@running_time == 0) ? true : false
    get_orderers
  end

  # GET /runs/runned_list
  # AJAX partial: order list in the already-ordered state.
  def runned_list
    if request.xhr?
      get_existed_run
      get_orderers
      get_running_time
      render :partial => "runned_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  # GET /runs/running_list
  # AJAX partial: order list while the run is still open.
  def running_list
    if request.xhr?
      get_existed_run
      get_orderers
      get_running_time
      render :partial => "running_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  # POST /runs/submit
  # Records an order against the run and broadcasts it over Pusher so
  # other open pages update live.
  def submit
    if request.xhr?
      get_existed_run
      raise CoffeeRunError.new("Can not submit order") unless @run.orderers.create(order_params)
      get_orderers
      Pusher["private-runs_channel-#{@run.no}"].trigger("add_order", {
        id: session[:authorize_token],
        name: params[:order][:name],
        beverage: params[:order][:beverage]
      })
      # Mark this browser session as having ordered already.
      session[:ordered] = "true"
      get_running_time
      render :partial => "runned_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  private

  # Strong-parameter whitelists.
  def run_params
    params.require(:run).permit(:runner, :running_time, :cups, :expired_at)
  end

  def order_params
    params.require(:order).permit(:name, :beverage)
  end

  # Loads the run addressed by params[:no] into @run.
  # Missing run: show redirects to /404 and this returns true so the
  # caller can bail; other actions raise CoffeeRunError.
  # Success: @run is set and nil is returned.
  def get_existed_run
    @run = Run.find_by_no(params[:no]).to_a
    if @run.empty?
      if action_name == "show"
        redirect_to "/404" and return true
      else
        raise CoffeeRunError.new("Run not found")
      end
    end
    @run = @run.first
    nil
  end

  def get_orderers
    @orderers = @run.orderers.all.to_a
  end

  # Before-filter: validates create parameters and derives expired_at
  # from the requested running time (minutes).
  def valid_run_params_for_create
    raise CoffeeRunError.new("Missing or unvalid parameters") if params[:run].blank? || params[:run][:runner].blank? || params[:run][:running_time].blank? || params[:run][:cups].blank?
    params[:run][:expired_at] = Time.now + params[:run][:running_time].to_i.minutes
  end

  def valid_run_params_for_show
    # raise CoffeeRunError.new("Missing or unvalid parameters") if params[:no].blank?
    redirect_to "/404" if params[:no].blank?
  end

  def validate_order_params_for_submit
    invalid = false
    if params[:order].blank? || params[:order][:name].blank? || params[:order][:beverage].blank?
      invalid = true
    else
      invalid = isGibberish(params[:order][:beverage])
    end
    raise CoffeeRunError.new("Missing or unvalid parameters") if invalid
  end

  # Heuristic nonsense detector: counts vowels and consonants (each
  # seeded at 1) and flags text whose vowel ratio v/(v+c) falls outside
  # 0.3..0.6 as gibberish. Blank input is not gibberish.
  def isGibberish beverage
    v = 1
    c = 1
    gibberish = false
    if !beverage.blank?
      len = beverage.length - 1
      (0..len).each do |i|
        if beverage[i].match /[aeiou]/i
          v += 1
        elsif beverage[i].match /[bcdfghjklmnpqrstvwxyz]/i
          c += 1
        end
      end
      ratio = v * 1.0 /(c + v)
      if ratio < 0.3 || ratio > 0.6
        gibberish = true
      end
    end
    return gibberish
  end

  # Seconds remaining before expiry; 0 once this session has ordered.
  def get_running_time
    current_time = Time.now.utc
    if session[:ordered] == "true"
      @running_time = 0
    else
      @running_time = (@run.expired_at > current_time) ? (@run.expired_at - current_time).round : 0
    end
  end
end
Fix bug: forgot check if run empty on show
# Controller for "coffee runs": a runner opens a run, shares a shortened
# link, and colleagues submit drink orders against it until it expires.
class RunsController < ApplicationController
  # Call back
  skip_before_action :verify_authenticity_token
  before_action :validate_api_format, only: [:create, :update, :delete]
  before_action :valid_run_params_for_create, only: [:create]
  before_action :valid_run_params_for_show, only: [:show]
  before_action :validate_order_params_for_submit, only: [:submit]

  # POST /runs/create
  # POST /runs/create.json
  # Creates a run and responds with a Bitly-shortened URL to its page.
  def create
    # Create new run
    run = Run.new(run_params)
    # Raise error if can not save
    raise CoffeeRunError.new("Can not create run") unless run.save
    # Generate shortener url
    bitly = Bitly.client.shorten(CONFIG["coffee_run_url"] + runs_show_path(no: run.no))
    raise CoffeeRunError.new("Can not generate shortener url") if bitly.nil? || bitly.short_url.blank?
    # TODO: Perform worker
    # UpdateRunningTimeWorker.perform_async(run.running_time * 60, run.no)
    render json: { "ok": true, "url": bitly.short_url }, status: :ok
  end

  # POST /runs/update
  # POST /runs/update.json
  # Not implemented yet.
  def update
  end

  # POST /runs/delete
  # POST /runs/delete.json
  # Not implemented yet.
  def delete
  end

  # GET /runs/show
  # Run page: exposes run number, expiry and ordered state via gon.
  # NOTE(review): get_existed_run ends with `@run = @run.first`, which
  # is truthy whenever the run exists, so `and return` exits here before
  # the gon variables are populated — looks unintended; confirm.
  def show
    get_existed_run and return
    gon.no = @run.no
    get_running_time
    gon.running_time = @run.expired_at.strftime("%Y/%m/%d %H:%M:%S")
    gon.ordered = (@running_time == 0) ? true : false
    get_orderers
  end

  # GET /runs/runned_list
  # AJAX partial: order list in the already-ordered state.
  def runned_list
    if request.xhr?
      get_existed_run
      get_orderers
      get_running_time
      render :partial => "runned_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  # GET /runs/running_list
  # AJAX partial: order list while the run is still open.
  def running_list
    if request.xhr?
      get_existed_run
      get_orderers
      get_running_time
      render :partial => "running_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  # POST /runs/submit
  # Records an order against the run and broadcasts it over Pusher so
  # other open pages update live.
  def submit
    if request.xhr?
      get_existed_run
      raise CoffeeRunError.new("Can not submit order") unless @run.orderers.create(order_params)
      get_orderers
      Pusher["private-runs_channel-#{@run.no}"].trigger("add_order", {
        id: session[:authorize_token],
        name: params[:order][:name],
        beverage: params[:order][:beverage]
      })
      # Mark this browser session as having ordered already.
      session[:ordered] = "true"
      get_running_time
      render :partial => "runned_list", :content_type => "text/html"
    else
      # Do nothing
    end
  end

  private

  # Strong-parameter whitelists.
  def run_params
    params.require(:run).permit(:runner, :running_time, :cups, :expired_at)
  end

  def order_params
    params.require(:order).permit(:name, :beverage)
  end

  # Loads the run addressed by params[:no] into @run. Missing runs
  # redirect to /404 on show and raise CoffeeRunError elsewhere.
  def get_existed_run
    @run = Run.find_by_no(params[:no]).to_a
    if @run.empty?
      if action_name == "show"
        redirect_to "/404" and return true
      else
        raise CoffeeRunError.new("Run not found")
      end
    end
    @run = @run.first
  end

  def get_orderers
    @orderers = @run.orderers.all.to_a
  end

  # Before-filter: validates create parameters and derives expired_at
  # from the requested running time (minutes).
  def valid_run_params_for_create
    raise CoffeeRunError.new("Missing or unvalid parameters") if params[:run].blank? || params[:run][:runner].blank? || params[:run][:running_time].blank? || params[:run][:cups].blank?
    params[:run][:expired_at] = Time.now + params[:run][:running_time].to_i.minutes
  end

  def valid_run_params_for_show
    # raise CoffeeRunError.new("Missing or unvalid parameters") if params[:no].blank?
    redirect_to "/404" if params[:no].blank?
  end

  def validate_order_params_for_submit
    invalid = false
    if params[:order].blank? || params[:order][:name].blank? || params[:order][:beverage].blank?
      invalid = true
    else
      invalid = isGibberish(params[:order][:beverage])
    end
    raise CoffeeRunError.new("Missing or unvalid parameters") if invalid
  end

  # Heuristic nonsense detector: counts vowels and consonants (each
  # seeded at 1) and flags text whose vowel ratio v/(v+c) falls outside
  # 0.3..0.6 as gibberish. Blank input is not gibberish.
  def isGibberish beverage
    v = 1
    c = 1
    gibberish = false
    if !beverage.blank?
      len = beverage.length - 1
      (0..len).each do |i|
        if beverage[i].match /[aeiou]/i
          v += 1
        elsif beverage[i].match /[bcdfghjklmnpqrstvwxyz]/i
          c += 1
        end
      end
      ratio = v * 1.0 /(c + v)
      if ratio < 0.3 || ratio > 0.6
        gibberish = true
      end
    end
    return gibberish
  end

  # Seconds remaining before expiry; 0 once this session has ordered.
  def get_running_time
    current_time = Time.now.utc
    if session[:ordered] == "true"
      @running_time = 0
    else
      @running_time = (@run.expired_at > current_time) ? (@run.expired_at - current_time).round : 0
    end
  end
end
|
# coding: utf-8
# WeChat storefront: authenticates visitors via the omniauth wechat
# strategy, auto-registers customers, and lists their orders.
class SiteController < ApplicationController
  # Landing page: resolve the visitor's WeChat uid (bouncing through
  # /auth/wechat once if unknown), create a Customer on first visit,
  # and list all products.
  def index
    if session[:uid].nil?
      if request.env['omniauth.auth'].nil?
        redirect_to '/auth/wechat'
        return
      end
      session[:uid] = request.env['omniauth.auth'][:uid]
    end
    @uid = session[:uid]
    #@uid = 'asdfgh'
    @customer = Customer.find_by wechat_id: @uid
    if @customer.nil?
      # NOTE(review): created with only wechat_id — any other columns
      # (e.g. a score/points field) rely on database defaults; confirm.
      @customer = Customer.create(:wechat_id => @uid)
    end
    @products = Product.all
  end

  # Order history: decodes each order's JSON context (product id =>
  # quantity) and rewrites it into a display string "name x qty ...".
  def order
    @uid = session[:uid]
    #@uid = 'asdfgh'
    @orders = Order.where(:wechat_id=>@uid).all
    @orders.each do |value|
      #byebug
      @context_hash = ActiveSupport::JSON.decode(value.context);
      value.context = '';
      @context_hash.each do |k, v|
        product = Product.find(k)
        # NOTE(review): assumes the decoded value v is a String — a
        # numeric quantity would raise TypeError on concatenation;
        # confirm the stored JSON format.
        value.context+=product.name+'x'+v+' '
      end
    end
  end
end
update
# coding: utf-8
# WeChat storefront: authenticates visitors via the omniauth wechat
# strategy, auto-registers customers, and lists their orders.
class SiteController < ApplicationController
  # Landing page: resolve the visitor's WeChat uid (bouncing through
  # /auth/wechat once if unknown), create a Customer on first visit
  # (score starts at 0), and list all products.
  def index
    if session[:uid].nil?
      if request.env['omniauth.auth'].nil?
        redirect_to '/auth/wechat'
        return
      end
      session[:uid] = request.env['omniauth.auth'][:uid]
    end
    @uid = session[:uid]
    #@uid = 'asdfgh'
    @customer = Customer.find_by wechat_id: @uid
    if @customer.nil?
      @customer = Customer.create(:wechat_id => @uid, :score=>0)
    end
    @products = Product.all
  end

  # Order history: decodes each order's JSON context (product id =>
  # quantity) and rewrites it into a display string "name x qty ...".
  def order
    @uid = session[:uid]
    #@uid = 'asdfgh'
    @orders = Order.where(:wechat_id=>@uid).all
    @orders.each do |value|
      #byebug
      @context_hash = ActiveSupport::JSON.decode(value.context);
      value.context = '';
      @context_hash.each do |k, v|
        product = Product.find(k)
        # NOTE(review): assumes the decoded value v is a String — a
        # numeric quantity would raise TypeError on concatenation;
        # confirm the stored JSON format.
        value.context+=product.name+'x'+v+' '
      end
    end
  end
end
|
# SOAP complex type: a QSO's identity key plus optimistic-lock metadata.
class QsoIdVersion < WashOut::Type
  map :version => :integer,
      :updatedBy => :string,
      :id => :string
  type_name 'QsoIdVersion'
  attr_accessor :id, :version, :updatedBy
end
# SOAP complex type: one logged contact (QSO) — split 64-bit timestamp,
# transmit/receive frequencies, band/mode/serial, free-form parts, and
# version/idKey/updatedBy bookkeeping.
class SoapQso < WashOut::Type
  map :time64H => :integer,
      :time64L => :integer,
      :xmitFreq => :double,
      :recvFreq => :double,
      :band => :integer,
      :station => :string,
      :mode => :integer,
      :dupe => :integer,
      :serial => :integer,
      :qsoparts => [:string],
      :version => :integer,
      :idKey => :string,
      :updatedBy => :string
  type_name 'Qso'
end
# SOAP complex type: a station rig's current frequency/mode report.
class RigFrequency < WashOut::Type
  map :station => :string,
      :networkLetter => :integer,
      :label => :string,
      :rigNumber => :integer,
      :xmitFreq => :double,
      :recvFreq => :double,
      :mode => :integer
  type_name 'RigFrequency'
end
# SOAP array wrapper for Qso elements.
class ArrayOfQso < WashOut::Type
  map :Qso => [SoapQso]
  type_name 'ArrayOfQso'
end
# SOAP array wrapper for QsoIdVersion elements (contest25 namespace).
class ArrayOfQsoIdVersion < WashOut::Type
  map :elements => [QsoIdVersion]
  type_name 'ArrayOfQsoIdVersion'
  attr_accessor :elements
  namespace 'contest25'
end
# SOAP array wrapper for plain integers.
class ArrayOfint < WashOut::Type
  map :int => [:integer]
  type_name 'ArrayOfint'
  attr_accessor :int
end
# SOAP array of strings; incoming payloads arrive as { :string => [...] }.
class ArrayOfstring < WashOut::Type
  map :string => [:string]
  type_name 'ArrayOfstring'
  attr_accessor :string
end
# SOAP array of RigFrequency reports.
class ArrayOfRigFrequency < WashOut::Type
  map :elements => [RigFrequency]
  type_name 'ArrayOfRigFrequency'
  attr_accessor :elements
end
# SOAP response payload: a batch of QSOs plus the server's log state
# (highest Qso id) after the exchange.
class QsoUpdate < WashOut::Type
  map :qsoArray => ArrayOfQso,
      :logState => :integer
  type_name 'QsoUpdate'
  attr_accessor :qsoArray, :logState
end
# SOAP response payload: the server log state plus id/version pairs for
# QSOs the client has not yet seen.
class LogSummary < WashOut::Type
  map :logState => :integer,
      :logSummaryIds => ArrayOfQsoIdVersion
  type_name 'LogSummary'
  namespace 'contest25'
end
# Wrapper type for the getQsosByKeyArray result element.
class GetQsosByKeyArrayResult < WashOut::Type
  map :value => LogSummary
  type_name 'getQsosByKeyArrayResult'
end
# SOAP endpoint (WashOut) implementing the ContestQsos2 contest-logging
# protocol: session handshake, column mapping, QSO log synchronization and
# rig frequency exchange.
# NOTE(review): this revision was superseded by one that serializes the
# ColumnNamesToIndices result in the "contest26" namespace.
class SoapController < ApplicationController
  soap_service namespace: 'urn:ContestQsos2',
               soap_action_routing: false,
               additional_namespaces: {"contest25" => 'http://schemas.datacontract.org/2004/07/ContestQsos' }

  soap_action 'GetSessionId',
              :args => nil,
              :return => { 'tns:GetSessionIdResult' => :string },
              :response_tag => 'GetSessionIdResponse',
              :to => :get_session_id
  # Returns a fixed token; sessions are not tracked server-side.
  def get_session_id
    render :soap => { 'tns:GetSessionIdResult' => '123456' }
  end

  soap_action 'ColumnNamesToIndices',
              :args => { :SessionId => :string, :ColumnNames => ArrayOfstring },
              :return => { 'tns:ColumnNamesToIndicesResult' => ArrayOfint },
              :response_tag => 'ColumnNamesToIndicesResponse',
              :to => :column_names_to_indices
  # Maps client column names to stable indices: four well-known columns get
  # fixed slots 0-3; every other name is numbered 4.. in arrival order.
  def column_names_to_indices
    session_id = params[:SessionId]
    column_names = params[:ColumnNames]
    puts column_names
    # ArrayOfstring payloads arrive wrapped as { :string => [...] }.
    column_names = column_names[:string]
    column_indices = []
    i = 4
    column_names.each do |column_name|
      case column_name
      when 'APP_WRITELOG_RCV'
        column_indices.push 0
      when 'ARRL_SECT'
        column_indices.push 1
      when 'APP_WRITELOG_C'
        column_indices.push 2
      when 'APP_WRITELOG_PREF'
        column_indices.push 3
      else
        column_indices.push i
        i += 1
      end
    end
    render :soap => { 'tns:ColumnNamesToIndicesResult' => { :int => column_indices } }
  end

  soap_action 'AddAndGetLogSummary',
              :args => { :SessionId => :string, :QsoAddArray => ArrayOfQso, :OldState => :integer, :MaxRequested => :integer },
              :return => { 'tns:AddAndGetLogSummaryResult' => LogSummary },
              :response_tag => 'AddAndGetLogSummaryResponse',
              :to => :add_and_get_log_summary
  # Unrouted alternate implementation that renders a canned XML template.
  def add_and_get_log_summary2
    render file: 'soap/add_and_get_log_summary.xml.erb', content_type: 'text/xml'
  end

  # Persists each incoming SOAP QSO hash as a Qso record; the first four
  # qsoparts entries are the contest exchange fields.
  def add_qsos(new_qsos)
    new_qsos.each do |qso|
      puts qso
      new_qso = Qso.new
      new_qso.time_upper = qso[:time64H]
      new_qso.time_lower = qso[:time64L]
      new_qso.transmit_frequency = qso[:xmitFreq]
      new_qso.receive_frequency = qso[:recvFreq]
      new_qso.band = qso[:band]
      new_qso.station = qso[:station]
      new_qso.mode = qso[:mode]
      new_qso.dupe = qso[:dupe]
      new_qso.serial = qso[:serial]
      new_qso.version = qso[:version]
      new_qso.id_key = qso[:idKey]
      new_qso.updated_by = qso[:updatedBy]
      qso_parts = qso[:qsoparts]
      new_qso.operating_class = qso_parts[0]
      new_qso.section = qso_parts[1]
      new_qso.c_field = qso_parts[2]
      new_qso.country_prefix = qso_parts[3]
      new_qso.save
    end
  end

  # Stores the client's new QSOs and returns id/version pairs for the QSOs
  # the client has not yet seen (ids greater than OldState).
  def add_and_get_log_summary
    old_log_state = params[:OldState]
    new_qsos = params[:QsoAddArray][:Qso] || []
    max_requested = params[:MaxRequested]
    summary_ids = []
    qsos_in_update = Qso.where('id > ?', old_log_state).limit(max_requested)
    qsos_in_update.each do |qso|
      summary_ids << { id: qso.id_key, version: qso.version, updatedBy: qso.updated_by }
    end
    add_qsos(new_qsos)
    new_log_state = 0
    new_log_state = Qso.last.id unless Qso.last.nil?
    render :soap => { 'tns:AddAndGetLogSummaryResult' => { :logState => new_log_state, :logSummaryIds => { :elements => summary_ids } } }
  end

  soap_action 'addAndGetQsos',
              :args => { :SessionId => :string, :QsoAddArray => ArrayOfQso, :OldState => :integer, :MaxRequested => :integer },
              :return => { 'tns:AddAndGetQsoResult' => QsoUpdate },
              :response_tag => 'AddAndGetQsoResponse',
              :to => :add_and_get_qsos
  # Stores the client's new QSOs and streams back up to MaxRequested QSOs
  # the client has not yet seen, together with the new log state.
  def add_and_get_qsos
    old_log_state = params[:OldState]
    new_qsos = params[:QsoAddArray][:Qso] || []
    max_requested = params[:MaxRequested]
    qso_array = []
    new_log_state = old_log_state
    Qso.where('id > ?', old_log_state).limit(max_requested).each do |qso|
      new_log_state = qso.id
      qso_array << qso.to_soap_qso
    end
    # NOTE(review): add_qsos returns the array passed to #each, not an id,
    # so the integer comparison below can never be true — TODO confirm the
    # intended semantics. Fixed-ish in the later revision.
    last_log_added_id = add_qsos(new_qsos)
    # We're up to date if we've given the client back all the
    # qsos in the database, since their last update. This would
    # not be the case if there are more qsos than their max_requested
    # or another client inserted a record since our query
    new_log_state = Qso.last.id if last_log_added_id == (new_log_state + new_qsos.length)
    # NOTE(review): ArrayOfQso maps :Qso, not :elements — verify this
    # serializes as intended.
    render :soap => { 'tns:AddAndGetQsoResult' => { :qsoArray => { :elements => qso_array }, :logState => new_log_state } }
  end

  soap_action 'getQsosByKeyArray',
              :args => { :SessionId => :string, :QsoKeyarray => ArrayOfstring },
              :return => { 'tns:getQsosByKeyArrayResult' => QsoUpdate },
              :response_tag => 'getQsosByKeyArrayResponse',
              :to => :get_qso_by_key_array
  # Looks up full QSO records for a set of id keys.
  # NOTE(review): QsoKeyarray arrives wrapped ({ :string => [...] }) and
  # `log_state` below is never defined, so this action raises NameError —
  # fixed in the later revision of this controller.
  def get_qso_by_key_array
    qso_keys = params[:QsoKeyarray] || []
    qso_array = []
    begin
      qso_keys.each do |key|
        qso = Qso.find_by! id_key: key
        qso_array << qso.to_soap_qso
      end
      render :soap => { 'tns:getQsosByKeyArrayResult' => { :qsoArray => qso_array, :logState => log_state } }
    rescue ActiveRecord::RecordNotFound
      # TODO: throw a SOAP exception here
    end
  end

  soap_action 'ExchangeFrequencies',
              :args => { :IncomingFreqs => ArrayOfRigFrequency },
              :return => { 'tns:ExchangeFrequenciesResult' => ArrayOfRigFrequency },
              :response_tag => 'ExchangeFrequenciesResponse',
              :to => :exchange_frequencies
  # Upserts the caller's rig reports, expires reports older than ten
  # minutes, and returns every rig currently known on the network.
  def exchange_frequencies
    rig_array_in = params[:IncomingFreqs][:elements] || []
    rig_array_out = []
    rig_array_in.each do |rig_info|
      rig = Rig.where(letter: rig_info[:networkLetter]).where(rig_number: rig_info[:rigNumber]).first_or_create do |rig|
        # rig.letter = rig_info[:networkLetter]
        # rig.rig_number = rig_info[:rigNumber]
        rig.station = rig_info[:station]
        rig.label = rig_info[:label]
        rig.mode = rig_info[:mode]
        rig.transmit_frequency = rig_info[:xmitFreq]
        rig.receive_frequency = rig_info[:recvFreq]
      end
      rig.save
    end
    Rig.where('updated_at < ?', 10.minutes.ago).delete_all
    Rig.all.each do |rig|
      rig_array_out << rig.to_soap_rig
    end
    # NOTE(review): ArrayOfRigFrequency maps :elements, not :rf_el —
    # verify this serializes as intended.
    render :soap => { 'tns:ExchangeFrequenciesResult' => { :rf_el => rig_array_out } }
  end
end
Fix ColumnNamesToIndices response serialization: emit the integer array in the Microsoft serialization-arrays ("contest26") namespace so .NET clients can deserialize it.
# WashOut SOAP type: identifies one QSO by its key plus its version and
# the station that last updated it (used in log-summary exchanges).
class QsoIdVersion < WashOut::Type
  map :version => :integer,
      :updatedBy => :string,
      :id => :string
  type_name 'QsoIdVersion'
  attr_accessor :id, :version, :updatedBy
end
# WashOut SOAP type: one QSO (contact) record as exchanged over the wire.
# time64H/time64L appear to be the high/low halves of a 64-bit timestamp —
# TODO confirm against the client. Exchange fields travel in qsoparts.
class SoapQso < WashOut::Type
  map :time64H => :integer,
      :time64L => :integer,
      :xmitFreq => :double,
      :recvFreq => :double,
      :band => :integer,
      :station => :string,
      :mode => :integer,
      :dupe => :integer,
      :serial => :integer,
      :qsoparts => [:string],
      :version => :integer,
      :idKey => :string,
      :updatedBy => :string
  type_name 'Qso'
end
# WashOut SOAP type: current frequency/mode report for one rig on the
# station network (identified by network letter + rig number).
class RigFrequency < WashOut::Type
  map :station => :string,
      :networkLetter => :integer,
      :label => :string,
      :rigNumber => :integer,
      :xmitFreq => :double,
      :recvFreq => :double,
      :mode => :integer
  type_name 'RigFrequency'
end
# SOAP array wrapper whose repeated element is named "Qso".
class ArrayOfQso < WashOut::Type
  map :Qso => [SoapQso]
  type_name 'ArrayOfQso'
end
# SOAP array of QsoIdVersion entries, serialized in the "contest25"
# data-contract namespace for .NET clients.
class ArrayOfQsoIdVersion < WashOut::Type
  map :elements => [QsoIdVersion]
  type_name 'ArrayOfQsoIdVersion'
  attr_accessor :elements
  namespace 'contest25'
end
# SOAP array of integers, serialized in the Microsoft serialization-arrays
# namespace ("contest26") so .NET clients deserialize it correctly.
class ArrayOfint < WashOut::Type
  map 'contest26:int' => [:integer]
  type_name 'ArrayOfint'
  namespace 'contest26'
end
# SOAP array of strings; incoming payloads arrive as { :string => [...] }.
class ArrayOfstring < WashOut::Type
  map :string => [:string]
  type_name 'ArrayOfstring'
  attr_accessor :string
end
# SOAP array of RigFrequency reports.
class ArrayOfRigFrequency < WashOut::Type
  map :elements => [RigFrequency]
  type_name 'ArrayOfRigFrequency'
  attr_accessor :elements
end
# SOAP response payload: a batch of QSOs plus the server's log state
# (highest Qso id) after the exchange.
class QsoUpdate < WashOut::Type
  map :qsoArray => ArrayOfQso,
      :logState => :integer
  type_name 'QsoUpdate'
  attr_accessor :qsoArray, :logState
end
# SOAP response payload: the server log state plus id/version pairs for
# QSOs the client has not yet seen.
class LogSummary < WashOut::Type
  map :logState => :integer,
      :logSummaryIds => ArrayOfQsoIdVersion
  type_name 'LogSummary'
  namespace 'contest25'
end
# Wrapper type for the getQsosByKeyArray result element.
class GetQsosByKeyArrayResult < WashOut::Type
  map :value => LogSummary
  type_name 'getQsosByKeyArrayResult'
end
# SOAP endpoint (WashOut) implementing the ContestQsos2 contest-logging
# protocol: session handshake, column mapping, QSO log synchronization and
# rig frequency exchange.
class SoapController < ApplicationController
  soap_service namespace: 'urn:ContestQsos2',
               soap_action_routing: false,
               additional_namespaces: {
                 'contest25' => 'http://schemas.datacontract.org/2004/07/ContestQsos',
                 'contest26' => 'http://schemas.microsoft.com/2003/10/Serialization/Arrays'
               }

  soap_action 'GetSessionId',
              :args => nil,
              :return => { 'tns:GetSessionIdResult' => :string },
              :response_tag => 'GetSessionIdResponse',
              :to => :get_session_id
  # Returns a fixed token; sessions are not tracked server-side.
  def get_session_id
    render :soap => { 'tns:GetSessionIdResult' => '123456' }
  end

  soap_action 'ColumnNamesToIndices',
              :args => { :SessionId => :string, :ColumnNames => ArrayOfstring },
              :return => { 'tns:ColumnNamesToIndicesResult' => ArrayOfint },
              :response_tag => 'ColumnNamesToIndicesResponse',
              :to => :column_names_to_indices
  # Maps client column names to stable indices: four well-known columns get
  # fixed slots 0-3; every other name is numbered 4.. in arrival order.
  def column_names_to_indices
    # ArrayOfstring payloads arrive wrapped as { :string => [...] }.
    column_names = params[:ColumnNames][:string]
    column_indices = []
    i = 4
    column_names.each do |column_name|
      case column_name
      when 'APP_WRITELOG_RCV'
        column_indices.push 0
      when 'ARRL_SECT'
        column_indices.push 1
      when 'APP_WRITELOG_C'
        column_indices.push 2
      when 'APP_WRITELOG_PREF'
        column_indices.push 3
      else
        column_indices.push i
        i += 1
      end
    end
    render :soap => { 'tns:ColumnNamesToIndicesResult' => { 'contest26:int' => column_indices } }
  end

  soap_action 'AddAndGetLogSummary',
              :args => { :SessionId => :string, :QsoAddArray => ArrayOfQso, :OldState => :integer, :MaxRequested => :integer },
              :return => { 'tns:AddAndGetLogSummaryResult' => LogSummary },
              :response_tag => 'AddAndGetLogSummaryResponse',
              :to => :add_and_get_log_summary
  # Unrouted alternate implementation that renders a canned XML template.
  def add_and_get_log_summary2
    render file: 'soap/add_and_get_log_summary.xml.erb', content_type: 'text/xml'
  end

  # Persists each incoming SOAP QSO hash as a Qso record; the first four
  # qsoparts entries are the contest exchange fields.
  # BUG FIX: now returns the id of the last record saved (nil when the
  # batch is empty) so callers can reason about the resulting log state —
  # previously it returned the input array, making the up-to-date check in
  # add_and_get_qsos always false.
  def add_qsos(new_qsos)
    last_saved_id = nil
    new_qsos.each do |qso|
      new_qso = Qso.new
      new_qso.time_upper = qso[:time64H]
      new_qso.time_lower = qso[:time64L]
      new_qso.transmit_frequency = qso[:xmitFreq]
      new_qso.receive_frequency = qso[:recvFreq]
      new_qso.band = qso[:band]
      new_qso.station = qso[:station]
      new_qso.mode = qso[:mode]
      new_qso.dupe = qso[:dupe]
      new_qso.serial = qso[:serial]
      new_qso.version = qso[:version]
      new_qso.id_key = qso[:idKey]
      new_qso.updated_by = qso[:updatedBy]
      qso_parts = qso[:qsoparts]
      new_qso.operating_class = qso_parts[0]
      new_qso.section = qso_parts[1]
      new_qso.c_field = qso_parts[2]
      new_qso.country_prefix = qso_parts[3]
      new_qso.save
      last_saved_id = new_qso.id
    end
    last_saved_id
  end

  # Stores the client's new QSOs and returns id/version pairs for the QSOs
  # the client has not yet seen (ids greater than OldState).
  def add_and_get_log_summary
    old_log_state = params[:OldState]
    new_qsos = params[:QsoAddArray][:Qso] || []
    max_requested = params[:MaxRequested]
    summary_ids = []
    qsos_in_update = Qso.where('id > ?', old_log_state).limit(max_requested)
    qsos_in_update.each do |qso|
      summary_ids << { id: qso.id_key, version: qso.version, updatedBy: qso.updated_by }
    end
    add_qsos(new_qsos)
    last_qso = Qso.last
    new_log_state = last_qso.nil? ? 0 : last_qso.id
    render :soap => { 'tns:AddAndGetLogSummaryResult' => { :logState => new_log_state, :logSummaryIds => { :elements => summary_ids } } }
  end

  soap_action 'addAndGetQsos',
              :args => { :SessionId => :string, :QsoAddArray => ArrayOfQso, :OldState => :integer, :MaxRequested => :integer },
              :return => { 'tns:AddAndGetQsoResult' => QsoUpdate },
              :response_tag => 'AddAndGetQsoResponse',
              :to => :add_and_get_qsos
  # Stores the client's new QSOs and streams back up to MaxRequested QSOs
  # the client has not yet seen, together with the new log state.
  def add_and_get_qsos
    old_log_state = params[:OldState]
    new_qsos = params[:QsoAddArray][:Qso] || []
    max_requested = params[:MaxRequested]
    qso_array = []
    new_log_state = old_log_state
    Qso.where('id > ?', old_log_state).limit(max_requested).each do |qso|
      new_log_state = qso.id
      qso_array << qso.to_soap_qso
    end
    last_log_added_id = add_qsos(new_qsos)
    # We're up to date if we've given the client back all the qsos in the
    # database since their last update. This would not be the case if
    # there are more qsos than their max_requested or another client
    # inserted a record since our query. (add_qsos now really returns the
    # last inserted id, so this comparison is meaningful.)
    new_log_state = Qso.last.id if !last_log_added_id.nil? && last_log_added_id == (new_log_state + new_qsos.length)
    # BUG FIX: ArrayOfQso maps its repeated element as :Qso, so the
    # response hash must use that key (was :elements).
    render :soap => { 'tns:AddAndGetQsoResult' => { :qsoArray => { :Qso => qso_array }, :logState => new_log_state } }
  end

  soap_action 'getQsosByKeyArray',
              :args => { :SessionId => :string, :QsoKeyarray => ArrayOfstring },
              :return => { 'tns:getQsosByKeyArrayResult' => QsoUpdate },
              :response_tag => 'getQsosByKeyArrayResponse',
              :to => :get_qso_by_key_array
  # Looks up full QSO records for a set of id keys.
  def get_qso_by_key_array
    # BUG FIX: ArrayOfstring payloads arrive wrapped as { :string => [...] }
    # (see column_names_to_indices), and `log_state` was previously never
    # defined, which raised NameError on every call.
    qso_keys = (params[:QsoKeyarray] || {})[:string] || []
    last_qso = Qso.last
    log_state = last_qso.nil? ? 0 : last_qso.id
    qso_array = []
    begin
      qso_keys.each do |key|
        qso = Qso.find_by! id_key: key
        qso_array << qso.to_soap_qso
      end
      render :soap => { 'tns:getQsosByKeyArrayResult' => { :qsoArray => { :Qso => qso_array }, :logState => log_state } }
    rescue ActiveRecord::RecordNotFound
      # TODO: raise a proper SOAP fault for unknown keys instead of
      # silently returning no response body.
    end
  end

  soap_action 'ExchangeFrequencies',
              :args => { :IncomingFreqs => ArrayOfRigFrequency },
              :return => { 'tns:ExchangeFrequenciesResult' => ArrayOfRigFrequency },
              :response_tag => 'ExchangeFrequenciesResponse',
              :to => :exchange_frequencies
  # Upserts the caller's rig reports, expires reports older than ten
  # minutes, and returns every rig currently known on the network.
  def exchange_frequencies
    rig_array_in = params[:IncomingFreqs][:elements] || []
    rig_array_in.each do |rig_info|
      rig = Rig.where(letter: rig_info[:networkLetter]).where(rig_number: rig_info[:rigNumber]).first_or_create do |rig|
        rig.station = rig_info[:station]
        rig.label = rig_info[:label]
        rig.mode = rig_info[:mode]
        rig.transmit_frequency = rig_info[:xmitFreq]
        rig.receive_frequency = rig_info[:recvFreq]
      end
      rig.save
    end
    Rig.where('updated_at < ?', 10.minutes.ago).delete_all
    rig_array_out = Rig.all.map { |rig| rig.to_soap_rig }
    # BUG FIX: ArrayOfRigFrequency maps its repeated element as :elements,
    # so the response hash must use that key (was :rf_el).
    render :soap => { 'tns:ExchangeFrequenciesResult' => { :elements => rig_array_out } }
  end
end
|
require 'rubygems'
require 'sinatra'
require 'pony'

# Cinelandia: tiny Sinatra ticket-purchase app. Site constants used by
# the ERB views.
SITE_TITLE = "Cinelandia"
SITE_DESCRIPTION = "Las mejores Películas del mundo mundial."
PELICULA_DE_LA_SEMANA = "Inside Out"

# Landing page.
get '/' do
  erb :home
end

# Purchase form.
get'/compraform' do
  erb :compraform
end

# Handles the purchase form: emails the buyer a confirmation containing a
# link back to their ticket page, then renders the confirmation view.
# NOTE(review): Socket.gethostname is the server's OS hostname, which is
# usually not a routable address for the emailed link — replaced by the
# request host in the later revision.
post '/confirmation' do
  require 'socket'
  hostname = Socket.gethostname
  name = params[:nombre]
  apellido = params[:apellido]
  correo = params[:correo]
  telefono = params[:telefono]
  listfilms = params[:listfilms]
  # Hard-coded placeholder ticket id — TODO confirm where real ids come from.
  id = 165
  # SendGrid SMTP credentials come from the environment (Heroku add-on).
  Pony.options = {
    :from => 'noreply@esquemacreativo.com',
    :via => :smtp,
    :via_options => {
      :address => 'smtp.sendgrid.net',
      :port => '587',
      :domain => 'heroku.com',
      :user_name => ENV['SENDGRID_USERNAME'],
      :password => ENV['SENDGRID_PASSWORD'],
      :authentication => :plain,
      :enable_starttls_auto => true
    }
  }
  Pony.mail(:subject=> 'Confirmación compra de Ticket ' + name, :to => correo, :body => 'Ingresa al siguiente link: http://' + hostname +'/' + name.gsub(/\s/,'-') + '/' + listfilms.gsub(/\s/,'-') + '/' + id.to_s)
  erb :confirmation , :locals => {'name' => name, 'apellido' => apellido, 'correo' => correo, 'telefono' => telefono, 'film' => listfilms}
end

get '/ticket' do
  erb :ticket
end

# Ticket permalink built by the confirmation email.
get '/:name/:listfilms/:id' do
  name = params[:name]
  listfilms = params[:listfilms]
  id = params[:id]
  erb :ticket, :locals => {'name' => name, 'film' => listfilms, 'id' => id}
end
Build the ticket link from the requested host instead of the server's OS hostname.
require 'rubygems'
require 'sinatra'
require 'pony'

# Cinelandia: tiny Sinatra ticket-purchase app. Site constants used by
# the ERB views.
SITE_TITLE = "Cinelandia"
SITE_DESCRIPTION = "Las mejores Películas del mundo mundial."
PELICULA_DE_LA_SEMANA = "Inside Out"

# Landing page.
get '/' do
  erb :home
end

# Purchase form.
get '/compraform' do
  erb :compraform
end

# Handles the purchase form: emails the buyer a confirmation containing a
# link back to their ticket page, then renders the confirmation view.
post '/confirmation' do
  # BUG FIX: `uri` was never defined here, so this route raised NameError
  # on every purchase. Use the host (and port) the client actually
  # requested so the emailed link points back at this application.
  hostname = request.host_with_port
  name = params[:nombre]
  apellido = params[:apellido]
  correo = params[:correo]
  telefono = params[:telefono]
  listfilms = params[:listfilms]
  # Hard-coded placeholder ticket id — TODO confirm where real ids come from.
  id = 165
  # SendGrid SMTP credentials come from the environment (Heroku add-on).
  Pony.options = {
    :from => 'noreply@esquemacreativo.com',
    :via => :smtp,
    :via_options => {
      :address => 'smtp.sendgrid.net',
      :port => '587',
      :domain => 'heroku.com',
      :user_name => ENV['SENDGRID_USERNAME'],
      :password => ENV['SENDGRID_PASSWORD'],
      :authentication => :plain,
      :enable_starttls_auto => true
    }
  }
  Pony.mail(:subject=> 'Confirmación compra de Ticket ' + name, :to => correo, :body => 'Ingresa al siguiente link: http://' + hostname +'/' + name.gsub(/\s/,'-') + '/' + listfilms.gsub(/\s/,'-') + '/' + id.to_s)
  erb :confirmation , :locals => {'name' => name, 'apellido' => apellido, 'correo' => correo, 'telefono' => telefono, 'film' => listfilms}
end

get '/ticket' do
  erb :ticket
end

# Ticket permalink built by the confirmation email.
get '/:name/:listfilms/:id' do
  name = params[:name]
  listfilms = params[:listfilms]
  id = params[:id]
  erb :ticket, :locals => {'name' => name, 'film' => listfilms, 'id' => id}
end
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/observation/version', __FILE__)

# Gem packaging metadata for the observation gem.
# NOTE(review): description/summary still contain TODO placeholders
# (`gem build` rejects gemspecs containing "TODO"); filled in by the
# later revision of this file.
Gem::Specification.new do |gem|
  gem.authors       = ["Chad Remesch"]
  gem.email         = ["chad@remesch.com"]
  gem.description   = %q{TODO: Write a gem description}
  gem.summary       = %q{TODO: Write a gem summary}
  gem.homepage      = ""
  gem.files         = `git ls-files`.split($\)
  gem.executables   = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
  gem.test_files    = gem.files.grep(%r{^(test|spec|features)/})
  gem.name          = "observation"
  gem.require_paths = ["lib"]
  gem.version       = Observation::VERSION
end
Updated gemspec
# -*- encoding: utf-8 -*-
require_relative 'lib/observation/version'

# Packaging metadata for the Observation event-system gem.
Gem::Specification.new do |spec|
  spec.name          = "observation"
  spec.version       = Observation::VERSION
  spec.authors       = ["Chad Remesch"]
  spec.email         = ["chad@remesch.com"]
  spec.homepage      = "https://github.com/chadrem/observation"
  spec.description   = "Add an event system to any Ruby class."
  spec.summary       = "Observation lets you add a simple event system to any Ruby class. Use it to decrease coupling between your objects."
  spec.require_paths = ["lib"]

  # Package everything tracked by git; binaries and test files are derived
  # from that list.
  spec.files       = `git ls-files`.split($\)
  spec.executables = spec.files.grep(%r{^bin/}).map { |f| File.basename(f) }
  spec.test_files  = spec.files.grep(%r{^(test|spec|features)/})
end
|
# Homebrew/Linuxbrew formula for PILER, a tool for analyzing repetitive
# DNA in genome sequences.
class Piler < Formula
  desc "Analyze repetitive DNA found in genome sequences"
  homepage "http://drive5.com/piler/"

  bottle do
    cellar :any_skip_relocation
    sha256 "dcb0a2bc15dd739eee21b80db14660b600308a612d0091bfe9cd3161dc3f009d" => :el_capitan
    sha256 "62f3c8a1acf4a28f266937011da857c63608d26c10c2f2a690ce05c9223de17f" => :yosemite
    sha256 "0efdbf451eb1d240ed255ae1dfe2b907c417fb020a99c3496988b94ca721b225" => :mavericks
  end

  # doi "10.1093/bioinformatics/bti1003"
  # tag "bioinformatics"

  version "1.0"
  revision 1

  # Upstream ships separate tarballs for OS X and generic source.
  if OS.mac?
    url "http://drive5.com/piler/piler_pals_osx_src.tar.gz"
    sha256 "68c20b68a6bc73224e15bbaddfb982a7f0b38e04324e25f98d387b7186f981a4"
  elsif OS.linux?
    url "http://drive5.com/piler/piler_source.tar.gz"
    sha256 "3b1be91c497bdb22eac8032a60c9f815bdcd03edc5b25d925314a02f54bec44f"
  else
    raise "Unknown operating system"
  end

  depends_on "muscle"

  def install
    # The OS X tarball unpacks into a "piler" subdirectory.
    cd (if OS.mac? then "piler" else "." end) do
      system "make", "CC=#{ENV.cc} -c", "GPP=#{ENV.cxx}", "LDLIBS=-lm" # remove -static
      if OS.mac?
        bin.install "piler"
      else
        # Why is the executable named differently?
        bin.install "piler2" => "piler"
      end
    end
  end

  test do
    system "#{bin}/piler", "-version"
  end
end
piler: update 1.0_1 bottle for Linuxbrew.
Closes Linuxbrew/homebrew-science#169.
Signed-off-by: Shaun Jackman <b580dab3251a9622aba3803114310c23fdb42900@gmail.com>
# Homebrew/Linuxbrew formula for PILER, a tool for analyzing repetitive
# DNA in genome sequences (this revision adds the Linuxbrew bottle).
class Piler < Formula
  desc "Analyze repetitive DNA found in genome sequences"
  homepage "http://drive5.com/piler/"

  bottle do
    cellar :any_skip_relocation
    sha256 "dcb0a2bc15dd739eee21b80db14660b600308a612d0091bfe9cd3161dc3f009d" => :el_capitan
    sha256 "62f3c8a1acf4a28f266937011da857c63608d26c10c2f2a690ce05c9223de17f" => :yosemite
    sha256 "0efdbf451eb1d240ed255ae1dfe2b907c417fb020a99c3496988b94ca721b225" => :mavericks
    sha256 "ca33e46d6173d1b7008a3c6488cbe9c650cabb3383547acce611963d88da12a1" => :x86_64_linux
  end

  # doi "10.1093/bioinformatics/bti1003"
  # tag "bioinformatics"

  version "1.0"
  revision 1

  # Upstream ships separate tarballs for OS X and generic source.
  if OS.mac?
    url "http://drive5.com/piler/piler_pals_osx_src.tar.gz"
    sha256 "68c20b68a6bc73224e15bbaddfb982a7f0b38e04324e25f98d387b7186f981a4"
  elsif OS.linux?
    url "http://drive5.com/piler/piler_source.tar.gz"
    sha256 "3b1be91c497bdb22eac8032a60c9f815bdcd03edc5b25d925314a02f54bec44f"
  else
    raise "Unknown operating system"
  end

  depends_on "muscle"

  def install
    # The OS X tarball unpacks into a "piler" subdirectory.
    cd (if OS.mac? then "piler" else "." end) do
      system "make", "CC=#{ENV.cc} -c", "GPP=#{ENV.cxx}", "LDLIBS=-lm" # remove -static
      if OS.mac?
        bin.install "piler"
      else
        # Why is the executable named differently?
        bin.install "piler2" => "piler"
      end
    end
  end

  test do
    system "#{bin}/piler", "-version"
  end
end
|
# Team page (redmine_team_page plugin): lists the users shown on /team.
class TeamController < ApplicationController
  def index
    group = Setting.plugin_redmine_team_page['group'].to_i
    # Inline rescue swallows *every* error, not just RecordNotFound, when
    # no/invalid group id is configured.
    group = Group.find(group) rescue nil
    if group.nil?
      # NOTE(review): User.all includes Redmine's built-in anonymous user
      # here; excluded in the later revision of this controller.
      @users = User.all
    else
      @users = User.in_group(group)
    end
    @users.sort! {|a,b| a.name <=> b.name }
  end
end
Remove anonymous user
# Team page (redmine_team_page plugin): lists the users shown on /team.
class TeamController < ApplicationController
  # Shows the members of the group configured in the plugin settings, or —
  # when no valid group is configured — every user except Redmine's
  # built-in anonymous user. Users are sorted by display name.
  def index
    group_id = Setting.plugin_redmine_team_page['group'].to_i
    # Nil-safe lookup instead of `Group.find(...) rescue nil`, which
    # silently swallowed every error, not just RecordNotFound.
    group = Group.where(:id => group_id).first
    @users =
      if group.nil?
        User.all - [User.anonymous]
      else
        User.in_group(group).to_a
      end
    # sort_by with a key is clearer (and cheaper) than an in-place
    # comparator sort on a relation-derived collection.
    @users = @users.sort_by(&:name)
  end
end
|
# -*- coding: utf-8 -*-
require 'net/https'
require 'json'
require 'twitter'
# Fetches the authenticated user's received-events feed from the GitHub
# API over TLS. Returns the raw Net::HTTPResponse; the caller checks the
# status code.
# NOTE(review): authenticating via the `?access_token=` query parameter
# is deprecated by GitHub and leaks the token into URLs/logs — addressed
# in the later revision.
def download_events
  github_user = ENV['GITHUB_USER']
  github_oauth_token = ENV['GITHUB_PERSONAL_ACCESS_TOKEN']
  https = Net::HTTP.new('api.github.com',443)
  https.use_ssl = true
  https.start {
    https.get("/users/#{github_user}/received_events?access_token=#{github_oauth_token}")
  }
end
# Converts the GitHub events JSON string +s+ into an array of
# { created_at:, content:, url: } hashes, oldest first. Each supported
# event type gets a short tag and a one-line summary; unsupported types
# fall through with an empty tag and no URL.
def to_array(s)
  xs = JSON.parse(s).map {|json|
    created_at = json["created_at"]
    user = json["actor"]["login"]
    repo = json["repo"]["name"]
    # "WatchEvent" -> "Watch", etc.
    type = json["type"].sub(/Event$/, "")
    content = "#{user}, #{repo}"
    url = ""
    payload = json["payload"]
    case type
    when "CommitComment"
      short_type = "CommitComment"
      url = payload["comment"]["html_url"]
    when "Create"
      short_type = "Create"
      ref_type = payload["ref_type"]
      content += "\n#{ref_type}"
      url = "https://github.com/#{repo}"
    when "Delete"
      short_type = "Delete"
      ref_type = payload["ref_type"]
      content += "\n#{ref_type}"
      url = "https://github.com/#{repo}"
    when "Fork"
      short_type = "Fork"
      full_name = payload["forkee"]["full_name"]
      content += "\n#{full_name}"
      url = payload["forkee"]["html_url"]
    when "Gollum"
      short_type = "Gollum"
      url = "https://github.com/#{repo}/wiki"
    when "IssueComment"
      short_type = "IssueComment"
      issue_title = payload["issue"]["title"]
      content += "\n\"#{issue_title}\""
      url = payload["comment"]["html_url"]
    when "Issues"
      short_type = "Issues"
      action = payload["action"]
      issue_title = payload["issue"]["title"]
      content += "\n#{action} \"#{issue_title}\""
      url = payload["issue"]["html_url"]
    when "Member"
      short_type = "Member"
      action = payload["action"]
      acted_user = payload["member"]["login"]
      content += "\n#{action} \"#{acted_user}\""
      url = "https://github.com/#{repo}"
    when "PullRequest"
      short_type = "PR"
      action = payload["action"]
      title = payload["pull_request"]["title"]
      content += "\n#{action} \"#{title}\""
      url = payload["pull_request"]["html_url"]
    when "PullRequestReviewComment"
      short_type = "PRReviewComment"
      action = payload["action"]
      pull_request_title = payload["pull_request"]["title"]
      content += "\n#{action} \"#{pull_request_title}\""
      url = payload["comment"]["html_url"]
    when "Push"
      short_type = "Push"
      # Link to the compare view between the pushed-from/to commits.
      before = payload["before"].slice(0, 10)
      head = payload["head"].slice(0, 10)
      url = "https://github.com/#{repo}/compare/#{before}...#{head}"
    when "Release"
      short_type = "Release"
      action = payload["action"]
      tag_name = payload["release"]["tag_name"]
      content += "\n#{action} \"#{tag_name}\""
      url = payload["release"]["html_url"]
    when "TeamAdd"
      short_type = "TeamAdd"
      team_name = payload["team"]["name"]
      content += "\n#{team_name}"
      url = "https://github.com/#{repo}"
    when "Watch"
      short_type = "Watch"
      action = payload["action"]
      content += "\n#{action}"
      url = "https://github.com/#{repo}"
    end
    # NOTE(review): for unhandled event types short_type is nil here, so
    # the tag renders as "[]" — a default branch was added later.
    {
      created_at: created_at,
      content: "#{created_at} [#{short_type}]\n#{content}",
      url: url
    }
  }
  # The API returns newest first; reverse to tweet oldest first.
  xs.reverse
end
# Builds an authenticated Twitter REST client from the *_FOR_GITHUB
# environment variables.
def new_twitter_client
  twitter_consumer_key = ENV['TWITTER_CONSUMER_KEY_FOR_GITHUB']
  twitter_consumer_secret = ENV['TWITTER_CONSUMER_SECRET_FOR_GITHUB']
  twitter_access_token = ENV['TWITTER_ACCESS_TOKEN_FOR_GITHUB']
  twitter_access_token_secret = ENV['TWITTER_ACCESS_TOKEN_SECRET_FOR_GITHUB']
  Twitter::REST::Client.new do |config|
    config.consumer_key = twitter_consumer_key
    config.consumer_secret = twitter_consumer_secret
    config.access_token = twitter_access_token
    config.access_token_secret = twitter_access_token_secret
  end
end
# Posts +message+ as a status update through the given Twitter client.
def tweet(client, message)
  client.update(message)
end
# Reads the timestamp of the last tweeted event from the head of the home
# timeline; falls back to an ancient sentinel when the timeline is empty.
# NOTE(review): if the newest tweet is not one of ours, its first token is
# not a date and the caller's DateTime.parse crashes — fixed in the later
# revision by scanning for a date-prefixed tweet.
def read_previous_created_at(twitter_client)
  timeline = twitter_client.home_timeline
  if timeline.empty?
    '2000-01-01T00:00:00Z'
  else
    timeline.first.text.split(' ').first
  end
end
# Entry point: fetch received events, drop those already tweeted (based on
# the timestamp embedded in the newest tweet), and tweet the rest
# oldest-first, truncating content so the URL always fits in 140 chars.
response = download_events
if response.code.to_i == 200
  twitter_client = new_twitter_client
  previous_created_at = read_previous_created_at(twitter_client)
  events =
    to_array(response.body).reject {|event|
      event.nil?
    }.select {|event|
      previous_created_at.nil? ||
      (DateTime.parse(event[:created_at]) > DateTime.parse(previous_created_at))
    }
  tco_length = 23 # t.co length
  lf_length = 2 # \n length
  text_limit_size = 140 - tco_length - lf_length
  events.each {|event|
    text =
      if event[:content].size > text_limit_size
        n = event[:content].size - text_limit_size
        event[:content][0, event[:content].size - n] + "\n" + event[:url]
      else
        event[:content] + "\n" + event[:url]
      end
    tweet(twitter_client, text)
  }
else
  raise "GitHub API Error. http_status_code: #{response.code}"
end
Fix crash when the newest timeline tweet does not start with a parseable date: scan the timeline for the most recent date-prefixed tweet instead of blindly parsing the first one.
# -*- coding: utf-8 -*-
require 'net/https'
require 'json'
require 'twitter'
# Fetches the authenticated user's received-events feed from the GitHub
# API over TLS. Returns the raw Net::HTTPResponse; the caller checks the
# status code.
def download_events
  github_user = ENV['GITHUB_USER']
  github_oauth_token = ENV['GITHUB_PERSONAL_ACCESS_TOKEN']
  https = Net::HTTP.new('api.github.com',443)
  https.use_ssl = true
  https.start {
    # FIX: GitHub deprecated (and later removed) `?access_token=` query
    # authentication; send the token in the Authorization header instead,
    # which also keeps it out of URLs and server logs. GitHub requires a
    # User-Agent on all API requests.
    https.get("/users/#{github_user}/received_events",
              'Authorization' => "token #{github_oauth_token}",
              'User-Agent' => github_user.to_s)
  }
end
# Converts the GitHub events JSON string +s+ into an array of
# { created_at:, content:, url: } hashes, oldest first. Each supported
# event type gets a short tag and a one-line summary.
def to_array(s)
  xs = JSON.parse(s).map {|json|
    created_at = json["created_at"]
    user = json["actor"]["login"]
    repo = json["repo"]["name"]
    # "WatchEvent" -> "Watch", etc.
    type = json["type"].sub(/Event$/, "")
    content = "#{user}, #{repo}"
    url = ""
    payload = json["payload"]
    case type
    when "CommitComment"
      short_type = "CommitComment"
      url = payload["comment"]["html_url"]
    when "Create"
      short_type = "Create"
      ref_type = payload["ref_type"]
      content += "\n#{ref_type}"
      url = "https://github.com/#{repo}"
    when "Delete"
      short_type = "Delete"
      ref_type = payload["ref_type"]
      content += "\n#{ref_type}"
      url = "https://github.com/#{repo}"
    when "Fork"
      short_type = "Fork"
      full_name = payload["forkee"]["full_name"]
      content += "\n#{full_name}"
      url = payload["forkee"]["html_url"]
    when "Gollum"
      short_type = "Gollum"
      url = "https://github.com/#{repo}/wiki"
    when "IssueComment"
      short_type = "IssueComment"
      issue_title = payload["issue"]["title"]
      content += "\n\"#{issue_title}\""
      url = payload["comment"]["html_url"]
    when "Issues"
      short_type = "Issues"
      action = payload["action"]
      issue_title = payload["issue"]["title"]
      content += "\n#{action} \"#{issue_title}\""
      url = payload["issue"]["html_url"]
    when "Member"
      short_type = "Member"
      action = payload["action"]
      acted_user = payload["member"]["login"]
      content += "\n#{action} \"#{acted_user}\""
      url = "https://github.com/#{repo}"
    when "PullRequest"
      short_type = "PR"
      action = payload["action"]
      title = payload["pull_request"]["title"]
      content += "\n#{action} \"#{title}\""
      url = payload["pull_request"]["html_url"]
    when "PullRequestReviewComment"
      short_type = "PRReviewComment"
      action = payload["action"]
      pull_request_title = payload["pull_request"]["title"]
      content += "\n#{action} \"#{pull_request_title}\""
      url = payload["comment"]["html_url"]
    when "Push"
      short_type = "Push"
      # Link to the compare view between the pushed-from/to commits.
      before = payload["before"].slice(0, 10)
      head = payload["head"].slice(0, 10)
      url = "https://github.com/#{repo}/compare/#{before}...#{head}"
    when "Release"
      short_type = "Release"
      action = payload["action"]
      tag_name = payload["release"]["tag_name"]
      content += "\n#{action} \"#{tag_name}\""
      url = payload["release"]["html_url"]
    when "TeamAdd"
      short_type = "TeamAdd"
      team_name = payload["team"]["name"]
      content += "\n#{team_name}"
      url = "https://github.com/#{repo}"
    when "Watch"
      short_type = "Watch"
      action = payload["action"]
      content += "\n#{action}"
      url = "https://github.com/#{repo}"
    else
      # BUG FIX: previously unhandled event types left short_type nil and
      # the tweet tag rendered as "[]". Fall back to the stripped type
      # name so new GitHub event types still produce a readable tag.
      short_type = type
    end
    {
      created_at: created_at,
      content: "#{created_at} [#{short_type}]\n#{content}",
      url: url
    }
  }
  # The API returns newest first; reverse to tweet oldest first.
  xs.reverse
end
# Builds an authenticated Twitter REST client from the *_FOR_GITHUB
# environment variables.
def new_twitter_client
  Twitter::REST::Client.new do |config|
    config.consumer_key        = ENV['TWITTER_CONSUMER_KEY_FOR_GITHUB']
    config.consumer_secret     = ENV['TWITTER_CONSUMER_SECRET_FOR_GITHUB']
    config.access_token        = ENV['TWITTER_ACCESS_TOKEN_FOR_GITHUB']
    config.access_token_secret = ENV['TWITTER_ACCESS_TOKEN_SECRET_FOR_GITHUB']
  end
end
# Posts +message+ as a status update through the given Twitter client.
def tweet(client, message)
  client.update(message)
end
# True when +s+ parses as a date/time string, false otherwise.
def datetime_string?(s)
  DateTime.parse(s)
  true
rescue StandardError
  false
end
# Reads the timestamp of the last event we tweeted: scans the home
# timeline for the newest tweet whose first token parses as a date (our
# tweets start with the event's created_at). Falls back to an ancient
# sentinel when no such tweet exists.
def read_previous_created_at(twitter_client)
  dated_tweet = twitter_client.home_timeline.find do |status|
    datetime_string? status.text.split(' ').first
  end
  return '2000-01-01T00:00:00Z' if dated_tweet.nil?
  dated_tweet.text.split(' ').first
end
# Entry point: fetch received events, drop those already tweeted (based on
# the timestamp embedded in the newest date-prefixed tweet), and tweet the
# rest oldest-first, truncating content so the URL always fits in 140 chars.
response = download_events
if response.code.to_i == 200
  twitter_client = new_twitter_client
  previous_created_at = read_previous_created_at(twitter_client)
  events =
    to_array(response.body).reject {|event|
      event.nil?
    }.select {|event|
      previous_created_at.nil? ||
      (DateTime.parse(event[:created_at]) > DateTime.parse(previous_created_at))
    }
  tco_length = 23 # t.co length
  lf_length = 2 # \n length
  text_limit_size = 140 - tco_length - lf_length
  events.each {|event|
    text =
      if event[:content].size > text_limit_size
        n = event[:content].size - text_limit_size
        event[:content][0, event[:content].size - n] + "\n" + event[:url]
      else
        event[:content] + "\n" + event[:url]
      end
    tweet(twitter_client, text)
  }
else
  raise "GitHub API Error. http_status_code: #{response.code}"
end
|
Added an array snippet on how you can work with arrays
# Tutorial snippets demonstrating Ruby's Array API.
# For documentation on all the array functions:
# https://ruby-doc.org/core/Array.html

# String join using * operator with a string
# 1, 2, 3
puts [1,2,3] * ', '

# Using an int repeats instead
# [1, 2, 3, 1, 2, 3, 1, 2, 3]
puts ([1,2,3] * 3).inspect

# Can't be converted (Hash has no to_ary)
# nil
puts Array.try_convert({ :test1 => "test" }).inspect

# Can convert a class with to_ary method
# ["Fido", "Lab"]
# Demo class for implicit array conversion: because Dog responds to
# #to_ary, Array.try_convert(dog) yields [name, breed].
class Dog
  attr_accessor :name, :breed

  def to_ary
    [name, breed]
  end
end
dog = Dog.new
dog.name = "Fido"
dog.breed = "Lab"
puts Array.try_convert(dog).inspect

# Combine arrays using the + operator
# [1, 2, 3, 4, 5, 6]
puts ([1,2,3] + [4,5,6]).inspect

# The - operator removes all elements in the second array from the first
# [2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 18, 19, 20]
puts ( (1..20).to_a - [1,17,3] ).inspect

# Comparison operator is just weird...
#0
#1
#-1
# So this is equal
puts [1,2,3] <=> [1,2,3]
# Comparison goes in order and stops when it finds a value in the second array
# that is greater than or less than the current value of the first array.
# This means that the total sum of a numeric array has no bearing whatsoever
puts [1,5,7] <=> [1,3,100]
puts [1,5,7] <=> [1,7,10]

# Returns a new array (non-destructive)
# [1, 2, 3] [1, 4, 9]
a = [1,2,3]
b = [1,2,3].map { | x | x * x }
puts a.inspect + " " + b.inspect

# Modifies array in place (destructive)
# [1, 4, 9]
a = [1,2,3]
a.map! { | x | x * x }
puts a.inspect

# Combinations can be used to show how many ways you can combine an array
# of elements creating unique pairings. Take for example finding out how many
# passwords you could get with a length of 5 using all lowercase characters
# 65780
password_chars = ('a'..'z').to_a
puts password_chars.combination(5).to_a.length

# Hashes are fixnums that are different from object_id in that they are
# specific to the contents of the array. That means two arrays with different
# object ids but with the same contents will have the same hash.
a = [1,2,3]
b = [1,2,3]
puts a.hash.inspect + " " + b.hash.inspect
# Sample takes n number of random elements from an array, attempting to ensure
# they are unique. This can be used for a very simple random password generator
# Build a pseudo-random password of up to +size+ characters drawn (without
# replacement) from a-z, A-Z and 0-9. Because Array#sample never repeats an
# element, the result is capped at 62 characters regardless of +size+.
def password_generator(size)
  alphabet = ('a'..'z').to_a + ('A'..'Z').to_a + (0..9).to_a
  alphabet.sample(size).join
end
# Example: lU2y3XTvC8SGKBPWFNI0
puts password_generator(20)
# Restricted to number of elements: Array#sample never repeats an element,
# so asking for 2000 still yields at most the 62 available characters.
# Example: bRJ7rWdglIchEXOmao2x1H93CzAYuMnNwGsek0FyfQ4ZLBjTt6PK8pqv5SDiVU
puts password_generator(2000)
# Shuffle randomizes (returns a new array; output differs per run)
# [2, "H", "t", "A", "u", "J", "C", "U", "j", 3, "i", "E", "s", "z", "S", "l",
# "y", "O", 1, "K", 7, 0, 8, "F", "X", "e", "c", "W", "I", "p", "T", "R", "w",
# "Q", "Y", "L", "k", "v", "n", "G", "a", "Z", 6, "d", "r", 4, "N", 9, "x", "f",
# "h", "g", 5, "o", "m", "D", "V", "B", "M", "q", "P", "b"]
chars = ('a'..'z').to_a + ('A'..'Z').to_a + (0..9).to_a
shuffled_array = chars.shuffle
puts shuffled_array.inspect
# And sort re-sorts if possible (ie. an array with mixed types won't work
# unless you provide a custom function)
# [8, 7, 2, 6, 0, 1, 3, 5, 4, 9]
# [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
shuffled_numbers = (0..9).to_a.shuffle
puts shuffled_numbers.inspect
puts shuffled_numbers.sort.inspect
require File.join(pwd, 'lib/build/jake.rb')
require 'fileutils'
namespace 'device' do
namespace 'android' do
# Assemble a prebuilt "container" from the artifacts of a finished
# production build: classes.dex, per-ABI native libs, assets, res,
# build.yml and any extension AndroidManifest patches.
task :make_container, [:target_path] => 'device:android:production' do |t,args|
target_path = args[:target_path]
puts "Target path for prebuilt binaries: #{args}"
FileUtils.mkdir_p target_path
FileUtils.cp( File.join($bindir,'classes.dex'), target_path )
FileUtils.mkdir_p File.join(target_path,'native','lib')
# Copy each built shared object under native/lib/<abi>/, where <abi> is
# the name of the directory the library was built into.
Dir.glob(File.join($bindir,'tmp','**','lib*.so')) { |lib|
arch = File.basename(File.dirname(lib))
FileUtils::mkdir_p File.join(target_path,'native','lib',arch)
FileUtils.cp(lib, File.join(target_path,'native','lib',arch))
}
cp_r( File.join($bindir,'tmp', 'assets' ), File.join( target_path, 'assets' ) )
cp_r( File.join($bindir,'tmp', 'res' ), File.join( target_path, 'res' ) )
FileUtils.cp( File.join($app_path,'build.yml'), target_path )
#save manifest changes
# Each extension may contribute manifest patches: .xml (legacy fragment),
# .rb (generator script) or .erb (template); anything else aborts the task.
$ext_android_manifest_changes.each do |ext, manifest_changes|
addspath = File.join(target_path, 'extensions', ext, 'adds')
mkdir_p addspath
manifest_changes.each do |path|
if File.extname(path) == '.xml'
cp path, File.join(addspath, 'AndroidManifest.xml')
else
if File.extname(path) == '.rb'
cp path, File.join(addspath, 'AndroidManifest.rb')
else
if File.extname(path) == '.erb'
cp path, addspath
else
raise "Wrong AndroidManifest patch file: #{path}"
end
end
end
end
end
end
module AndroidPrebuild
# Locate the prebuilt Android container matching +config+; resolution is
# delegated entirely to the rhodes/containers helper.
def self.determine_prebuild_path(config)
# Lazy require: only needed when building from a prebuilt container.
require 'rhodes/containers'
Rhodes::Containers::get_container_path_prefix('android', config)
end
# Build only the Rho bundle (app sources/assets), skipping the native
# rhodes, extension and XML builds -- the prebuilt container supplies those.
# Returns $appassets, the path of the generated bundle.
def self.make_app_bundle
print_timestamp('AndroidPrebuild.make_app_bundle START')
# Globals consumed by the build:android:rhobundle task to skip work.
$use_prebuild_data = true
$skip_build_rhodes_main = true
$skip_build_extensions = true
$skip_build_xmls = true
Rake::Task['build:android:rhobundle'].execute
print_timestamp('AndroidPrebuild.make_app_bundle FINISH')
return $appassets
end
# Build AndroidManifest.xml for a prebuilt-container build, write it to
# $appmanifest and return that path.
# NOTE(review): the method receives app_config but several reads use the
# $app_config global instead; the visible caller passes $app_config, so both
# refer to the same data -- confirm before relying on the parameter alone.
def self.generate_manifest(prebuilt_path,prebuilt_config,app_config)
print_timestamp('AndroidPrebuild.generate_manifest START')
# Parse the app version string ("major[.minor[.patch[.build]]]") into parts.
version = {'major' => 0, 'minor' => 0, 'patch' => 0, "build" => 0}
if $app_config["version"]
if $app_config["version"] =~ /^(\d+)$/
version["major"] = $1.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
version["patch"] = $3.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
version["patch"] = $3.to_i
version["build"] = $4.to_i
else
raise "Version number must be numeric and in one of these formats: major, major.minor, major.minor.patch, or major.minor.patch.build."
end
end
# Collapse the four parts into a single integer Android versionCode.
version = version["major"]*1000000 + version["minor"]*10000 + version["patch"]*100 + version["build"]
# Baseline permissions, extended with those implied by app capabilities.
usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
capabilities = []
capabilities.concat(app_config['capabilities']) if app_config['capabilities']
capabilities.uniq!
capabilities.each do |cap|
# ANDROID_PERMISSIONS maps a capability to a permission suffix or an
# Array of suffixes; unknown capabilities and non-String entries are skipped.
cap = ANDROID_PERMISSIONS[cap]
next if cap.nil?
cap = [cap] unless cap.is_a? Array
cap.each do |cap_item|
if cap_item.is_a? String
usesPermissions << "android.permission.#{cap_item}"
next
end
end
end
usesPermissions.uniq!
hidden = get_boolean(prebuilt_config['hidden_app'])
# Configure the manifest generator; versionName comes from the container's
# build.yml while versionCode is derived from the app's version string.
generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
generator.versionName = prebuilt_config["version"]
generator.versionCode = version
generator.installLocation = 'auto'
generator.minSdkVer = $min_sdk_level
generator.maxSdkVer = $max_sdk_level
generator.screenOrientation = $android_orientation unless $android_orientation.nil?
generator.hardwareAcceleration = true if $app_config["capabilities"].index('hardware_acceleration')
generator.apikey = $gapikey if $gapikey
generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix
# Manifest hooks from extensions built with the app: .rb scripts are eval'd
# in this scope, .erb templates are queued on the generator.
puts "Apply app's extensions manifest changes in generator..."
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.rb')).each do |extscript|
puts "Evaluating #{extscript}"
eval(File.new(extscript).read)
end
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Manifest*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.manifestManifestAdds << exttemplate
end
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Application*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.applicationManifestAdds << exttemplate
end
# Same hooks for extensions shipped inside the prebuilt container.
puts "Apply container's extensions manifest changes in generator..."
$app_config['extensions'].each { |ext|
addspath = File.join(prebuilt_path,'extensions',ext,'adds')
if (File.directory?(addspath))
extscript = File.join(addspath,'AndroidManifest.rb')
if (File.file?(extscript))
puts "Evaluating #{extscript}"
eval(File.new(extscript).read)
end
Dir.glob(File.join(addspath, 'Manifest*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.manifestManifestAdds << exttemplate
end
Dir.glob(File.join(addspath, 'Application*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.applicationManifestAdds << exttemplate
end
end
}
manifest = generator.render $rhomanifesterb
File.open($appmanifest, "w") { |f| f.write manifest }
# Collect and merge legacy AndroidManifest.xml fragments (deprecated path).
ext_manifest_changes = []
puts "Collecting legacy manifest changes for container extensions..."
$app_config['extensions'].each { |ext|
extmanifest = File.join(prebuilt_path,'extensions',ext,'adds','AndroidManifest.xml')
if (File.file?(extmanifest))
ext_manifest_changes << extmanifest
end
}
puts "Collecting legacy manifest changes for app extensions..."
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.xml')).each do |ext_manifest|
if File.file? ext_manifest
ext_manifest_changes << ext_manifest
end
end
puts "Applying legacy manifest changes..."
apply_manifest_ext_changes($appmanifest,ext_manifest_changes)
print_timestamp('AndroidPrebuild.generate_manifest FINISH')
return $appmanifest
end
# Merge legacy per-extension AndroidManifest.xml fragments into the generated
# application manifest at +target_manifest+ (deprecated mechanism, kept for
# old extensions).
#
# For each change file: children of its <application> element are copied into
# the app's <application> (entries addressed at RhodesActivity are nested
# inside the main activity instead), and the remaining <manifest>-level
# elements are appended to the manifest root; an original <uses-sdk> is
# removed when the fragment supplies its own.
def self.apply_manifest_ext_changes(target_manifest, manifest_changes)
  #######################################################
  # Deprecated stuff below
  print_timestamp('AndroidPrebuild.apply_manifest_ext_changes START')
  app_f = File.new(target_manifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name', 'android').to_s
    if a.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  manifest_changes.each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name', 'android').to_s
          if e.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            # Only <uses-sdk> is replaced; other duplicates are appended.
            if p == '/manifest/uses-sdk'
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # BUG FIX: this branch referenced an undefined local `m`, raising
      # NameError whenever a listed manifest change file was missing.
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  # Rewrite the manifest in place with the merged document.
  puts 'delete original manifest'
  File.delete(target_manifest)
  updated_f = File.open(target_manifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  print_timestamp('AndroidPrebuild.apply_manifest_ext_changes FINISH')
  puts 'Manifest updated by extension is saved!'
end
# Prepare resources for packaging: extension "adds" directories, the app
# icon, an optional custom config.xml, and built lib*.so files copied into
# $applibs/<abi>/. Returns $appres.
def self.build_resources(prebuilt_builddir)
print_timestamp('AndroidPrebuild.build_resources START')
set_app_name_android($appname)
puts 'EXT: add additional files to project before build'
Dir.glob(File.join(prebuilt_builddir, 'extensions', '*', 'adds', '*')).each do |res|
if File.directory?(res) && (res != '.') && (res != '..')
puts "add resources from extension [#{res}] to [#{$tmpdir}]"
cp_r res, $tmpdir
end
end
#copy icon after extension resources in case it overwrites them (like rhoelementsext...)
set_app_icon_android
if $config_xml
puts "Copying custom config.xml"
rawres_path = File.join($tmpdir, 'res', 'raw')
mkdir_p rawres_path unless File.exist? rawres_path
cp $config_xml, File.join(rawres_path, 'config.xml')
end
# Pre-create the ABI directories the packager expects.
mkdir_p File.join($applibs,'armeabi')
mkdir_p File.join($applibs,'armeabi-v7a')
mkdir_p File.join($applibs,'x86')
# Add .so libraries
Dir.glob($app_builddir + "/**/lib*.so").each do |lib|
arch = File.basename(File.dirname(lib))
file = File.basename(lib)
cp_r lib, File.join($applibs,arch,file)
end
=begin
$ext_android_additional_lib.each do |lib|
arch = File.basename(File.dirname(lib))
file = File.basename(lib)
cp_r lib, File.join($applibs,arch,file)
end
=end
print_timestamp('AndroidPrebuild.build_resources FINISH')
return $appres
end
# Collect bundle files whose basename starts with '_', as paths relative to
# $tmpdir. aapt silently skips such files, so they must be added to the
# package manually later (see make_package).
def self.get_underscore_files_from_bundle(bundle_path)
  underscores = []
  Dir.glob(File.join(bundle_path, "**/*")).each do |f|
    next unless File.basename(f) =~ /^_/
    relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
    # BUG FIX: was `undersrores << relpath` (typo), which raised NameError
    # whenever an underscore-prefixed file was present in the bundle.
    underscores << relpath
  end
  return underscores
end
# Native shared libraries in a prebuilt container live under '<root>/native'.
def self.get_native_libs_path(prebuilt_path)
  File.join(prebuilt_path, 'native')
end
# Final artifact path: <target dir>/<app name>_signed.apk
def self.make_output_path
  "#{$targetdir}/#{$appname}_signed.apk"
end
# Assemble, sign (jarsigner) and zipalign the final APK.
#   manifest_path    - AndroidManifest.xml to package
#   resources_path   - res/ directory handed to aapt
#   assets_path      - merged assets directory
#   underscore_files - bundle-relative paths aapt would silently skip
#   native_libs_path - root of the <abi>/lib*.so tree to embed
#   classes_dex      - prebuilt classes.dex
#   output_path      - destination for the signed, aligned APK
def self.make_package(manifest_path, resources_path, assets_path, underscore_files, native_libs_path, classes_dex, output_path)
print_timestamp('AndroidPrebuild.make_package START')
resourcepkg = $bindir + "/rhodes.ap_"
puts "Packaging Assets and Jars"
print_timestamp('Packaging Assets and Jars START')
args = ["package", "-f", "-M", manifest_path, "-S", resources_path, "-A", assets_path, "-I", $androidjar, "-F", resourcepkg]
if $no_compression
# '-0 <ext>' tells aapt to store files with these extensions uncompressed.
$no_compression.each do |ext|
args << '-0'
args << ext
end
end
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
print_timestamp('Packaging Assets and Jars FINISH')
# Workaround: manually add files starting with '_' because aapt silently ignore such files when creating package
print_timestamp('Packaging underscore START')
underscore_files.each do |relpath|
puts "Add #{relpath} to #{resourcepkg}..."
args = ["uf", resourcepkg, relpath]
Jake.run($jarbin, args, $tmpdir)
unless $?.success?
raise "Error packaging assets"
end
end
print_timestamp('Packaging underscore FINISH')
puts "Packaging Native Libs"
print_timestamp('Packaging Native Libs START')
# Append every ABI's shared objects under lib/<abi>/ inside the package.
args = ["uf", resourcepkg]
Dir.glob(File.join(native_libs_path,'**','lib*.so')) do |lib|
arch = File.basename(File.dirname(lib))
args << "lib/#{arch}/#{File.basename(lib)}"
end
Jake.run($jarbin, args, native_libs_path)
unless $?.success?
raise "Error packaging native libraries"
end
print_timestamp('Packaging Native Libs FINISH')
dexfile = classes_dex
simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
final_apkfile = output_path
signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
resourcepkg = $bindir + "/rhodes.ap_"
print_timestamp('build APK START')
apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
print_timestamp('build APK FINISH')
# Create a keystore on first use. (File.exists? is a deprecated alias of
# File.exist? -- left as-is here.)
if not File.exists? $keystore
puts "Generating private keystore..."
mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
args = []
args << "-genkey"
args << "-alias"
args << $storealias
args << "-keyalg"
args << "RSA"
args << "-validity"
args << "20000"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-keypass"
args << $keypass
Jake.run($keytool, args)
unless $?.success?
puts "Error generating keystore file"
exit 1
end
end
print_timestamp('Signing APK file START')
puts "Signing APK file"
# NOTE(review): MD5withRSA/SHA1 are legacy signature/digest algorithms;
# modern Android tooling would use apksigner instead of jarsigner.
args = []
args << "-sigalg"
args << "MD5withRSA"
args << "-digestalg"
args << "SHA1"
args << "-verbose"
args << "-keystore"
args << $keystore
args << "-storepass"
args << $storepass
args << "-signedjar"
args << signed_apkfile
args << simple_apkfile
args << $storealias
Jake.run($jarsigner, args)
unless $?.success?
puts "Error running jarsigner"
exit 1
end
print_timestamp('Signing APK file FINISH')
puts "Align APK file"
print_timestamp('Align APK file START')
# zipalign -f -v 4: force overwrite, verbose, 4-byte alignment.
args = []
args << "-f"
args << "-v"
args << "4"
args << '"' + signed_apkfile + '"'
args << '"' + final_apkfile + '"'
Jake.run($zipalign, args)
unless $?.success?
puts "Error running zipalign"
exit 1
end
print_timestamp('Align APK file FINISH')
#remove temporary files
rm_rf simple_apkfile
rm_rf signed_apkfile
# Record the package name alongside the APK for downstream tooling.
File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
f.puts $app_package_name
end
print_timestamp('AndroidPrebuild.make_package FINISH')
end
# Merge the container's assets with the app's freshly built bundle into a
# new '<app_assets>_merged' directory (app files overlay the container's,
# except apps/public/api and db which the container wins), then rebuild the
# bundle metadata (rho.dat, hash, name). Returns the merged directory.
def self.merge_assets( prebuilt_assets, app_assets )
print_timestamp('AndroidPrebuild.merge_assets START')
target_assets = app_assets + '_merged'
FileUtils.mkdir_p( target_assets )
cp_r( File.join(prebuilt_assets,'.'), target_assets, { :verbose => true } )
cp_r( File.join(app_assets,'.'), target_assets, { :verbose => true } )
# Container versions of public/api and db take precedence over the app's.
target_public_api = File.join(target_assets,'apps','public','api')
FileUtils.mkdir_p( target_public_api )
cp_r( File.join(prebuilt_assets,'apps','public','api','.'), target_public_api, { :verbose => true } )
target_db = File.join( target_assets, 'db' )
FileUtils.mkdir_p( target_db )
cp_r( File.join(prebuilt_assets,'db','.'), target_db, { :verbose => true } )
hash = nil
["apps", "db", "lib"].each do |d|
# Calculate hash of directories; get_dir_hash chains the digest through
# each call (the final object responds to #hexdigest below).
hash = get_dir_hash(File.join(target_assets, d), hash)
end
# Drop stale metadata copied from the inputs and regenerate it.
rm File.join(target_assets, "hash")
rm File.join(target_assets, "name")
rm File.join(target_assets, "rho.dat")
Jake.build_file_map(target_assets, "rho.dat")
File.open(File.join(target_assets, "hash"), "w") { |f| f.write(hash.hexdigest) }
File.open(File.join(target_assets, "name"), "w") { |f| f.write($appname) }
print_timestamp('AndroidPrebuild.merge_assets FINISH')
return target_assets
end
# Merge the container's res/ tree into a new '<app_res>_merged' directory,
# stamping the app's display name into strings.xml and replacing launcher
# icons with the app's own icon.png. Returns the merged directory.
def self.merge_resources( prebuilt_res, app_res )
print_timestamp('AndroidPrebuild.merge_resources START')
target_res = app_res + '_merged'
FileUtils.mkdir_p( target_res )
cp_r( File.join(prebuilt_res,'.'), target_res, { :verbose => true } )
# Rewrite the app_name string resource with the actual application name.
rhostrings = File.join(prebuilt_res, "values", "strings.xml")
appstrings = File.join(target_res, "values", "strings.xml")
doc = REXML::Document.new(File.new(rhostrings))
doc.elements["resources/string[@name='app_name']"].text = $appname
File.open(appstrings, "w") { |f| doc.write f }
# Swap in the app's launcher icon for every density directory that exists.
iconappname = File.join($app_path, "icon", "icon.png")
['drawable', 'drawable-hdpi', 'drawable-mdpi', 'drawable-ldpi'].each do |dpi|
drawable = File.join(target_res, dpi)
iconresname = File.join(drawable, "icon.png")
rm_f iconresname
cp iconappname, iconresname if File.exist? drawable
end
#cp_r( File.join(app_res,'.'), target_res, { :verbose => true } )
print_timestamp('AndroidPrebuild.merge_resources FINISH')
return target_res
end
# Top-level driver: produce a signed APK from a prebuilt binary container
# combined with the current app's bundle, resources and generated manifest.
def self.production_with_prebuild_binary
print_timestamp('AndroidPrebuild.production_with_prebuild_binary START')
Rake::Task['config:android'].invoke
prebuilt_path = determine_prebuild_path($app_config)
bundle_path = make_app_bundle
prebuilt_config = Jake.config(File.open(File.join(prebuilt_path, 'build.yml')))
manifest_path = generate_manifest(prebuilt_path,prebuilt_config,$app_config)
prebuilt_builddir = File.join(bundle_path,'bin','target','android',$confdir)
resources_path = build_resources(prebuilt_builddir)
# Merge container assets/resources over the app's own.
assets_path = merge_assets( File.join( prebuilt_path,'assets' ), bundle_path )
resources_path = merge_resources( File.join( prebuilt_path,'res' ), resources_path )
underscore_files = get_underscore_files_from_bundle(bundle_path)
#jars = build_java
native_libs_path = get_native_libs_path(prebuilt_path)
output_path = make_output_path
classes_dex = File.join(prebuilt_path,'classes.dex')
make_package(manifest_path,resources_path,assets_path,underscore_files,native_libs_path, classes_dex, output_path)
print_timestamp('AndroidPrebuild.production_with_prebuild_binary FINISH')
end
end
# Rake entry point wrapping AndroidPrebuild.production_with_prebuild_binary.
task :production_with_prebuild_binary do
AndroidPrebuild.production_with_prebuild_binary
end
end
end
=begin
namespace 'build' do
namespace 'android' do
def get_bundle_map( stream )
bundle_map = {}
stream.each { |line|
vals = line.split('|')
fpath = vals[0]
type = vals[1]
len = vals[2].to_i
timestamp = vals[3].strip!.to_i
bundle_map[fpath] = { :type => type, :len => len, :timestamp => timestamp }
}
return bundle_map
end
task :repack do
puts 'Repacking final APK with updated bundle'
Rake::Task['config:android'].execute
Rake::Task['build:android:rhobundle'].execute
if File.exists?(resourcepkg) then
puts 'Make diff maps and repack only changed files'
begin
zipfile = Zip::File.open(resourcepkg)
packed_items = {}
zipfile.get_input_stream('assets/RhoBundleMap.txt') do |filelist|
packed_items = get_bundle_map(filelist)
end
zipfile.close
bundle_items = {}
File.open( File.join( $appassets, 'RhoBundleMap.txt' ) ) do |filelist|
bundle_items = get_bundle_map(filelist)
end
remove_items = []
add_items = []
packed_items.each { |key,value| remove_items << key if (!bundle_items.has_key?(key) and value[:type]=='file') }
bundle_items.each { |key,value|
if value!=packed_items[key] and value[:type]=='file' then
add_items << key unless key=='rho.dat'
remove_items << key if packed_items.has_key?(key)
end
}
currentdir = Dir.pwd()
Dir.chdir $appassets+'/..'
unless remove_items.empty? then
args = [ 'remove', resourcepkg ]
remove_items.each { |key|
puts "Removing item from package: #{key}"
args << 'assets/'+key
packed_items.delete(key)
}
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
end
unless add_items.empty? then
args = [ 'add', resourcepkg ]
add_items.each { |key|
puts "Adding item to package: #{key}:#{bundle_items[key]}"
args << 'assets/'+key
}
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
end
has_changes = !(remove_items.empty? and add_items.empty?)
if has_changes then
puts 'Replacing bundle map and commiting package changes'
Jake.run($aapt, ['remove',resourcepkg,'assets/RhoBundleMap.txt','assets/rho.dat'])
unless $?.success?
raise "Error running AAPT (1)"
end
Jake.run($aapt, ['add',resourcepkg,'assets/RhoBundleMap.txt','assets/rho.dat'])
unless $?.success?
raise "Error running AAPT (1)"
end
else
puts "No changes detected in the bundle, do nothing"
end
Dir.chdir currentdir
#zipfile.close
rescue => e
puts "EXCEPTION: #{e.inspect}"
raise e
end
Rake::Task['device:android:production'].execute if has_changes
else
puts 'Pack everything from scratch'
Rake::Task['device:android:production'].invoke
end
end
end
end
=end
Use only the ABIs specified for the target app.
require File.join(pwd, 'lib/build/jake.rb')
require 'fileutils'
namespace 'device' do
namespace 'android' do
# Assemble a prebuilt "container" from the artifacts of a finished
# production build: classes.dex, per-ABI native libs, assets, res,
# build.yml and any extension AndroidManifest patches.
task :make_container, [:target_path] => 'device:android:production' do |t,args|
target_path = args[:target_path]
puts "Target path for prebuilt binaries: #{args}"
FileUtils.mkdir_p target_path
FileUtils.cp( File.join($bindir,'classes.dex'), target_path )
FileUtils.mkdir_p File.join(target_path,'native','lib')
# Copy each built shared object under native/lib/<abi>/, where <abi> is
# the name of the directory the library was built into.
Dir.glob(File.join($bindir,'tmp','**','lib*.so')) { |lib|
arch = File.basename(File.dirname(lib))
FileUtils::mkdir_p File.join(target_path,'native','lib',arch)
FileUtils.cp(lib, File.join(target_path,'native','lib',arch))
}
cp_r( File.join($bindir,'tmp', 'assets' ), File.join( target_path, 'assets' ) )
cp_r( File.join($bindir,'tmp', 'res' ), File.join( target_path, 'res' ) )
FileUtils.cp( File.join($app_path,'build.yml'), target_path )
#save manifest changes
# Each extension may contribute manifest patches: .xml (legacy fragment),
# .rb (generator script) or .erb (template); anything else aborts the task.
$ext_android_manifest_changes.each do |ext, manifest_changes|
addspath = File.join(target_path, 'extensions', ext, 'adds')
mkdir_p addspath
manifest_changes.each do |path|
if File.extname(path) == '.xml'
cp path, File.join(addspath, 'AndroidManifest.xml')
else
if File.extname(path) == '.rb'
cp path, File.join(addspath, 'AndroidManifest.rb')
else
if File.extname(path) == '.erb'
cp path, addspath
else
raise "Wrong AndroidManifest patch file: #{path}"
end
end
end
end
end
end
module AndroidPrebuild
# Locate the prebuilt Android container matching +config+; resolution is
# delegated entirely to the rhodes/containers helper.
def self.determine_prebuild_path(config)
# Lazy require: only needed when building from a prebuilt container.
require 'rhodes/containers'
Rhodes::Containers::get_container_path_prefix('android', config)
end
# Build only the Rho bundle (app sources/assets), skipping the native
# rhodes, extension and XML builds -- the prebuilt container supplies those.
# Returns $appassets, the path of the generated bundle.
def self.make_app_bundle
print_timestamp('AndroidPrebuild.make_app_bundle START')
# Globals consumed by the build:android:rhobundle task to skip work.
$use_prebuild_data = true
$skip_build_rhodes_main = true
$skip_build_extensions = true
$skip_build_xmls = true
Rake::Task['build:android:rhobundle'].execute
print_timestamp('AndroidPrebuild.make_app_bundle FINISH')
return $appassets
end
# Build AndroidManifest.xml for a prebuilt-container build, write it to
# $appmanifest and return that path.
# NOTE(review): the method receives app_config but several reads use the
# $app_config global instead; the visible caller passes $app_config, so both
# refer to the same data -- confirm before relying on the parameter alone.
def self.generate_manifest(prebuilt_path,prebuilt_config,app_config)
print_timestamp('AndroidPrebuild.generate_manifest START')
# Parse the app version string ("major[.minor[.patch[.build]]]") into parts.
version = {'major' => 0, 'minor' => 0, 'patch' => 0, "build" => 0}
if $app_config["version"]
if $app_config["version"] =~ /^(\d+)$/
version["major"] = $1.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
version["patch"] = $3.to_i
elsif $app_config["version"] =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/
version["major"] = $1.to_i
version["minor"] = $2.to_i
version["patch"] = $3.to_i
version["build"] = $4.to_i
else
raise "Version number must be numeric and in one of these formats: major, major.minor, major.minor.patch, or major.minor.patch.build."
end
end
# Collapse the four parts into a single integer Android versionCode.
version = version["major"]*1000000 + version["minor"]*10000 + version["patch"]*100 + version["build"]
# Baseline permissions, extended with those implied by app capabilities.
usesPermissions = ['android.permission.INTERNET', 'android.permission.PERSISTENT_ACTIVITY', 'android.permission.WAKE_LOCK']
capabilities = []
capabilities.concat(app_config['capabilities']) if app_config['capabilities']
capabilities.uniq!
capabilities.each do |cap|
# ANDROID_PERMISSIONS maps a capability to a permission suffix or an
# Array of suffixes; unknown capabilities and non-String entries are skipped.
cap = ANDROID_PERMISSIONS[cap]
next if cap.nil?
cap = [cap] unless cap.is_a? Array
cap.each do |cap_item|
if cap_item.is_a? String
usesPermissions << "android.permission.#{cap_item}"
next
end
end
end
usesPermissions.uniq!
hidden = get_boolean(prebuilt_config['hidden_app'])
# Configure the manifest generator; versionName comes from the container's
# build.yml while versionCode is derived from the app's version string.
generator = ManifestGenerator.new JAVA_PACKAGE_NAME, $app_package_name, hidden, usesPermissions
generator.versionName = prebuilt_config["version"]
generator.versionCode = version
generator.installLocation = 'auto'
generator.minSdkVer = $min_sdk_level
generator.maxSdkVer = $max_sdk_level
generator.screenOrientation = $android_orientation unless $android_orientation.nil?
generator.hardwareAcceleration = true if $app_config["capabilities"].index('hardware_acceleration')
generator.apikey = $gapikey if $gapikey
generator.addUriParams $uri_scheme, $uri_host, $uri_path_prefix
# Manifest hooks from extensions built with the app: .rb scripts are eval'd
# in this scope, .erb templates are queued on the generator.
puts "Apply app's extensions manifest changes in generator..."
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.rb')).each do |extscript|
puts "Evaluating #{extscript}"
eval(File.new(extscript).read)
end
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Manifest*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.manifestManifestAdds << exttemplate
end
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'Application*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.applicationManifestAdds << exttemplate
end
# Same hooks for extensions shipped inside the prebuilt container.
puts "Apply container's extensions manifest changes in generator..."
$app_config['extensions'].each { |ext|
addspath = File.join(prebuilt_path,'extensions',ext,'adds')
if (File.directory?(addspath))
extscript = File.join(addspath,'AndroidManifest.rb')
if (File.file?(extscript))
puts "Evaluating #{extscript}"
eval(File.new(extscript).read)
end
Dir.glob(File.join(addspath, 'Manifest*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.manifestManifestAdds << exttemplate
end
Dir.glob(File.join(addspath, 'Application*.erb')).each do |exttemplate|
puts "Adding template #{exttemplate}"
generator.applicationManifestAdds << exttemplate
end
end
}
manifest = generator.render $rhomanifesterb
File.open($appmanifest, "w") { |f| f.write manifest }
# Collect and merge legacy AndroidManifest.xml fragments (deprecated path).
ext_manifest_changes = []
puts "Collecting legacy manifest changes for container extensions..."
$app_config['extensions'].each { |ext|
extmanifest = File.join(prebuilt_path,'extensions',ext,'adds','AndroidManifest.xml')
if (File.file?(extmanifest))
ext_manifest_changes << extmanifest
end
}
puts "Collecting legacy manifest changes for app extensions..."
Dir.glob(File.join($app_builddir, 'extensions', '*', 'adds', 'AndroidManifest.xml')).each do |ext_manifest|
if File.file? ext_manifest
ext_manifest_changes << ext_manifest
end
end
puts "Applying legacy manifest changes..."
apply_manifest_ext_changes($appmanifest,ext_manifest_changes)
print_timestamp('AndroidPrebuild.generate_manifest FINISH')
return $appmanifest
end
# Merge legacy per-extension AndroidManifest.xml fragments into the generated
# application manifest at +target_manifest+ (deprecated mechanism, kept for
# old extensions).
#
# For each change file: children of its <application> element are copied into
# the app's <application> (entries addressed at RhodesActivity are nested
# inside the main activity instead), and the remaining <manifest>-level
# elements are appended to the manifest root; an original <uses-sdk> is
# removed when the fragment supplies its own.
def self.apply_manifest_ext_changes(target_manifest, manifest_changes)
  #######################################################
  # Deprecated stuff below
  print_timestamp('AndroidPrebuild.apply_manifest_ext_changes START')
  app_f = File.new(target_manifest)
  manifest_orig_doc = REXML::Document.new(app_f)
  app_f.close
  dst_manifest = manifest_orig_doc.elements["manifest"]
  dst_application = manifest_orig_doc.elements["manifest/application"]
  dst_main_activity = nil
  puts '$$$ try to found MainActivity'
  dst_application.elements.each("activity") do |a|
    puts '$$$ activity with attr = '+a.attribute('name', 'android').to_s
    if a.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
      puts ' $$$ FOUND !'
      dst_main_activity = a
    end
  end
  manifest_changes.each do |ext_manifest|
    if File.exists? ext_manifest
      puts 'AndroidManifest.xml['+ext_manifest+'] from native extension found !'
      manifest_ext_doc = REXML::Document.new(File.new(ext_manifest))
      src_manifest = manifest_ext_doc.elements["manifest"]
      src_application = manifest_ext_doc.elements["manifest/application"]
      if src_application != nil
        puts 'Extension Manifest process application item :'
        src_application.elements.each do |e|
          puts '$$$ process element with attr = '+e.attribute('name', 'android').to_s
          if e.attribute('name', 'android').to_s == 'com.rhomobile.rhodes.RhodesActivity'
            e.elements.each do |sube|
              puts ' add item to MainActivity['+sube.xpath+']'
              dst_main_activity.add sube
            end
          else
            puts ' add item ['+e.xpath+']'
            dst_application.add e
          end
        end
      end
      puts 'Extension Manifest process root <manifest> item :'
      src_manifest.elements.each do |e|
        p = e.xpath
        if p != '/manifest/application'
          dst_e = manifest_orig_doc.elements[p]
          if dst_e != nil
            # Only <uses-sdk> is replaced; other duplicates are appended.
            if p == '/manifest/uses-sdk'
              puts ' found and delete original item ['+p+']'
              manifest_orig_doc.elements.delete p
            end
          end
          puts ' and new item ['+p+']'
          dst_manifest.add e
        end
      end
    else
      # BUG FIX: this branch referenced an undefined local `m`, raising
      # NameError whenever a listed manifest change file was missing.
      puts 'AndroidManifest change file ['+ext_manifest+'] from native extension not found !'
    end
  end
  # Rewrite the manifest in place with the merged document.
  puts 'delete original manifest'
  File.delete(target_manifest)
  updated_f = File.open(target_manifest, "w")
  manifest_orig_doc.write updated_f, 2
  updated_f.close
  print_timestamp('AndroidPrebuild.apply_manifest_ext_changes FINISH')
  puts 'Manifest updated by extension is saved!'
end
# Prepare resources for packaging: extension "adds" directories, the app
# icon, an optional custom config.xml, and built lib*.so files copied into
# $applibs/<abi>/. Returns $appres.
def self.build_resources(prebuilt_builddir)
print_timestamp('AndroidPrebuild.build_resources START')
set_app_name_android($appname)
puts 'EXT: add additional files to project before build'
Dir.glob(File.join(prebuilt_builddir, 'extensions', '*', 'adds', '*')).each do |res|
if File.directory?(res) && (res != '.') && (res != '..')
puts "add resources from extension [#{res}] to [#{$tmpdir}]"
cp_r res, $tmpdir
end
end
#copy icon after extension resources in case it overwrites them (like rhoelementsext...)
set_app_icon_android
if $config_xml
puts "Copying custom config.xml"
rawres_path = File.join($tmpdir, 'res', 'raw')
mkdir_p rawres_path unless File.exist? rawres_path
cp $config_xml, File.join(rawres_path, 'config.xml')
end
# Pre-create the ABI directories the packager expects.
mkdir_p File.join($applibs,'armeabi')
mkdir_p File.join($applibs,'armeabi-v7a')
mkdir_p File.join($applibs,'x86')
# Add .so libraries
Dir.glob($app_builddir + "/**/lib*.so").each do |lib|
arch = File.basename(File.dirname(lib))
file = File.basename(lib)
cp_r lib, File.join($applibs,arch,file)
end
=begin
$ext_android_additional_lib.each do |lib|
arch = File.basename(File.dirname(lib))
file = File.basename(lib)
cp_r lib, File.join($applibs,arch,file)
end
=end
print_timestamp('AndroidPrebuild.build_resources FINISH')
return $appres
end
# Collect bundle files whose basename starts with '_', as paths relative to
# $tmpdir. aapt silently skips such files, so they must be added to the
# package manually later (see make_package).
def self.get_underscore_files_from_bundle(bundle_path)
  underscores = []
  Dir.glob(File.join(bundle_path, "**/*")).each do |f|
    next unless File.basename(f) =~ /^_/
    relpath = Pathname.new(f).relative_path_from(Pathname.new($tmpdir)).to_s
    # BUG FIX: was `undersrores << relpath` (typo), which raised NameError
    # whenever an underscore-prefixed file was present in the bundle.
    underscores << relpath
  end
  return underscores
end
# Native shared libraries in a prebuilt container live under '<root>/native'.
def self.get_native_libs_path(prebuilt_path)
  File.join(prebuilt_path, 'native')
end
# Final artifact path: <target dir>/<app name>_signed.apk
def self.make_output_path
  "#{$targetdir}/#{$appname}_signed.apk"
end
# Assembles, signs and zipaligns the final APK:
#  1. aapt packages the manifest, resources and assets into rhodes.ap_
#  2. files starting with '_' are added by hand (aapt skips them)
#  3. native libs for the requested ABIs are added
#  4. the APK is built, signed (generating a keystore if needed) and aligned.
# Writes app_info.txt next to the final APK. Raises / exits on tool failure.
# Relies on build globals: $bindir, $androidjar, $aapt, $jarbin, $tmpdir,
# $abis, $targetdir, $appname, $androidsdkpath, $keystore, $storealias,
# $storepass, $keypass, $keytool, $jarsigner, $zipalign, $app_package_name.
def self.make_package(manifest_path, resources_path, assets_path, underscore_files, native_libs_path, classes_dex, output_path)
  print_timestamp('AndroidPrebuild.make_package START')
  resourcepkg = $bindir + "/rhodes.ap_"
  puts "Packaging Assets and Jars"
  print_timestamp('Packaging Assets and Jars START')
  args = ["package", "-f", "-M", manifest_path, "-S", resources_path, "-A", assets_path, "-I", $androidjar, "-F", resourcepkg]
  if $no_compression
    $no_compression.each do |ext|
      args << '-0'
      args << ext
    end
  end
  Jake.run($aapt, args)
  unless $?.success?
    raise "Error running AAPT (1)"
  end
  print_timestamp('Packaging Assets and Jars FINISH')
  # Workaround: manually add files starting with '_' because aapt silently ignore such files when creating package
  print_timestamp('Packaging underscore START')
  underscore_files.each do |relpath|
    puts "Add #{relpath} to #{resourcepkg}..."
    args = ["uf", resourcepkg, relpath]
    Jake.run($jarbin, args, $tmpdir)
    unless $?.success?
      raise "Error packaging assets"
    end
  end
  print_timestamp('Packaging underscore FINISH')
  puts "Packaging Native Libs"
  print_timestamp('Packaging Native Libs START')
  args = ["uf", resourcepkg]
  # NOTE(review): this aliases (does not copy) $abis, so the deletes below
  # mutate the global — presumably intentional single-use; verify.
  abis = $abis
  Dir.glob(File.join(native_libs_path,'**','lib*.so')) do |lib|
    arch = File.basename(File.dirname(lib))
    # Fix: 'if arch in abis' is not a membership test in Ruby (it is a
    # SyntaxError before 3.0 and an always-true variable-binding pattern
    # match on 3.0+, clobbering 'abis'). Use Array#include?.
    args << "lib/#{arch}/#{File.basename(lib)}" if abis.include?(arch)
    abis.delete(arch)
  end
  puts "WARNING: Requested ABIs not found in container: #{abis}" unless abis.empty?
  Jake.run($jarbin, args, native_libs_path)
  unless $?.success?
    raise "Error packaging native libraries"
  end
  print_timestamp('Packaging Native Libs FINISH')
  dexfile = classes_dex
  simple_apkfile = $targetdir + "/" + $appname + "_tmp.apk"
  final_apkfile = output_path
  signed_apkfile = $targetdir + "/" + $appname + "_tmp_signed.apk"
  resourcepkg = $bindir + "/rhodes.ap_"
  print_timestamp('build APK START')
  apk_build $androidsdkpath, simple_apkfile, resourcepkg, dexfile, false
  print_timestamp('build APK FINISH')
  # Generate a throwaway debug keystore on first use.
  if not File.exists? $keystore
    puts "Generating private keystore..."
    mkdir_p File.dirname($keystore) unless File.directory? File.dirname($keystore)
    args = []
    args << "-genkey"
    args << "-alias"
    args << $storealias
    args << "-keyalg"
    args << "RSA"
    args << "-validity"
    args << "20000"
    args << "-keystore"
    args << $keystore
    args << "-storepass"
    args << $storepass
    args << "-keypass"
    args << $keypass
    Jake.run($keytool, args)
    unless $?.success?
      puts "Error generating keystore file"
      exit 1
    end
  end
  print_timestamp('Signing APK file START')
  puts "Signing APK file"
  args = []
  args << "-sigalg"
  args << "MD5withRSA"
  args << "-digestalg"
  args << "SHA1"
  args << "-verbose"
  args << "-keystore"
  args << $keystore
  args << "-storepass"
  args << $storepass
  args << "-signedjar"
  args << signed_apkfile
  args << simple_apkfile
  args << $storealias
  Jake.run($jarsigner, args)
  unless $?.success?
    puts "Error running jarsigner"
    exit 1
  end
  print_timestamp('Signing APK file FINISH')
  puts "Align APK file"
  print_timestamp('Align APK file START')
  args = []
  args << "-f"
  args << "-v"
  args << "4"
  args << '"' + signed_apkfile + '"'
  args << '"' + final_apkfile + '"'
  Jake.run($zipalign, args)
  unless $?.success?
    puts "Error running zipalign"
    exit 1
  end
  print_timestamp('Align APK file FINISH')
  #remove temporary files
  rm_rf simple_apkfile
  rm_rf signed_apkfile
  File.open(File.join(File.dirname(final_apkfile), "app_info.txt"), "w") do |f|
    f.puts $app_package_name
  end
  print_timestamp('AndroidPrebuild.make_package FINISH')
end
# Merges the prebuilt container's assets with the app's bundled assets into a
# new '<app_assets>_merged' directory, then regenerates the bundle metadata
# (rho.dat file map, content hash, app name). App files overwrite container
# files, except 'apps/public/api' and 'db' which are re-copied from the
# container afterwards so the container's versions win there.
# Returns the merged directory path.
def self.merge_assets( prebuilt_assets, app_assets )
  print_timestamp('AndroidPrebuild.merge_assets START')
  target_assets = app_assets + '_merged'
  FileUtils.mkdir_p( target_assets )
  # Container first, then app on top (app files take precedence).
  cp_r( File.join(prebuilt_assets,'.'), target_assets, { :verbose => true } )
  cp_r( File.join(app_assets,'.'), target_assets, { :verbose => true } )
  target_public_api = File.join(target_assets,'apps','public','api')
  FileUtils.mkdir_p( target_public_api )
  cp_r( File.join(prebuilt_assets,'apps','public','api','.'), target_public_api, { :verbose => true } )
  target_db = File.join( target_assets, 'db' )
  FileUtils.mkdir_p( target_db )
  cp_r( File.join(prebuilt_assets,'db','.'), target_db, { :verbose => true } )
  hash = nil
  ["apps", "db", "lib"].each do |d|
    # Calculate hash of directories
    hash = get_dir_hash(File.join(target_assets, d), hash)
  end
  # Drop stale metadata before regenerating it for the merged tree.
  rm File.join(target_assets, "hash")
  rm File.join(target_assets, "name")
  rm File.join(target_assets, "rho.dat")
  Jake.build_file_map(target_assets, "rho.dat")
  File.open(File.join(target_assets, "hash"), "w") { |f| f.write(hash.hexdigest) }
  File.open(File.join(target_assets, "name"), "w") { |f| f.write($appname) }
  print_timestamp('AndroidPrebuild.merge_assets FINISH')
  return target_assets
end
# Merges the prebuilt container's Android res/ tree into '<app_res>_merged',
# rewrites the app_name string resource to $appname, and copies the app icon
# into every drawable* density folder that exists.
# Returns the merged directory path.
def self.merge_resources( prebuilt_res, app_res )
  print_timestamp('AndroidPrebuild.merge_resources START')
  target_res = app_res + '_merged'
  FileUtils.mkdir_p( target_res )
  cp_r( File.join(prebuilt_res,'.'), target_res, { :verbose => true } )
  # Patch values/strings.xml so the launcher shows the real app name.
  rhostrings = File.join(prebuilt_res, "values", "strings.xml")
  appstrings = File.join(target_res, "values", "strings.xml")
  doc = REXML::Document.new(File.new(rhostrings))
  doc.elements["resources/string[@name='app_name']"].text = $appname
  File.open(appstrings, "w") { |f| doc.write f }
  iconappname = File.join($app_path, "icon", "icon.png")
  ['drawable', 'drawable-hdpi', 'drawable-mdpi', 'drawable-ldpi'].each do |dpi|
    drawable = File.join(target_res, dpi)
    iconresname = File.join(drawable, "icon.png")
    rm_f iconresname
    cp iconappname, iconresname if File.exist? drawable
  end
  #cp_r( File.join(app_res,'.'), target_res, { :verbose => true } )
  print_timestamp('AndroidPrebuild.merge_resources FINISH')
  return target_res
end
# Top-level driver for building a production APK from a prebuilt Rhodes
# container: configures the Android build, bundles the app, merges the
# manifest/assets/resources from the container, then packages and signs the
# final APK via make_package.
def self.production_with_prebuild_binary
  print_timestamp('AndroidPrebuild.production_with_prebuild_binary START')
  Rake::Task['config:android'].invoke
  prebuilt_path = determine_prebuild_path($app_config)
  bundle_path = make_app_bundle
  prebuilt_config = Jake.config(File.open(File.join(prebuilt_path, 'build.yml')))
  manifest_path = generate_manifest(prebuilt_path,prebuilt_config,$app_config)
  prebuilt_builddir = File.join(bundle_path,'bin','target','android',$confdir)
  resources_path = build_resources(prebuilt_builddir)
  assets_path = merge_assets( File.join( prebuilt_path,'assets' ), bundle_path )
  resources_path = merge_resources( File.join( prebuilt_path,'res' ), resources_path )
  underscore_files = get_underscore_files_from_bundle(bundle_path)
  #jars = build_java
  native_libs_path = get_native_libs_path(prebuilt_path)
  output_path = make_output_path
  classes_dex = File.join(prebuilt_path,'classes.dex')
  make_package(manifest_path,resources_path,assets_path,underscore_files,native_libs_path, classes_dex, output_path)
  print_timestamp('AndroidPrebuild.production_with_prebuild_binary FINISH')
end
end
# Rake entry point: build a production APK from a prebuilt binary container.
task :production_with_prebuild_binary do
  AndroidPrebuild.production_with_prebuild_binary
end
end
end
=begin
namespace 'build' do
namespace 'android' do
def get_bundle_map( stream )
bundle_map = {}
stream.each { |line|
vals = line.split('|')
fpath = vals[0]
type = vals[1]
len = vals[2].to_i
timestamp = vals[3].strip!.to_i
bundle_map[fpath] = { :type => type, :len => len, :timestamp => timestamp }
}
return bundle_map
end
task :repack do
puts 'Repacking final APK with updated bundle'
Rake::Task['config:android'].execute
Rake::Task['build:android:rhobundle'].execute
if File.exists?(resourcepkg) then
puts 'Make diff maps and repack only changed files'
begin
zipfile = Zip::File.open(resourcepkg)
packed_items = {}
zipfile.get_input_stream('assets/RhoBundleMap.txt') do |filelist|
packed_items = get_bundle_map(filelist)
end
zipfile.close
bundle_items = {}
File.open( File.join( $appassets, 'RhoBundleMap.txt' ) ) do |filelist|
bundle_items = get_bundle_map(filelist)
end
remove_items = []
add_items = []
packed_items.each { |key,value| remove_items << key if (!bundle_items.has_key?(key) and value[:type]=='file') }
bundle_items.each { |key,value|
if value!=packed_items[key] and value[:type]=='file' then
add_items << key unless key=='rho.dat'
remove_items << key if packed_items.has_key?(key)
end
}
currentdir = Dir.pwd()
Dir.chdir $appassets+'/..'
unless remove_items.empty? then
args = [ 'remove', resourcepkg ]
remove_items.each { |key|
puts "Removing item from package: #{key}"
args << 'assets/'+key
packed_items.delete(key)
}
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
end
unless add_items.empty? then
args = [ 'add', resourcepkg ]
add_items.each { |key|
puts "Adding item to package: #{key}:#{bundle_items[key]}"
args << 'assets/'+key
}
Jake.run($aapt, args)
unless $?.success?
raise "Error running AAPT (1)"
end
end
has_changes = !(remove_items.empty? and add_items.empty?)
if has_changes then
puts 'Replacing bundle map and commiting package changes'
Jake.run($aapt, ['remove',resourcepkg,'assets/RhoBundleMap.txt','assets/rho.dat'])
unless $?.success?
raise "Error running AAPT (1)"
end
Jake.run($aapt, ['add',resourcepkg,'assets/RhoBundleMap.txt','assets/rho.dat'])
unless $?.success?
raise "Error running AAPT (1)"
end
else
puts "No changes detected in the bundle, do nothing"
end
Dir.chdir currentdir
#zipfile.close
rescue => e
puts "EXCEPTION: #{e.inspect}"
raise e
end
Rake::Task['device:android:production'].execute if has_changes
else
puts 'Pack everything from scratch'
Rake::Task['device:android:production'].invoke
end
end
end
end
=end
|
# Nanobox 'detect' hook: determines which installed engine should handle the
# app, either from the registry or by running each engine's sniff script.
# import some logic/helpers from lib/*.rb
include NanoBox::Engine
include NanoBox::Output
logtap.print(bullet("Running detect hook..."), 'debug')
# By this point, engine should be set in the registry
# if an engine is specified in the Boxfile
engine = registry('engine')
# If an engine is not already specified, we need to iterate through the
# installed engines calling the "sniff" script until one of them exits with 0
if not engine
  logtap.print(bullet('Detecting app language & engine'))
  ::Dir.glob("#{ENGINE_DIR}/*").select { |f| ::File.directory?(f) }.sort.each do |e|
    # once engine is set, we can stop looping
    break if engine
    # make sure we have a sniff script
    next if not ::File.exist? "#{e}/bin/sniff"
    # for convenience, we only want the engine name
    basename = ::File.basename(e)
    # execute 'sniff' to see if we qualify
    execute 'sniff' do
      command %Q(#{e}/bin/sniff "#{CODE_LIVE_DIR}")
      cwd "#{e}/bin"
      path GONANO_PATH
      user 'gonano'
      # exit code 0 from sniff claims the app for this engine
      on_exit { |code| engine = basename if code == 0 }
    end
  end
  if engine
    # set the engine in the registry for later use
    registry('engine', engine)
  else
    # no engine claimed the app; abort the build
    logtap.print(no_engine)
    exit Hookit::Exit::ABORT
  end
end
# NOTE(review): 'engine_id' is not defined in this hook — it presumably comes
# from the surrounding hookit/nanobox environment and may be absent for local
# dev engines, in which case this raises NameError rather than printing
# no_enginefile. Checking enginefile[:name] may be the safer guard — verify.
if not engine_id
  logtap.print no_enginefile
end
# Report what was detected.
info = engine_info(
  engine_id,
  enginefile[:name],
  enginefile[:language],
  enginefile[:generic]
)
logtap.print info
Improve check for Enginefile
# Nanobox 'detect' hook: determines which installed engine should handle the
# app, either from the registry or by running each engine's sniff script.
# import some logic/helpers from lib/*.rb
include NanoBox::Engine
include NanoBox::Output
logtap.print(bullet("Running detect hook..."), 'debug')
# By this point, engine should be set in the registry
# if an engine is specified in the Boxfile
engine = registry('engine')
# If an engine is not already specified, we need to iterate through the
# installed engines calling the "sniff" script until one of them exits with 0
if not engine
  logtap.print(bullet('Detecting app language & engine'))
  ::Dir.glob("#{ENGINE_DIR}/*").select { |f| ::File.directory?(f) }.sort.each do |e|
    # once engine is set, we can stop looping
    break if engine
    # make sure we have a sniff script
    next if not ::File.exist? "#{e}/bin/sniff"
    # for convenience, we only want the engine name
    basename = ::File.basename(e)
    # execute 'sniff' to see if we qualify
    execute 'sniff' do
      command %Q(#{e}/bin/sniff "#{CODE_LIVE_DIR}")
      cwd "#{e}/bin"
      path GONANO_PATH
      user 'gonano'
      # exit code 0 from sniff claims the app for this engine
      on_exit { |code| engine = basename if code == 0 }
    end
  end
  if engine
    # set the engine in the registry for later use
    registry('engine', engine)
  else
    # no engine claimed the app; abort the build
    logtap.print(no_engine)
    exit Hookit::Exit::ABORT
  end
end
# Can't use engine_id because that is added by nanobox
# and won't exist in their local dev engine
if not enginefile[:name]
  logtap.print no_enginefile
end
# Report what was detected.
info = engine_info(
  engine_id,
  enginefile[:name],
  enginefile[:language],
  enginefile[:generic]
)
logtap.print info
|
# Draper decorator for Person: presentation helpers for names, team labels,
# gender and the youth flag.
class PersonDecorator < ApplicationDecorator
  decorates_association :team
  decorates_association :fire_sport_statistics_person

  # "First Last"
  def full_name
    "#{first_name} #{last_name}"
  end

  def team_name assessment_type=nil
    team_assessment_type_name [team.to_s], assessment_type
  end

  # Appends the assessment-type marker ("E" = single competitor,
  # "A" = out of competition) to the given name parts and joins them.
  def team_assessment_type_name name, assessment_type
    name.push("E") if assessment_type == "single_competitor"
    name.push("A") if assessment_type == "out_of_competition"
    name.join(" ")
  end

  def team_shortcut_name assessment_type=nil
    # Fix: people without a team have team == nil; calling shortcut_name
    # directly raised NoMethodError. try returns nil instead (team_name is
    # already nil-safe via to_s).
    team_assessment_type_name [team.try(:shortcut_name)], assessment_type
  end

  def translated_gender
    t("gender.#{gender}")
  end

  def to_s
    full_name
  end

  # Column headers matching name_cols.
  def self.human_name_cols
    ["Vorname", "Nachname", "Mannschaft"]
  end

  def name_cols assessment_type, shortcut
    team = shortcut ? team_shortcut_name(assessment_type) : team_name(assessment_type)
    [first_name, last_name, team]
  end

  def translated_youth
    youth? ? "Ja" : "Nein"
  end
end
Personen ohne Mannschaft korrekt anzeigen
# Draper decorator for Person: name, team-label, gender and youth
# presentation helpers.
class PersonDecorator < ApplicationDecorator
  decorates_association :team
  decorates_association :fire_sport_statistics_person

  # Combined given and family name.
  def full_name
    [first_name, last_name].join(" ")
  end

  def team_name assessment_type=nil
    team_assessment_type_name([team.to_s], assessment_type)
  end

  # Appends the assessment-type marker ("E" for single competitors,
  # "A" for out-of-competition entries) and joins the parts with spaces.
  def team_assessment_type_name name, assessment_type
    marker = { "single_competitor" => "E", "out_of_competition" => "A" }[assessment_type]
    name.push(marker) if marker
    name.join(" ")
  end

  def team_shortcut_name assessment_type=nil
    # team may be nil for people without a team; try keeps this nil-safe.
    team_assessment_type_name([team.try(:shortcut_name)], assessment_type)
  end

  def translated_gender
    t("gender.#{gender}")
  end

  def to_s
    full_name
  end

  # Column headers matching name_cols.
  def self.human_name_cols
    %w[Vorname Nachname Mannschaft]
  end

  def name_cols assessment_type, shortcut
    team_label =
      if shortcut
        team_shortcut_name(assessment_type)
      else
        team_name(assessment_type)
      end
    [first_name, last_name, team_label]
  end

  def translated_youth
    if youth?
      "Ja"
    else
      "Nein"
    end
  end
end
|
#!/usr/bin/env ruby
# Cordova after-hook: collects every <bridging-header-file> declared by the
# installed plugins and appends the referenced headers' lines (de-duplicated)
# to this plugin's union Swift bridging header.
require 'rexml/document'
require 'xcodeproj'
platformDir = Pathname('platforms').join('ios')
# The plugin id is the 4th ancestor directory of the running hook script.
plugin_id = Pathname(ENV['CORDOVA_HOOK']).dirname.dirname.dirname.dirname.basename
union_file = Pathname.glob(platformDir.join('*').join('Plugins').join(plugin_id).join('union-Bridging-Header.h'))[0]
puts "Union Header: #{union_file}"
lines = []
Pathname.glob('plugins/*/plugin.xml').each { |xmlFile|
  begin
    xml = REXML::Document.new(File.open(xmlFile))
    xml.elements.each('plugin/platform/bridging-header-file') { |elm|
      src_path = xmlFile.dirname.join(elm.attributes['src'])
      puts "Appending #{src_path}"
      # Fix: File.readlines does not take a block — the block was silently
      # ignored, so no lines were ever collected. Iterate the returned array.
      File.readlines(src_path).each { |line|
        line = line.chomp
        if line.length > 0 then
          lines << line
        end
      }
    }
  rescue => ex
    puts "Error on '#{xmlFile}': #{ex.message}"
  end
}
File.open(union_file, "a") { |dst|
  # Fix: '\n' in single quotes is a literal backslash-n; write real newlines.
  dst.puts lines.uniq
}
# Apply the given build-setting key/value pairs to every build configuration
# of every target in the Xcode project.
def build_settings(project, params)
  project.targets.each do |target|
    target.build_configurations.each do |config|
      config.build_settings.update(params)
    end
  end
end
# Open the generated Xcode project, regenerate user schemes, then disable
# bitcode and point Swift at the union bridging header for every target.
proj = Pathname.glob(platformDir.join('*.xcodeproj').to_path)[0]
puts "Editing #{proj}"
project = Xcodeproj::Project.open(proj)
project.recreate_user_schemes
build_settings(project,
  "OTHER_LDFLAGS" => "\$(inherited)",
  "ENABLE_BITCODE" => "NO",
  "SWIFT_OBJC_BRIDGING_HEADER" => union_file.relative_path_from(platformDir)
)
project.save
Pathname.join を使ってパスを指定
#!/usr/bin/env ruby
# Cordova after-hook: merges every plugin's declared bridging header into
# this plugin's union Swift bridging header (de-duplicated, appended).
require 'rexml/document'
require 'xcodeproj'
platformDir = Pathname('platforms').join('ios')
# The plugin id is the 4th ancestor directory of the running hook script.
plugin_id = Pathname(ENV['CORDOVA_HOOK']).dirname.dirname.dirname.dirname.basename
lines = []
Pathname.glob(Pathname('plugins').join('*').join('plugin.xml')).each { |xmlFile|
  begin
    xml = REXML::Document.new(File.open(xmlFile))
    xml.elements.each('plugin/platform/bridging-header-file') { |elm|
      src_path = xmlFile.dirname.join(elm.attributes['src'])
      puts "Appending #{src_path}"
      # Fix: File.readlines does not yield to a block — the block was
      # silently ignored, so nothing was ever collected. Iterate the array.
      File.readlines(src_path).each { |line|
        line = line.chomp
        if !(line.empty?) then
          lines << line
        end
      }
    }
  rescue => ex
    puts "Error on '#{xmlFile}': #{ex.message}"
  end
}
union_file = Pathname.glob(platformDir.join('*').join('Plugins').join(plugin_id).join('union-Bridging-Header.h'))[0]
puts "Union Header: #{union_file}: #{lines}"
File.open(union_file, "a") { |dst|
  # Fix: '\n' in single quotes is a literal backslash-n; write real newlines.
  dst.puts lines.uniq
}
# Apply the given build-setting key/value pairs to every build configuration
# of every target in the Xcode project.
def build_settings(project, params)
  project.targets.each do |target|
    target.build_configurations.each do |config|
      config.build_settings.update(params)
    end
  end
end
# Open the generated Xcode project, regenerate user schemes, then disable
# bitcode and point Swift at the union bridging header for every target.
proj = Pathname.glob(platformDir.join('*.xcodeproj').to_path)[0]
puts "Editing #{proj}"
project = Xcodeproj::Project.open(proj)
project.recreate_user_schemes
build_settings(project,
  "OTHER_LDFLAGS" => "\$(inherited)",
  "ENABLE_BITCODE" => "NO",
  "SWIFT_OBJC_BRIDGING_HEADER" => union_file.relative_path_from(platformDir)
)
project.save
|
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "settings-manager/version"

# Gem specification for rails-settings-manager.
Gem::Specification.new do |spec|
  spec.name = "rails-settings-manager"
  spec.version = SettingsManager::VERSION
  spec.authors = ["Florian Nitschmann"]
  spec.email = ["f.nitschmann@googlemail.com"]
  spec.homepage = "https://github.com/fnitschmann/rails-settings-manager"
  spec.license = "MIT"
  # Fix: summary was empty and description still held the generator's "TODO"
  # placeholder, which 'gem build' rejects as invalid metadata.
  spec.summary = "Global settings management for Rails applications with ActiveRecord"
  spec.description = "A simple extension-plugin for Ruby on Rails applications " \
    "for global settings management in the database with an easy key-value " \
    "interface, backed by standard ActiveRecord methods."
  spec.files = Dir.glob("lib/**/*") + ["README.md", "LICENSE.txt"]
  spec.require_paths = ["lib"]
  spec.required_ruby_version = ">= 2.1.0"
  spec.add_dependency "rails", "~> 4.2.0"
  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "pry-rails"
end
Add summary and description metadata to the .gemspec
# coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "settings-manager/version"

# Gem specification for rails-settings-manager.
Gem::Specification.new do |spec|
  spec.name = "rails-settings-manager"
  spec.version = SettingsManager::VERSION
  spec.authors = ["Florian Nitschmann"]
  spec.email = ["f.nitschmann@googlemail.com"]
  spec.homepage = "https://github.com/fnitschmann/rails-settings-manager"
  spec.license = "MIT"
  spec.summary = "Global settings management for Rails applications with ActiveRecord"
  # NOTE(review): the triple quotes below are just adjacent string literals
  # in Ruby ("" + "..." + ""), so the description keeps its embedded leading
  # and trailing newlines; a squiggly heredoc (<<~DESC) would be cleaner.
  spec.description = """
A simple extension-plugin for Ruby on Rails application for global settings management in the Database with an easy key-value interface.
It keeps track of the settings with the help of standard ActiveRecord methods.
"""
  spec.files = Dir.glob("lib/**/*") + ["README.md", "LICENSE.txt"]
  spec.require_paths = ["lib"]
  spec.required_ruby_version = ">= 2.1.0"
  spec.add_dependency "rails", "~> 4.2.0"
  spec.add_development_dependency "bundler", "~> 1.10"
  spec.add_development_dependency "rspec"
  spec.add_development_dependency "pry-rails"
end
|
module DoorRandomizer
# Raised when door shuffling leaves some rooms unreachable; callers rescue it
# to retry the affected subsector (see randomize_non_transition_doors).
class NotAllRoomsAreConnectedError < StandardError ; end
# Shuffles the connections between area-transition rooms so that every
# subsector of each area remains reachable from the randomly chosen starting
# transition. Door destination changes are queued and only written to ROM
# once a whole area links up; raises if the transitions cannot all connect.
def randomize_transition_doors
  queued_door_changes = Hash.new{|h, k| h[k] = {}}
  game.areas.each do |area|
    all_area_transition_rooms = @transition_rooms.select do |transition_room|
      transition_room.area_index == area.area_index
    end
    if all_area_transition_rooms.size <= 2
      # Not enough transition rooms in this area to properly randomize it. Need at least 3.
      next
    end
    all_area_subsectors = []
    area.sectors.each do |sector|
      subsectors = get_subsectors(sector, include_transitions: true, use_subrooms: false)
      all_area_subsectors += subsectors
    end
    remaining_transitions = {
      left: [],
      right: [],
    }
    other_transitions_in_same_subsector = {}
    accessible_unused_transitions = []
    transition_doors_by_subsector = Array.new(all_area_subsectors.size){ [] }
    starting_transition = nil
    # First we make a list of the transition doors, specifically the left door in a transition room, and the right door that leads into that transition room.
    all_area_transition_rooms.each do |transition_room|
      if GAME == "por" && transition_room.room_str == "00-01-01"
        # The first transition room between the outside and inside parts of the Entrance.
        # Don't randomize this connection, so the shop and Wind are always close to the start of the game.
        next
      end
      if GAME == "por" && transition_room.room_str == "00-0A-00"
        # The transition room leading to the Throne Room behind the barrier.
        # Don't randomize this connection, always have the Throne Room behind the barrier.
        next
      end
      if GAME == "ooe" && ["00-0A-00", "00-0A-07", "00-0A-13"].include?(transition_room.room_str)
        # The transition rooms in the Final Approach.
        # Don't randomize these connections, since it could result in progression being impossible.
        next
      end
      transition_door = transition_room.doors.find{|door| door.direction == :left}
      dest_door = transition_door.destination_door
      remaining_transitions[transition_door.direction] << transition_door
      remaining_transitions[dest_door.direction] << dest_door
    end
    # Then we go through each transition door and keep track of what subsector it's located in.
    remaining_transitions.values.flatten.each do |transition_door|
      if transition_door.direction == :right
        # The door leading right into a transition room.
        # This is part of the sector proper, so we just use this room itself to detect the proper subsector.
        room_in_desired_subsector = transition_door.room
      else
        # The door leading left out of the transition room.
        # We want the subsector to the right. But since this is in the transition room, we have no idea what subsector the transition room itself is in.
        # So follow the right door out of the transition room, and use the room there to detect the proper subsector.
        room_in_desired_subsector = transition_door.room.doors.find{|d| d.direction == :right}.destination_room
      end
      all_area_subsectors.each_with_index do |subsector_rooms, subsector_index|
        if subsector_rooms.include?(room_in_desired_subsector)
          transition_doors_by_subsector[subsector_index] << transition_door
          other_transitions_in_same_subsector[transition_door] = transition_doors_by_subsector[subsector_index]
          break
        end
      end
      if other_transitions_in_same_subsector[transition_door].nil?
        puts all_area_subsectors.flatten.map{|x| x.room_str}
        raise "#{transition_door.door_str} can't be found in any subsector"
      end
    end
    #other_transitions_in_same_subsector.each do |k, v|
    #  puts "#{k.door_str}: #{v.map{|d| d.door_str}.join(", ")}"
    #end
    starting_transition = remaining_transitions.values.flatten.sample(random: rng)
    on_first = true
    # Repeatedly pick an accessible unused transition and connect it to an
    # inaccessible one, expanding the reachable set until none remain.
    while true
      debug = false
      #debug = (area.area_index == 5)
      if on_first
        inside_transition_door = starting_transition
        on_first = false
      else
        inside_transition_door = accessible_unused_transitions.sample(random: rng)
      end
      puts "(area connections) inside door: #{inside_transition_door.door_str}" if debug
      inside_door_opposite_direction = case inside_transition_door.direction
      when :left
        :right
      when :right
        :left
      end
      inaccessible_remaining_matching_doors = remaining_transitions[inside_door_opposite_direction] - accessible_unused_transitions
      inaccessible_remaining_matching_doors -= other_transitions_in_same_subsector[inside_transition_door]
      inaccessible_remaining_matching_doors_with_other_exits = inaccessible_remaining_matching_doors.select do |door|
        new_subsector_exits = (other_transitions_in_same_subsector[door] & remaining_transitions.values.flatten) - [door]
        new_subsector_exits.any?
      end
      if inaccessible_remaining_matching_doors_with_other_exits.any?
        # There are doors we can swap with that allow more progress to new subsectors.
        possible_dest_doors = inaccessible_remaining_matching_doors_with_other_exits
        puts "TRANSITION TYPE 1" if debug
      elsif inaccessible_remaining_matching_doors.any?
        # There are doors we can swap with that will allow you to reach one new subsector which is a dead end.
        possible_dest_doors = inaccessible_remaining_matching_doors
        puts "TRANSITION TYPE 2" if debug
      elsif remaining_transitions[inside_door_opposite_direction].any?
        # This door direction doesn't have any more matching doors left to swap with that will result in progress.
        # So just pick any matching door.
        possible_dest_doors = remaining_transitions[inside_door_opposite_direction]
        puts "TRANSITION TYPE 3" if debug
      else
        # This door direction doesn't have any matching doors left.
        puts "TRANSITION TYPE 4" if debug
        raise "Area connections randomizer: Could not link all subsectors!"
      end
      outside_transition_door = possible_dest_doors.sample(random: rng)
      puts "(area connections) outside door: #{outside_transition_door.door_str}" if debug
      remaining_transitions[inside_transition_door.direction].delete(inside_transition_door)
      remaining_transitions[outside_transition_door.direction].delete(outside_transition_door)
      if queued_door_changes[inside_transition_door].any?
        puts "changed inside transition door twice: #{inside_transition_door.door_str}"
        raise "Changed a transition door twice"
      end
      if queued_door_changes[outside_transition_door].any?
        puts "changed outside transition door twice: #{outside_transition_door.door_str}"
        raise "Changed a transition door twice"
      end
      # Link the two doors to each other in both directions.
      queued_door_changes[inside_transition_door]["destination_room_metadata_ram_pointer"] = outside_transition_door.room.room_metadata_ram_pointer
      queued_door_changes[inside_transition_door]["dest_x"] = outside_transition_door.destination_door.dest_x
      queued_door_changes[inside_transition_door]["dest_y"] = outside_transition_door.destination_door.dest_y
      queued_door_changes[outside_transition_door]["destination_room_metadata_ram_pointer"] = inside_transition_door.room.room_metadata_ram_pointer
      queued_door_changes[outside_transition_door]["dest_x"] = inside_transition_door.destination_door.dest_x
      queued_door_changes[outside_transition_door]["dest_y"] = inside_transition_door.destination_door.dest_y
      #puts "accessible_unused_transitions before: #{accessible_unused_transitions.map{|d| d.door_str}}"
      accessible_unused_transitions.delete(inside_transition_door)
      accessible_unused_transitions.delete(outside_transition_door)
      accessible_unused_transitions += (other_transitions_in_same_subsector[inside_transition_door] & remaining_transitions.values.flatten)
      accessible_unused_transitions += (other_transitions_in_same_subsector[outside_transition_door] & remaining_transitions.values.flatten)
      accessible_unused_transitions.uniq!
      #puts "accessible_unused_transitions after: #{accessible_unused_transitions.map{|d| d.door_str}}"
      if accessible_unused_transitions.empty?
        if remaining_transitions.values.flatten.size == 0
          break
        else
          raise "Area connections randomizer: Not all sectors connected: #{remaining_transitions.values.flatten.map{|door| door.door_str}}"
        end
      end
    end
  end
  # Commit all queued changes to the ROM, then visually line up each changed
  # door pair once (tracked by door_str to avoid doing both sides).
  doors_to_line_up = []
  queued_door_changes.each do |door, changes|
    changes.each do |attribute_name, new_value|
      door.send("#{attribute_name}=", new_value)
    end
    unless doors_to_line_up.include?(door.destination_door)
      doors_to_line_up << door
    end
    door.write_to_rom()
  end
  lined_up_door_strs = []
  doors_to_line_up.each do |door|
    next if lined_up_door_strs.include?(door.destination_door.door_str)
    lined_up_door_strs << door.door_str
    line_up_door(door)
  end
end
# Shuffles the regular (non-transition) doors within each sector, one
# subsector at a time, retrying a subsector (up to
# @max_room_rando_subsector_redos) when NotAllRoomsAreConnectedError signals
# an unreachable room. Queued changes are then written to ROM, door pairs are
# visually lined up, and boss doors are fixed up.
def randomize_non_transition_doors
  # We make sure every room in an area is accessible. This is to prevent infinite loops of a small number of rooms that connect to each other with no way to progress.
  # Loop through each room. search for remaining rooms that have a matching door. But the room we find must also have remaining doors in it besides the one we swap with so it's not a dead end, or a loop. If there are no rooms that meet those conditions, then we go with the more lax condition of just having a matching door, allowing dead ends.
  # Make a list of doors that lead into transition rooms so we can tell these apart from regular doors.
  transition_doors = []
  @transition_rooms.each do |room|
    room.doors.each do |inside_door|
      transition_doors << inside_door.destination_door
    end
  end
  @randomize_up_down_doors = true
  queued_door_changes = Hash.new{|h, k| h[k] = {}}
  game.areas.each do |area|
    if GAME == "ooe" && area.area_index == 2
      # Don't randomize Ecclesia.
      next
    end
    area.sectors.each do |sector|
      if GAME == "ooe" && area.area_index == 7 && sector.sector_index == 1
        # Don't randomize Rusalka's sector. It's too small to do anything with properly.
        next
      end
      # First get the "subsectors" in this sector.
      # A subsector is a group of rooms in a sector that can access each other.
      # This separates certains sectors into multiple parts like the first sector of PoR.
      subsectors = get_subsectors(sector)
      redo_counts_for_subsector = Hash.new(0)
      subsectors.each_with_index do |subsector_rooms, subsector_index|
        # Snapshot queued changes so a failed attempt can be rolled back.
        orig_queued_door_changes = queued_door_changes.dup
        begin
          randomize_non_transition_doors_for_subsector(subsector_rooms, subsector_index, area, sector, queued_door_changes, transition_doors)
        rescue NotAllRoomsAreConnectedError => e
          redo_counts_for_subsector[subsector_index] += 1
          if redo_counts_for_subsector[subsector_index] > @max_room_rando_subsector_redos
            raise "Bug: Door randomizer failed to connect all rooms in subsector #{subsector_index} in %02X-%02X more than #{@max_room_rando_subsector_redos} times" % [area.area_index, sector.sector_index]
          end
          puts "Door randomizer needed to redo subsector #{subsector_index} in %02X-%02X" % [area.area_index, sector.sector_index]
          queued_door_changes = orig_queued_door_changes
          redo
        end
      end
    end
  end
  # Commit queued changes to ROM, then line up each changed door pair once.
  doors_to_line_up = []
  queued_door_changes.each do |door, changes|
    changes.each do |attribute_name, new_value|
      door.send("#{attribute_name}=", new_value)
    end
    unless doors_to_line_up.include?(door.destination_door)
      doors_to_line_up << door
    end
    door.write_to_rom()
  end
  lined_up_door_strs = []
  doors_to_line_up.each do |door|
    next if lined_up_door_strs.include?(door.destination_door.door_str)
    lined_up_door_strs << door.door_str
    line_up_door(door)
  end
  replace_outer_boss_doors()
  center_bosses_for_room_rando()
end
def randomize_non_transition_doors_for_subsector(subsector_rooms, subsector_index, area, sector, queued_door_changes, transition_doors)
if GAME == "por" && area.area_index == 0 && sector.sector_index == 0 && subsector_index == 0
# Don't randomize first subsector in PoR.
return
end
if GAME == "por" && area.area_index == 5 && sector.sector_index == 2 && subsector_index == 0
# Don't randomize the middle sector in Nation of Fools with Legion.
# The randomizer never connects all the rooms properly, and Legion further complicates things anyway, so don't bother.
return
end
prioritize_up_down = true
if GAME == "por" && area.area_index == 6 && sector.sector_index == 0 && [1, 3].include?(subsector_index)
# The center-left and center-right parts of Burnt Paradise. These only have a few left/right doors, so it screws up if we prioritize up/down doors.
prioritize_up_down = false
end
#if sector.sector_index == 2
# puts "On subsector: #{subsector_index}"
# puts "Subsector rooms:"
# subsector_rooms.each do |room|
# puts " %08X" % room.room_metadata_ram_pointer
# end
#end
remaining_doors = get_valid_doors(subsector_rooms, sector)
if remaining_doors[:left].size != remaining_doors[:right].size
raise "Subsector #{subsector_index} of %02X-%02X has an unmatching number of left/right doors!\nleft: #{remaining_doors[:left].size}, right: #{remaining_doors[:right].size}, up: #{remaining_doors[:up].size}, down: #{remaining_doors[:down].size}," % [area.area_index, sector.sector_index]
end
if remaining_doors[:up].size != remaining_doors[:down].size
raise "Subsector #{subsector_index} of %02X-%02X has an unmatching number of up/down doors!\nleft: #{remaining_doors[:left].size}, right: #{remaining_doors[:right].size}, up: #{remaining_doors[:up].size}, down: #{remaining_doors[:down].size}," % [area.area_index, sector.sector_index]
end
#if sector.sector_index == 1
# remaining_doors.values.flatten.each do |door|
# puts " #{door.door_str}"
# end
# puts "num doors: #{remaining_doors.values.flatten.size}"
# gets
#end
all_randomizable_doors = remaining_doors.values.flatten
all_rooms = all_randomizable_doors.map{|door| door.room}.uniq
if all_rooms.empty?
# No doors in this sector
return
end
unvisited_rooms = all_rooms.dup
accessible_remaining_doors = []
current_room = unvisited_rooms.sample(random: rng)
while true
debug = false
#debug = (area.area_index == 0x6 && sector.sector_index == 0 && subsector_index == 1)
#puts remaining_doors[:down].map{|d| d.door_str} if debug
#gets if debug
puts "on room #{current_room.room_str}" if debug
unvisited_rooms.delete(current_room)
accessible_remaining_doors += remaining_doors.values.flatten.select{|door| door.room == current_room}
accessible_remaining_doors.uniq!
accessible_remaining_doors = accessible_remaining_doors & remaining_doors.values.flatten
if accessible_remaining_doors.empty?
break
end
accessible_remaining_updown_doors = accessible_remaining_doors.select{|door| [:up, :down].include?(door.direction)}
if accessible_remaining_updown_doors.any?
# Always prioritize doing up and down doors first.
inside_door = accessible_remaining_updown_doors.sample(random: rng)
else
inside_door = accessible_remaining_doors.sample(random: rng)
end
remaining_doors[inside_door.direction].delete(inside_door)
accessible_remaining_doors.delete(inside_door)
accessible_remaining_leftright_doors = accessible_remaining_doors.select{|door| [:left, :right].include?(door.direction)}
puts "inside door chosen: #{inside_door.door_str}" if debug
inside_door_opposite_direction = case inside_door.direction
when :left
:right
when :right
:left
when :up
:down
when :down
:up
end
inaccessible_remaining_matching_doors = remaining_doors[inside_door_opposite_direction] - accessible_remaining_doors
#puts "REMAINING: #{remaining_doors[inside_door_opposite_direction].map{|x| " #{x.door_str}\n"}}"
inaccessible_remaining_matching_doors_with_other_exits = inaccessible_remaining_matching_doors.select do |door|
((door.room.doors & all_randomizable_doors) - transition_doors).length > 1 && unvisited_rooms.include?(door.room)
end
inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright = []
inaccessible_remaining_matching_doors_with_no_leftright_door_exits = []
inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits = []
if [:left, :right].include?(inside_door.direction)
# If we're on a left/right door, prioritize going to new rooms that have an up/down door so we don't get locked out of having any up/down doors to work with.
inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright = inaccessible_remaining_matching_doors_with_other_exits.select do |door|
if door.direction == :left || door.direction == :right
((door.room.doors & all_randomizable_doors) - transition_doors).any?{|x| x.direction == :up || x.direction == :down}
end
end
else
# If we're on an up/down door, prioritize going to new rooms that DON'T have any usable left/right doors in them.
# This is because those rooms with left/right doors are more easily accessible via left/right doors. We need to prioritize the ones that only have up/down doors as they're trickier to make the logic place.
inaccessible_remaining_matching_doors_with_no_leftright_door_exits = inaccessible_remaining_matching_doors.select do |door|
if door.direction == :up || door.direction == :down
((door.room.doors & all_randomizable_doors) - transition_doors).none?{|x| x.direction == :left || x.direction == :right}
end
end
inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits = inaccessible_remaining_matching_doors_with_other_exits.select do |door|
if door.direction == :up || door.direction == :down
((door.room.doors & all_randomizable_doors) - transition_doors).none?{|x| x.direction == :left || x.direction == :right}
end
end
if debug && inaccessible_remaining_matching_doors_with_no_leftright_door_exits.any?
puts "Found up/down doors with no left/right exits in destination:"
inaccessible_remaining_matching_doors_with_no_leftright_door_exits.each{|x| puts " #{x.door_str}"}
end
end
remaining_inaccessible_rooms_with_up_down_doors = (remaining_doors[:up] + remaining_doors[:down] - accessible_remaining_doors).map{|d| d.room}.uniq
if inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits.any? && remaining_inaccessible_rooms_with_up_down_doors.size > 1 && prioritize_up_down
# There are doors we can swap with that allow you to reach a new room which allows more progress, but only via up/down doors.
# We want to prioritize these because they can't be gotten into via left/right doors like rooms that have at least one left/right.
possible_dest_doors = inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits
puts "TYPE 1" if debug
elsif inaccessible_remaining_matching_doors_with_no_leftright_door_exits.any? && accessible_remaining_leftright_doors.size >= 1
# There are doors we can swap with that allow you to reach a new room which is a dead end, but is a dead end with only up/down doors.
# We want to prioritize these because they can't be gotten into via left/right doors like rooms that have at least one left/right.
# Note that we also only take this option if there's at least 1 accessible left/right door for us to still use. If there's not this would deadend us instantly.
possible_dest_doors = inaccessible_remaining_matching_doors_with_no_leftright_door_exits
puts "TYPE 2" if debug
elsif inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright.any? && remaining_inaccessible_rooms_with_up_down_doors.size > 1 && prioritize_up_down
# There are doors we can swap with that allow more progress, and also allow accessing a new up/down door from a left/right door.
possible_dest_doors = inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright
puts "TYPE 3" if debug
elsif inaccessible_remaining_matching_doors_with_other_exits.any?
# There are doors we can swap with that allow more progress.
possible_dest_doors = inaccessible_remaining_matching_doors_with_other_exits
puts "TYPE 4" if debug
elsif inaccessible_remaining_matching_doors.any?
# There are doors we can swap with that will allow you to reach one new room which is a dead end.
possible_dest_doors = inaccessible_remaining_matching_doors
puts "TYPE 5" if debug
elsif remaining_doors[inside_door_opposite_direction].any?
# This door direction doesn't have any more matching doors left to swap with that will result in progress.
# So just pick any matching door.
possible_dest_doors = remaining_doors[inside_door_opposite_direction]
puts "TYPE 6" if debug
else
# This door direction doesn't have any matching doors left.
# Don't do anything to this door.
puts "TYPE 7" if debug
#puts "#{inside_door.direction} empty"
#
#accessible_rooms = accessible_remaining_doors.map{|door| door.room}.uniq
#accessible_rooms -= [current_room]
#
#current_room = accessible_rooms.sample(random: rng)
#p accessible_remaining_doors.size
#gets
raise "No remaining matching doors to connect to! Door #{inside_door.door_str}, subsector #{subsector_index} of %02X-%02X" % [area.area_index, sector.sector_index]
current_room = unvisited_rooms.sample(random: rng)
if current_room.nil?
current_room = all_rooms.sample(random: rng)
end
if remaining_doors.values.flatten.empty?
break
end
next
end
if !@randomize_up_down_doors && [:up, :down].include?(inside_door.direction)
# Don't randomize up/down doors. This is a temporary hacky measure to greatly reduce failures at connecting all rooms in a subsector.
new_dest_door = inside_door.destination_door
# Also need to convert this door to a subroomdoor, if applicable.
new_dest_door = all_randomizable_doors.find{|subroomdoor| subroomdoor.door_str == new_dest_door.door_str}
else
new_dest_door = possible_dest_doors.sample(random: rng)
end
remaining_doors[new_dest_door.direction].delete(new_dest_door)
current_room = new_dest_door.room
if queued_door_changes[inside_door].any? || queued_door_changes[new_dest_door].any?
raise "Changed a door twice"
end
queued_door_changes[inside_door]["destination_room_metadata_ram_pointer"] = new_dest_door.room.room_metadata_ram_pointer
queued_door_changes[inside_door]["dest_x"] = new_dest_door.destination_door.dest_x
queued_door_changes[inside_door]["dest_y"] = new_dest_door.destination_door.dest_y
queued_door_changes[new_dest_door]["destination_room_metadata_ram_pointer"] = inside_door.room.room_metadata_ram_pointer
queued_door_changes[new_dest_door]["dest_x"] = inside_door.destination_door.dest_x
queued_door_changes[new_dest_door]["dest_y"] = inside_door.destination_door.dest_y
if debug
puts "inside_door: #{inside_door.door_str}"
#puts "old_outside_door: %08X" % old_outside_door.door_ram_pointer
#puts "inside_door_to_swap_with: %08X" % inside_door_to_swap_with.door_ram_pointer
puts "new_outside_door: #{new_dest_door.door_str}"
puts "dest room: #{new_dest_door.room.room_str}"
puts
#break
end
end
if unvisited_rooms.any?
puts "Failed to connect the following rooms:"
unvisited_rooms.each do |room|
puts " #{room.room_str}"
end
raise NotAllRoomsAreConnectedError.new("Room connections randomizer failed to connect some rooms.")
end
end
# Partitions a sector's rooms into "subsectors": groups of rooms that are
# mutually reachable through their doors (a flood fill over door connections).
#
# sector              - the sector whose rooms should be grouped.
# include_transitions - when false, transition rooms are excluded and door
#                       links into them do not join subsectors together.
# use_subrooms        - when true (and room rando is active), rooms are first
#                       split into subrooms via the checker so each subroom is
#                       grouped independently.
#
# Returns an array of subsectors, each an array of rooms (or subrooms).
def get_subsectors(sector, include_transitions: false, use_subrooms: true)
  subsectors = []
  debug = false
  #debug = (sector.area_index == 5 && sector.sector_index == 2)
  transition_room_strs = @transition_rooms.map{|room| room.room_str}
  if options[:randomize_rooms_map_friendly]
    room_strs_unused_by_map_rando = @rooms_unused_by_map_rando.map{|room| room.room_str}
  end
  if room_rando? && use_subrooms
    # First convert the rooms to subrooms.
    sector_subrooms = checker.convert_rooms_to_subrooms(sector.rooms)
  else
    sector_subrooms = sector.rooms
  end
  remaining_rooms_to_check = sector_subrooms.dup
  remaining_rooms_to_check -= @transition_rooms unless include_transitions
  # Outer loop: each iteration flood-fills one complete subsector starting
  # from an arbitrary room that hasn't been assigned to a subsector yet.
  while remaining_rooms_to_check.any?
    current_subsector = []
    puts "STARTING NEW SUBSECTOR" if debug
    current_room = remaining_rooms_to_check.first
    while true
      puts "Current room: #{current_room.room_str}" if debug
      remaining_rooms_to_check.delete(current_room)
      if options[:randomize_rooms_map_friendly] && room_strs_unused_by_map_rando.include?(current_room.room_str)
        # Skip rooms not used by the map friendly room randomizer.
        remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
        break if remaining_subsector_rooms.empty?
        current_room = remaining_subsector_rooms.first
        next
      end
      # Only consider doors that are actually usable: not flagged inaccessible
      # by the checker and not dummied out.
      current_room_doors = current_room.doors.reject{|door| checker.inaccessible_doors.include?(door.door_str)}
      current_room_doors = current_room_doors.reject{|door| door.destination_room_metadata_ram_pointer == 0} # Door dummied out by the map-friendly room randomizer.
      if current_room_doors.empty?
        # Unused room with no door. Don't add it to the list of rooms in the subsector.
        remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
        break if remaining_subsector_rooms.empty?
        current_room = remaining_subsector_rooms.first
        next
      end
      current_subsector << current_room
      # Find all rooms reachable from here: rooms owning a door that is the
      # destination of one of this room's doors. Matching is done on door
      # strings so rooms and subrooms compare consistently.
      connected_dest_door_strs = current_room_doors.map{|door| door.destination_door.door_str}
      connected_rooms = sector_subrooms.select do |room|
        (room.doors.map{|d| d.door_str} & connected_dest_door_strs).any?
      end
      if GAME == "dos" && current_room.sector.name == "Condemned Tower & Mine of Judgment"
        # Need to split Condemned Tower from Mine of Judgement into separate subsectors.
        # The two halves are distinguished by map y position (0x17 is the boundary row).
        if current_room.room_ypos_on_map >= 0x17
          # Current subsector is Mine of Judgement, so remove Condemned Tower rooms.
          connected_rooms.reject!{|room| room.room_ypos_on_map < 0x17}
        else
          # Current subsector is Condemned Tower, so remove Mine of Judgement rooms.
          connected_rooms.reject!{|room| room.room_ypos_on_map >= 0x17}
        end
      end
      unless include_transitions
        connected_rooms.reject!{|connected_room| transition_room_strs.include?(connected_room.room_str)}
      end
      current_subsector += connected_rooms
      current_subsector.uniq!
      puts "Current subsector so far: #{current_subsector.map{|room| room.room_str}}" if debug
      puts "Remaining rooms to check: #{remaining_rooms_to_check.map{|room| room.room_str}}" if debug
      puts "A: #{current_subsector.map{|x| x.class.to_s}.join(", ")}" if debug
      puts "B: #{remaining_rooms_to_check.map{|x| x.class.to_s}.join(", ")}" if debug
      # Continue the flood fill with any discovered room we haven't visited;
      # when none remain, this subsector is complete.
      remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
      break if remaining_subsector_rooms.empty?
      current_room = remaining_subsector_rooms.first
    end
    subsectors += [current_subsector]
  end
  return subsectors
end
# Collects the doors in the given rooms that are eligible for randomization,
# bucketed by direction.
#
# rooms  - the rooms (or subrooms) to scan for doors.
# sector - used to look up the map for this area/sector, so doors can be
#          checked against the in-game map tiles.
#
# Returns a hash {left: [...], up: [...], right: [...], down: [...]}.
#
# A door is excluded when:
#   - its room (or destination room) is a transition room,
#   - the checker marks it inaccessible,
#   - it is one of the hardcoded game-specific doors that must stay fixed,
#   - it has no corresponding map tile (unused door), or
#   - the map shows a wall where the door would be (leftover/inaccessible door).
def get_valid_doors(rooms, sector)
  remaining_doors = {
    left: [],
    up: [],
    right: [],
    down: []
  }
  map = game.get_map(sector.area_index, sector.sector_index)
  rooms.each do |room|
    next if @transition_rooms.include?(room)
    room.doors.each do |door|
      next if @transition_rooms.include?(door.destination_door.room)
      next if checker.inaccessible_doors.include?(door.door_str)
      if GAME == "dos" && ["00-01-1C_001", "00-01-20_000"].include?(door.door_str)
        # Don't randomize the door connecting Paranoia and Mini-Paranoia.
        next
      end
      if GAME == "dos" && ["00-05-0C_001", "00-05-18_000"].include?(door.door_str)
        # Don't randomize the up/down door connecting Condemned Tower and Mine of Judgement.
        next
      end
      if GAME == "por" && ["00-01-04_001", "00-01-03_005", "00-01-03_000", "00-01-18_000", "00-01-03_004", "00-01-06_000", "00-01-06_001", "00-01-09_000"].include?(door.door_str)
        # Don't randomize the rooms around the Entrance hub (warp room, Wind, shop, etc).
        next
      end
      if GAME == "ooe" && ["0C-00-0E_000", "0C-00-0F_000", "0C-00-0F_001", "0C-00-10_000"].include?(door.door_str)
        # Don't randomize the doors in Large Cavern connecting the warp room, one-way room, and boss room.
        next
      end
      # Work out which map tile this door sits on. A door x/y of 0xFF means
      # the door is on the room's left/top edge; positions past the room's
      # main layer size are clamped to the last screen of the room.
      map_tile_x_pos = room.room_xpos_on_map
      map_tile_y_pos = room.room_ypos_on_map
      if door.x_pos == 0xFF
        # Do nothing
      elsif door.x_pos >= room.main_layer_width
        map_tile_x_pos += room.main_layer_width - 1
      else
        map_tile_x_pos += door.x_pos
      end
      if door.y_pos == 0xFF
        # Do nothing
      elsif door.y_pos >= room.main_layer_height
        map_tile_y_pos += room.main_layer_height - 1
      else
        map_tile_y_pos += door.y_pos
      end
      map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos && tile.y_pos == map_tile_y_pos}
      if map_tile.nil?
        # Door that's not on the map, just an unused door.
        next
      end
      # If the door is shown on the map as a wall, skip it.
      # Those are leftover doors not intended to be used, and are inaccessible (except with warp glitches).
      case door.direction
      when :left
        next if map_tile.left_wall
      when :right
        next if map_tile.right_wall
        if GAME == "dos" || GAME == "aos"
          # Right walls in DoS are handled as the left wall of the tile to the right.
          right_map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos+1 && tile.y_pos == map_tile_y_pos}
          next if right_map_tile.left_wall
        end
      when :up
        next if map_tile.top_wall
      when :down
        next if map_tile.bottom_wall
        if GAME == "dos" || GAME == "aos"
          # Bottom walls in DoS are handled as the top wall of the tile below.
          below_map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos && tile.y_pos == map_tile_y_pos+1}
          next if below_map_tile.top_wall
        end
      end
      remaining_doors[door.direction] << door
    end
  end
  return remaining_doors
end
# Deletes entities that block passage between rooms (gates, breakable walls,
# event triggers, etc) so the room randomizer can't strand the player, and
# updates the in-game map so former secret walls display as doors.
#
# Writes all changes directly to the ROM. No parameters; no return value.
def remove_door_blockers
  # Special-object subtype IDs to delete outright, per game.
  # NOTE(review): the meaning of each individual subtype ID is not documented
  # here — presumed to be the path-blocking objects for each game; verify
  # against the game's object lists before editing.
  obj_subtypes_to_remove = case GAME
  when "dos"
    [0x43, 0x44, 0x46, 0x57, 0x1E, 0x2B, 0x26, 0x2A, 0x29, 0x45, 0x24, 0x37, 0x04]
  when "por"
    [0x37, 0x30, 0x89, 0x38, 0x2F, 0x36, 0x32, 0x31, 0x88, 0x26, 0x46, 0x41, 0xB2, 0xB3, 0x2E, 0x40, 0x83]
  when "ooe"
    [0x5B, 0x5A, 0x59]
  end
  # Breakable walls are only removed when they match an entry in
  # PATH_BLOCKING_BREAKABLE_WALLS (non-blocking ones are left alone).
  breakable_wall_subtype = case GAME
  when "dos"
    nil # All breakable walls are path blocking in DoS, so they're instead included in obj_subtypes_to_remove.
  when "por"
    0x3B
  when "ooe"
    0x5C
  end
  game.each_room do |room|
    room.entities.each do |entity|
      if entity.is_special_object? && obj_subtypes_to_remove.include?(entity.subtype)
        # Setting type to 0 deletes the entity.
        entity.type = 0
        entity.write_to_rom()
      elsif entity.is_special_object? && entity.subtype == breakable_wall_subtype
        case GAME
        when "por"
          # PoR breakable walls are identified by var_a plus area (and sector
          # for area 0, since var_a values repeat across sectors there).
          PATH_BLOCKING_BREAKABLE_WALLS.each do |wall_data|
            if entity.var_a == wall_data[:var_a] && entity.room.area_index == wall_data[:area_index] && (entity.room.area_index != 0 || entity.room.sector_index == wall_data[:sector_index])
              entity.type = 0
              entity.write_to_rom()
              break
            end
          end
        when "ooe"
          # OoE breakable walls are identified by the var_a/var_b pair.
          PATH_BLOCKING_BREAKABLE_WALLS.each do |wall_vars|
            if entity.var_a == wall_vars[:var_a] && entity.var_b == wall_vars[:var_b]
              entity.type = 0
              entity.write_to_rom()
              break
            end
          end
        end
      end
    end
  end
  # Per-game special cases that aren't simple subtype-based blockers.
  case GAME
  when "dos"
    # Remove the water from the drawbridge room.
    drawbridge_room_waterlevel = game.entity_by_str("00-00-15_04")
    drawbridge_room_waterlevel.type = 0
    drawbridge_room_waterlevel.write_to_rom()
    # Remove the cutscene with Yoko before Flying Armor since it doesn't work properly if you don't have Yoko with you.
    pre_flying_armor_room = game.room_by_str("00-00-0E")
    [9, 0xA, 0xB].each do |entity_index|
      entity = pre_flying_armor_room.entities[entity_index]
      entity.type = 0
      entity.write_to_rom()
    end
    # Remove the cutscene with Dmitrii because it doesn't work properly when you enter from the left side.
    dmitrii_room = game.room_by_str("00-04-10")
    [3, 4, 6, 7].each do |entity_index|
      entity = dmitrii_room.entities[entity_index]
      entity.type = 0
      entity.write_to_rom()
    end
    # And change Dmitrii to boss rush Dmitrii so he doesn't crash when there's no event.
    dmitrii = dmitrii_room.entities[5]
    dmitrii.var_a = 0
    dmitrii.write_to_rom()
    # Remove the cutscene with Dario because it doesn't work properly when you enter from the left side.
    dario_room = game.room_by_str("00-03-0B")
    [2, 3, 4, 6].each do |entity_index|
      entity = dario_room.entities[entity_index]
      entity.type = 0
      entity.write_to_rom()
    end
  when "por"
    # The whole drill cart puzzle from City of Haze was removed.
    # So let's also remove the entity hider so that the enemies from the alternate game modes appear.
    drill_room_entity_hider = game.entity_by_str("01-01-04_02")
    drill_room_entity_hider.type = 0
    drill_room_entity_hider.write_to_rom()
  when "ooe"
    # Remove the small gate in Forsaken Cloiser, but not the big gate in Final Approach.
    forsaken_cloister_gate = game.entity_by_str("00-09-06_00")
    forsaken_cloister_gate.type = 0
    forsaken_cloister_gate.write_to_rom()
  end
  # Remove breakable walls from the map as well so it matches visually with the level design.
  maps = []
  if GAME == "dos"
    # DoS has two maps: the castle and the abyss (sector 0xA).
    maps << game.get_map(0, 0)
    maps << game.get_map(0, 0xA)
  else
    AREA_INDEX_TO_OVERLAY_INDEX.keys.each do |area_index|
      maps << game.get_map(area_index, 0)
    end
  end
  maps.each do |map|
    map.tiles.each do |tile|
      # Convert every "secret" (breakable-wall) edge into a regular door edge.
      if tile.left_secret
        tile.left_secret = false
        tile.left_door = true
      end
      if tile.top_secret
        tile.top_secret = false
        tile.top_door = true
      end
      if tile.right_secret
        tile.right_secret = false
        tile.right_door = true
      end
      if tile.bottom_secret
        tile.bottom_secret = false
        tile.bottom_door = true
      end
    end
    map.write_to_rom()
  end
end
# Rebuilds the boss doors that sit OUTSIDE boss rooms after room
# randomization: deletes the originals (which may now be in front of the
# wrong rooms) and creates new boss-door entities in whichever rooms now
# connect to each boss room's left/right doors. Also fixes Nest of
# Evil/Large Cavern boss doors so progression still works.
#
# Writes all changes directly to the ROM. No parameters; no return value.
def replace_outer_boss_doors
  boss_rooms = []
  if GAME == "dos"
    boss_rooms << game.room_by_str("00-03-0E") # Doppelganger event room
  end
  # Make a list of boss rooms to fix the boss doors for.
  game.each_room do |room|
    next if room.area.name == "Ecclesia"
    next if room.area.name == "Nest of Evil"
    next if room.area.name == "Large Cavern"
    next if room.area.name == "Unused Boss Rush"
    if options[:randomize_rooms_map_friendly] && @rooms_unused_by_map_rando.include?(room)
      # Skip rooms not used by the map friendly room randomizer.
      next
    end
    room.entities.each do |entity|
      # var_a == 0 marks a boss door placed outside the boss room;
      # nonzero var_a marks one inside, which identifies the room as a boss room.
      if entity.is_boss_door? && entity.var_a == 0
        # Boss door outside a boss room. Remove it.
        entity.type = 0
        entity.write_to_rom()
      end
      if entity.is_boss_door? && entity.var_a != 0
        boss_rooms << room
      end
    end
  end
  # Replace boss doors.
  boss_rooms.uniq.each do |boss_room|
    if GAME == "dos" && boss_room.room_str == "00-03-0E"
      # Doppelganger event room
      boss_index = 0xE
      num_boss_doors_on_each_side = 2
    else
      # var_b of the boss door identifies which boss it belongs to.
      boss_index = boss_room.entities.find{|e| e.is_boss_door?}.var_b
      num_boss_doors_on_each_side = 1
    end
    doors = boss_room.doors
    if GAME == "dos" && boss_index == 7 # Gergoth
      doors = doors[0..1] # Only do the top two doors in the tower.
    end
    doors.each do |door|
      # Boss doors are only placed beside horizontal entrances.
      next unless [:left, :right].include?(door.direction)
      next if checker.inaccessible_doors.include?(door.door_str)
      dest_door = door.destination_door
      dest_room = dest_door.room
      # Don't add a boss door when two boss rooms are connected to each other, that would result in overlapping boss doors.
      if boss_rooms.include?(dest_room)
        if GAME == "dos" && dest_room.room_str == "00-05-07"
          # Special exception for Gergoth's boss room connecting back in on another lower part of his tower.
          # We do add the boss door in this case.
        else
          next
        end
      end
      # Align the new door with the bottom of the destination door's opening
      # (each gap tile is 0x10 pixels tall).
      gap_start_index, gap_end_index, tiles_in_biggest_gap = get_biggest_door_gap(dest_door)
      gap_end_offset = gap_end_index * 0x10 + 0x10
      num_boss_doors_on_each_side.times do |dup_boss_door_num|
        new_boss_door = Entity.new(dest_room, game.fs)
        new_boss_door.x_pos = door.dest_x
        new_boss_door.y_pos = door.dest_y + gap_end_offset
        if door.direction == :left
          # Entering leftwards: place the door at the right edge of the dest screen.
          new_boss_door.x_pos += 0xF0 - dup_boss_door_num*0x10
        else
          new_boss_door.x_pos += dup_boss_door_num*0x10
        end
        new_boss_door.type = 2
        new_boss_door.subtype = BOSS_DOOR_SUBTYPE
        new_boss_door.var_a = 0
        new_boss_door.var_b = boss_index
        dest_room.entities << new_boss_door
        dest_room.write_entities_to_rom()
      end
    end
  end
  # Fix Nest of Evil/Large Cavern boss doors.
  game.each_room do |room|
    next unless room.area.name == "Nest of Evil" || room.area.name == "Large Cavern"
    room_has_enemies = room.entities.find{|e| e.type == 1}
    room.entities.each do |entity|
      if entity.is_boss_door? && entity.var_a == 2
        # Boss door in Nest of Evil/Large Cavern that never opens.
        if room_has_enemies
          # We switch these to the normal ones that open when the room is cleared so the player can progress even with rooms randomized.
          entity.var_a = 1
        else
          # This is one of the vertical corridors with no enemies. A normal boss door wouldn't open here since there's no way to clear a room with no enemies.
          # So instead just delete the door.
          entity.type = 0
        end
        entity.write_to_rom()
      end
    end
  end
end
# Shifts certain bosses to the horizontal center of their room so the player
# doesn't get hit immediately when entering from the "wrong" side after room
# randomization. Per game: DoS Abaddon, PoR The Creature, OoE Arthroverta.
# Writes the change directly to the ROM; no parameters, no return value.
def center_bosses_for_room_rando
  # Table of [entity string, centered x position] per game; unknown games
  # (e.g. AoS) are left untouched, matching the original case fallthrough.
  entity_str, centered_x =
    case GAME
    when "dos" then ["00-0B-13_00", 0x80]
    when "por" then ["08-00-04_00", 0x100]
    when "ooe" then ["12-00-13_00", 0x80]
    end
  return if entity_str.nil?
  boss = game.entity_by_str(entity_str)
  boss.x_pos = centered_x
  boss.write_to_rom()
end
# Aligns a pair of connected doors whose openings don't line up (e.g. the gap
# is at the bottom of one room but the middle of the other).
#
# door - either side of the connection; the other side is taken from
#        door.destination_door.
#
# Adjusts the unused dest_x_2/dest_y_2 offsets of both doors so the player
# lands in the opening, and blocks off excess tiles when one opening is
# larger than the other (otherwise the player could spawn inside a wall and
# be thrown out of bounds). Writes both doors to the ROM.
def line_up_door(door)
  # Sometimes two doors don't line up perfectly. For example if the opening is at the bottom of one room but the middle of the other.
  # We change the dest_x/dest_y of these so they line up correctly.
  dest_door = door.destination_door
  dest_room = dest_door.room
  # Normalize the pair so we always know which side is which.
  # (Ruby defines locals assigned in unexecuted case branches as nil, so e.g.
  # left_door is nil when handling an up/down pair.)
  case door.direction
  when :left
    left_door = door
    right_door = dest_door
  when :right
    right_door = door
    left_door = dest_door
  when :up
    up_door = door
    down_door = dest_door
  when :down
    down_door = door
    up_door = dest_door
  end
  if GAME == "por" && left_door && left_door.destination_room.room_str == "00-01-07"
    # Left door that leads into the Behemoth chase sequence room.
    # If the player enters through this door first instead of from the normal direction, they can get stuck in the gate at the right side of the room.
    # Give it an x offset 2 blocks to the left so the player gets past the gate.
    left_door.dest_x_2 -= 0x20
  end
  case door.direction
  when :left, :right
    left_first_tile_i, left_last_tile_i, left_tiles_in_biggest_gap = get_biggest_door_gap(left_door)
    right_first_tile_i, right_last_tile_i, right_tiles_in_biggest_gap = get_biggest_door_gap(right_door)
    unless left_last_tile_i == right_last_tile_i
      # Align the BOTTOMS of the two gaps (each tile is 0x10 pixels).
      left_door_dest_y_offset = (right_last_tile_i - left_last_tile_i) * 0x10
      right_door_dest_y_offset = (left_last_tile_i - right_last_tile_i) * 0x10
      # We use the unused dest offsets because they still work fine and this way we don't mess up the code Door#destination_door uses to guess the destination door, since that's based off the used dest_x and dest_y.
      left_door.dest_y_2 = left_door_dest_y_offset
      right_door.dest_y_2 = right_door_dest_y_offset
    end
    # If the gaps are not the same size we need to block off part of the bigger gap so that they are the same size.
    # Otherwise the player could enter a room inside a solid wall, and get thrown out of bounds.
    if left_tiles_in_biggest_gap.size < right_tiles_in_biggest_gap.size
      num_tiles_to_remove = right_tiles_in_biggest_gap.size - left_tiles_in_biggest_gap.size
      tiles_to_remove = right_tiles_in_biggest_gap[0, num_tiles_to_remove]
      # For those huge doorways that take up the entire screen (e.g. Kalidus), we want to make sure the bottommost tile of that screen is solid so it's properly delineated from the doorway of the screen below.
      if right_tiles_in_biggest_gap.size == SCREEN_HEIGHT_IN_TILES
        # We move up the gap by one block.
        tiles_to_remove = tiles_to_remove[0..-2]
        tiles_to_remove << right_tiles_in_biggest_gap.last
        # Then we also have to readjust the dest y offsets.
        left_door.dest_y_2 -= 0x10
        right_door.dest_y_2 += 0x10
      end
      block_off_tiles(right_door.room, tiles_to_remove)
    elsif right_tiles_in_biggest_gap.size < left_tiles_in_biggest_gap.size
      num_tiles_to_remove = left_tiles_in_biggest_gap.size - right_tiles_in_biggest_gap.size
      tiles_to_remove = left_tiles_in_biggest_gap[0, num_tiles_to_remove]
      # For those huge doorways that take up the entire screen (e.g. Kalidus), we want to make sure the bottommost tile of that screen is solid so it's properly delineated from the doorway of the screen below.
      if left_tiles_in_biggest_gap.size == SCREEN_HEIGHT_IN_TILES
        # We move up the gap by one block.
        tiles_to_remove = tiles_to_remove[0..-2]
        tiles_to_remove << left_tiles_in_biggest_gap.last
        # Then we also have to readjust the dest y offsets.
        right_door.dest_y_2 -= 0x10
        left_door.dest_y_2 += 0x10
      end
      block_off_tiles(left_door.room, tiles_to_remove)
    end
    left_door.write_to_rom()
    right_door.write_to_rom()
  when :up, :down
    # Same idea as left/right but on the x axis. Note there is no
    # full-screen-doorway special case here, unlike the horizontal branch.
    up_first_tile_i, up_last_tile_i, up_tiles_in_biggest_gap = get_biggest_door_gap(up_door)
    down_first_tile_i, down_last_tile_i, down_tiles_in_biggest_gap = get_biggest_door_gap(down_door)
    unless up_last_tile_i == down_last_tile_i
      up_door_dest_x_offset = (down_last_tile_i - up_last_tile_i) * 0x10
      down_door_dest_x_offset = (up_last_tile_i - down_last_tile_i) * 0x10
      # We use the unused dest offsets because they still work fine and this way we don't mess up the code Door#destination_door uses to guess the destination door, since that's based off the used dest_x and dest_y.
      up_door.dest_x_2 = up_door_dest_x_offset
      down_door.dest_x_2 = down_door_dest_x_offset
    end
    # If the gaps are not the same size we need to block off part of the bigger gap so that they are the same size.
    # Otherwise the player could enter a room inside a solid wall, and get thrown out of bounds.
    if up_tiles_in_biggest_gap.size < down_tiles_in_biggest_gap.size
      num_tiles_to_remove = down_tiles_in_biggest_gap.size - up_tiles_in_biggest_gap.size
      tiles_to_remove = down_tiles_in_biggest_gap[0, num_tiles_to_remove]
      block_off_tiles(down_door.room, tiles_to_remove)
    elsif down_tiles_in_biggest_gap.size < up_tiles_in_biggest_gap.size
      num_tiles_to_remove = up_tiles_in_biggest_gap.size - down_tiles_in_biggest_gap.size
      tiles_to_remove = up_tiles_in_biggest_gap[0, num_tiles_to_remove]
      block_off_tiles(up_door.room, tiles_to_remove)
    end
    up_door.write_to_rom()
    down_door.write_to_rom()
  end
end
# Scans the collision data along the screen edge a door sits on and finds the
# largest opening (run of non-solid tiles) the player can pass through.
#
# door - the door whose opening should be measured.
#
# Returns [first_tile_i, last_tile_i, tiles_in_biggest_gap], where the indices
# are positions within the door's screen (0-based, in tiles) and
# tiles_in_biggest_gap is the list of tile hashes ({is_solid:, i:, x:, y:}) in
# the chosen gap. Gaps that run off the room's edge are ignored when any
# interior gap exists. Raises if the door's edge has no gap at all.
def get_biggest_door_gap(door)
  coll = RoomCollision.new(door.room, game.fs)
  tiles = []
  case door.direction
  when :left, :right
    # Sample the single column of tiles at the door's edge of its screen.
    if door.direction == :left
      x = 0
    else
      x = door.x_pos*SCREEN_WIDTH_IN_TILES - 1
    end
    y_start = door.y_pos*SCREEN_HEIGHT_IN_TILES
    (y_start..y_start+SCREEN_HEIGHT_IN_TILES-1).each_with_index do |y, i|
      is_solid = coll[x*0x10,y*0x10].is_solid?
      tiles << {is_solid: is_solid, i: i, x: x, y: y}
    end
    # Keep track of gaps that extend all the way to the top/bottom of the room so we can ignore these gaps later.
    bottom_y_of_top_edge_gap = -1
    (0..door.room.height*SCREEN_HEIGHT_IN_TILES-1).each do |y|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        bottom_y_of_top_edge_gap = y
      end
    end
    top_y_of_bottom_edge_gap = door.room.height*SCREEN_HEIGHT_IN_TILES
    (0..door.room.height*SCREEN_HEIGHT_IN_TILES-1).reverse_each do |y|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        top_y_of_bottom_edge_gap = y
      end
    end
  when :up, :down
    # Sample the single row of tiles at the door's edge of its screen.
    if door.direction == :up
      y = 0
    else
      y = door.y_pos*SCREEN_HEIGHT_IN_TILES - 1
      if GAME == "por" && door.room.room_str == "03-01-01"
        # One specific room in sandy grave doesn't have any tiles at the very bottom row. Instead we use the row second closest to the bottom.
        y -= 1
      end
    end
    x_start = door.x_pos*SCREEN_WIDTH_IN_TILES
    (x_start..x_start+SCREEN_WIDTH_IN_TILES-1).each_with_index do |x, i|
      is_solid = coll[x*0x10,y*0x10].is_solid?
      tiles << {is_solid: is_solid, i: i, x: x, y: y}
    end
    # Keep track of gaps that extend all the way to the left/right of the room so we can ignore these gaps later.
    right_x_of_left_edge_gap = -1
    (0..door.room.width*SCREEN_WIDTH_IN_TILES-1).each do |x|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        right_x_of_left_edge_gap = x
      end
    end
    left_x_of_right_edge_gap = door.room.width*SCREEN_WIDTH_IN_TILES
    (0..door.room.width*SCREEN_WIDTH_IN_TILES-1).reverse_each do |x|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        left_x_of_right_edge_gap = x
      end
    end
  end
  # Split the sampled line into runs of solid/non-solid tiles; the non-solid
  # runs are the candidate gaps.
  chunks = tiles.chunk{|tile| tile[:is_solid]}
  gaps = chunks.select{|is_solid, tiles| !is_solid}.map{|is_solid, tiles| tiles}
  # Try to limit to gaps that aren't touching the edge of the room if possible.
  case door.direction
  when :left, :right
    gaps_not_on_room_edge = gaps.reject do |tiles|
      if tiles.first[:y] <= bottom_y_of_top_edge_gap
        true
      elsif tiles.last[:y] >= top_y_of_bottom_edge_gap
        true
      else
        false
      end
    end
  when :up, :down
    gaps_not_on_room_edge = gaps.reject do |tiles|
      if tiles.first[:x] <= right_x_of_left_edge_gap
        true
      elsif tiles.last[:x] >= left_x_of_right_edge_gap
        true
      else
        false
      end
    end
  end
  if gaps_not_on_room_edge.any?
    gaps = gaps_not_on_room_edge
  end
  if gaps.empty?
    raise "Door #{door.door_str} has no gaps."
  end
  tiles_in_biggest_gap = gaps.max_by{|tiles| tiles.length}
  first_tile_i = tiles_in_biggest_gap.first[:i]
  last_tile_i = tiles_in_biggest_gap.last[:i]
  return [first_tile_i, last_tile_i, tiles_in_biggest_gap]
end
# Overwrites the given tiles on the room's collision layer with a solid
# blockade tile (no flips), then saves the layer to the ROM.
#
# room  - the room whose collision layer should be edited.
# tiles - hashes with :x and :y tile coordinates (room-relative).
def block_off_tiles(room, tiles)
  room.sector.load_necessary_overlay()
  layer = room.layers.first
  # Which tileset index draws as a solid blockade depends on the room's
  # overlay and collision tileset.
  solid_index = SOLID_BLOCKADE_TILE_INDEX_FOR_TILESET[room.overlay_id][layer.collision_tileset_pointer]
  row_width = SCREEN_WIDTH_IN_TILES * layer.width
  tiles.each do |tile_pos|
    layer_tile = layer.tiles[tile_pos[:x] + tile_pos[:y] * row_width]
    layer_tile.index_on_tileset = solid_index
    layer_tile.horizontal_flip = false
    layer_tile.vertical_flip = false
  end
  layer.write_to_rom()
end
end
Center behemoth in room for room/map rando
module DoorRandomizer
class NotAllRoomsAreConnectedError < StandardError ; end
def randomize_transition_doors
queued_door_changes = Hash.new{|h, k| h[k] = {}}
game.areas.each do |area|
all_area_transition_rooms = @transition_rooms.select do |transition_room|
transition_room.area_index == area.area_index
end
if all_area_transition_rooms.size <= 2
# Not enough transition rooms in this area to properly randomize it. Need at least 3.
next
end
all_area_subsectors = []
area.sectors.each do |sector|
subsectors = get_subsectors(sector, include_transitions: true, use_subrooms: false)
all_area_subsectors += subsectors
end
remaining_transitions = {
left: [],
right: [],
}
other_transitions_in_same_subsector = {}
accessible_unused_transitions = []
transition_doors_by_subsector = Array.new(all_area_subsectors.size){ [] }
starting_transition = nil
# First we make a list of the transition doors, specifically the left door in a transition room, and the right door that leads into that transition room.
all_area_transition_rooms.each do |transition_room|
if GAME == "por" && transition_room.room_str == "00-01-01"
# The first transition room between the outside and inside parts of the Entrance.
# Don't randomize this connection, so the shop and Wind are always close to the start of the game.
next
end
if GAME == "por" && transition_room.room_str == "00-0A-00"
# The transition room leading to the Throne Room behind the barrier.
# Don't randomize this connection, always have the Throne Room behind the barrier.
next
end
if GAME == "ooe" && ["00-0A-00", "00-0A-07", "00-0A-13"].include?(transition_room.room_str)
# The transition rooms in the Final Approach.
# Don't randomize these connections, since it could result in progression being impossible.
next
end
transition_door = transition_room.doors.find{|door| door.direction == :left}
dest_door = transition_door.destination_door
remaining_transitions[transition_door.direction] << transition_door
remaining_transitions[dest_door.direction] << dest_door
end
# Then we go through each transition door and keep track of what subsector it's located in.
remaining_transitions.values.flatten.each do |transition_door|
if transition_door.direction == :right
# The door leading right into a transition room.
# This is part of the sector proper, so we just use this room itself to detect the proper subsector.
room_in_desired_subsector = transition_door.room
else
# The door leading left out of the transition room.
# We want the subsector to the right. But since this is in the transition room, we have no idea what subsector the transition room itself is in.
# So follow the right door out of the transition room, and use the room there to detect the proper subsector.
room_in_desired_subsector = transition_door.room.doors.find{|d| d.direction == :right}.destination_room
end
all_area_subsectors.each_with_index do |subsector_rooms, subsector_index|
if subsector_rooms.include?(room_in_desired_subsector)
transition_doors_by_subsector[subsector_index] << transition_door
other_transitions_in_same_subsector[transition_door] = transition_doors_by_subsector[subsector_index]
break
end
end
if other_transitions_in_same_subsector[transition_door].nil?
puts all_area_subsectors.flatten.map{|x| x.room_str}
raise "#{transition_door.door_str} can't be found in any subsector"
end
end
#other_transitions_in_same_subsector.each do |k, v|
# puts "#{k.door_str}: #{v.map{|d| d.door_str}.join(", ")}"
#end
starting_transition = remaining_transitions.values.flatten.sample(random: rng)
on_first = true
while true
debug = false
#debug = (area.area_index == 5)
if on_first
inside_transition_door = starting_transition
on_first = false
else
inside_transition_door = accessible_unused_transitions.sample(random: rng)
end
puts "(area connections) inside door: #{inside_transition_door.door_str}" if debug
inside_door_opposite_direction = case inside_transition_door.direction
when :left
:right
when :right
:left
end
inaccessible_remaining_matching_doors = remaining_transitions[inside_door_opposite_direction] - accessible_unused_transitions
inaccessible_remaining_matching_doors -= other_transitions_in_same_subsector[inside_transition_door]
inaccessible_remaining_matching_doors_with_other_exits = inaccessible_remaining_matching_doors.select do |door|
new_subsector_exits = (other_transitions_in_same_subsector[door] & remaining_transitions.values.flatten) - [door]
new_subsector_exits.any?
end
if inaccessible_remaining_matching_doors_with_other_exits.any?
# There are doors we can swap with that allow more progress to new subsectors.
possible_dest_doors = inaccessible_remaining_matching_doors_with_other_exits
puts "TRANSITION TYPE 1" if debug
elsif inaccessible_remaining_matching_doors.any?
# There are doors we can swap with that will allow you to reach one new subsector which is a dead end.
possible_dest_doors = inaccessible_remaining_matching_doors
puts "TRANSITION TYPE 2" if debug
elsif remaining_transitions[inside_door_opposite_direction].any?
# This door direction doesn't have any more matching doors left to swap with that will result in progress.
# So just pick any matching door.
possible_dest_doors = remaining_transitions[inside_door_opposite_direction]
puts "TRANSITION TYPE 3" if debug
else
# This door direction doesn't have any matching doors left.
puts "TRANSITION TYPE 4" if debug
raise "Area connections randomizer: Could not link all subsectors!"
end
outside_transition_door = possible_dest_doors.sample(random: rng)
puts "(area connections) outside door: #{outside_transition_door.door_str}" if debug
remaining_transitions[inside_transition_door.direction].delete(inside_transition_door)
remaining_transitions[outside_transition_door.direction].delete(outside_transition_door)
if queued_door_changes[inside_transition_door].any?
puts "changed inside transition door twice: #{inside_transition_door.door_str}"
raise "Changed a transition door twice"
end
if queued_door_changes[outside_transition_door].any?
puts "changed outside transition door twice: #{outside_transition_door.door_str}"
raise "Changed a transition door twice"
end
queued_door_changes[inside_transition_door]["destination_room_metadata_ram_pointer"] = outside_transition_door.room.room_metadata_ram_pointer
queued_door_changes[inside_transition_door]["dest_x"] = outside_transition_door.destination_door.dest_x
queued_door_changes[inside_transition_door]["dest_y"] = outside_transition_door.destination_door.dest_y
queued_door_changes[outside_transition_door]["destination_room_metadata_ram_pointer"] = inside_transition_door.room.room_metadata_ram_pointer
queued_door_changes[outside_transition_door]["dest_x"] = inside_transition_door.destination_door.dest_x
queued_door_changes[outside_transition_door]["dest_y"] = inside_transition_door.destination_door.dest_y
#puts "accessible_unused_transitions before: #{accessible_unused_transitions.map{|d| d.door_str}}"
accessible_unused_transitions.delete(inside_transition_door)
accessible_unused_transitions.delete(outside_transition_door)
accessible_unused_transitions += (other_transitions_in_same_subsector[inside_transition_door] & remaining_transitions.values.flatten)
accessible_unused_transitions += (other_transitions_in_same_subsector[outside_transition_door] & remaining_transitions.values.flatten)
accessible_unused_transitions.uniq!
#puts "accessible_unused_transitions after: #{accessible_unused_transitions.map{|d| d.door_str}}"
if accessible_unused_transitions.empty?
if remaining_transitions.values.flatten.size == 0
break
else
raise "Area connections randomizer: Not all sectors connected: #{remaining_transitions.values.flatten.map{|door| door.door_str}}"
end
end
end
end
doors_to_line_up = []
queued_door_changes.each do |door, changes|
changes.each do |attribute_name, new_value|
door.send("#{attribute_name}=", new_value)
end
unless doors_to_line_up.include?(door.destination_door)
doors_to_line_up << door
end
door.write_to_rom()
end
lined_up_door_strs = []
doors_to_line_up.each do |door|
next if lined_up_door_strs.include?(door.destination_door.door_str)
lined_up_door_strs << door.door_str
line_up_door(door)
end
end
def randomize_non_transition_doors
# We make sure every room in an area is accessible. This is to prevent infinite loops of a small number of rooms that connect to each other with no way to progress.
# Loop through each room. search for remaining rooms that have a matching door. But the room we find must also have remaining doors in it besides the one we swap with so it's not a dead end, or a loop. If there are no rooms that meet those conditions, then we go with the more lax condition of just having a matching door, allowing dead ends.
# Make a list of doors that lead into transition rooms so we can tell these apart from regular doors.
transition_doors = []
@transition_rooms.each do |room|
room.doors.each do |inside_door|
transition_doors << inside_door.destination_door
end
end
@randomize_up_down_doors = true
queued_door_changes = Hash.new{|h, k| h[k] = {}}
game.areas.each do |area|
if GAME == "ooe" && area.area_index == 2
# Don't randomize Ecclesia.
next
end
area.sectors.each do |sector|
if GAME == "ooe" && area.area_index == 7 && sector.sector_index == 1
# Don't randomize Rusalka's sector. It's too small to do anything with properly.
next
end
# First get the "subsectors" in this sector.
# A subsector is a group of rooms in a sector that can access each other.
# This separates certains sectors into multiple parts like the first sector of PoR.
subsectors = get_subsectors(sector)
redo_counts_for_subsector = Hash.new(0)
subsectors.each_with_index do |subsector_rooms, subsector_index|
orig_queued_door_changes = queued_door_changes.dup
begin
randomize_non_transition_doors_for_subsector(subsector_rooms, subsector_index, area, sector, queued_door_changes, transition_doors)
rescue NotAllRoomsAreConnectedError => e
redo_counts_for_subsector[subsector_index] += 1
if redo_counts_for_subsector[subsector_index] > @max_room_rando_subsector_redos
raise "Bug: Door randomizer failed to connect all rooms in subsector #{subsector_index} in %02X-%02X more than #{@max_room_rando_subsector_redos} times" % [area.area_index, sector.sector_index]
end
puts "Door randomizer needed to redo subsector #{subsector_index} in %02X-%02X" % [area.area_index, sector.sector_index]
queued_door_changes = orig_queued_door_changes
redo
end
end
end
end
doors_to_line_up = []
queued_door_changes.each do |door, changes|
changes.each do |attribute_name, new_value|
door.send("#{attribute_name}=", new_value)
end
unless doors_to_line_up.include?(door.destination_door)
doors_to_line_up << door
end
door.write_to_rom()
end
lined_up_door_strs = []
doors_to_line_up.each do |door|
next if lined_up_door_strs.include?(door.destination_door.door_str)
lined_up_door_strs << door.door_str
line_up_door(door)
end
replace_outer_boss_doors()
center_bosses_for_room_rando()
end
def randomize_non_transition_doors_for_subsector(subsector_rooms, subsector_index, area, sector, queued_door_changes, transition_doors)
if GAME == "por" && area.area_index == 0 && sector.sector_index == 0 && subsector_index == 0
# Don't randomize first subsector in PoR.
return
end
if GAME == "por" && area.area_index == 5 && sector.sector_index == 2 && subsector_index == 0
# Don't randomize the middle sector in Nation of Fools with Legion.
# The randomizer never connects all the rooms properly, and Legion further complicates things anyway, so don't bother.
return
end
prioritize_up_down = true
if GAME == "por" && area.area_index == 6 && sector.sector_index == 0 && [1, 3].include?(subsector_index)
# The center-left and center-right parts of Burnt Paradise. These only have a few left/right doors, so it screws up if we prioritize up/down doors.
prioritize_up_down = false
end
#if sector.sector_index == 2
# puts "On subsector: #{subsector_index}"
# puts "Subsector rooms:"
# subsector_rooms.each do |room|
# puts " %08X" % room.room_metadata_ram_pointer
# end
#end
remaining_doors = get_valid_doors(subsector_rooms, sector)
if remaining_doors[:left].size != remaining_doors[:right].size
raise "Subsector #{subsector_index} of %02X-%02X has an unmatching number of left/right doors!\nleft: #{remaining_doors[:left].size}, right: #{remaining_doors[:right].size}, up: #{remaining_doors[:up].size}, down: #{remaining_doors[:down].size}," % [area.area_index, sector.sector_index]
end
if remaining_doors[:up].size != remaining_doors[:down].size
raise "Subsector #{subsector_index} of %02X-%02X has an unmatching number of up/down doors!\nleft: #{remaining_doors[:left].size}, right: #{remaining_doors[:right].size}, up: #{remaining_doors[:up].size}, down: #{remaining_doors[:down].size}," % [area.area_index, sector.sector_index]
end
#if sector.sector_index == 1
# remaining_doors.values.flatten.each do |door|
# puts " #{door.door_str}"
# end
# puts "num doors: #{remaining_doors.values.flatten.size}"
# gets
#end
all_randomizable_doors = remaining_doors.values.flatten
all_rooms = all_randomizable_doors.map{|door| door.room}.uniq
if all_rooms.empty?
# No doors in this sector
return
end
unvisited_rooms = all_rooms.dup
accessible_remaining_doors = []
current_room = unvisited_rooms.sample(random: rng)
while true
debug = false
#debug = (area.area_index == 0x6 && sector.sector_index == 0 && subsector_index == 1)
#puts remaining_doors[:down].map{|d| d.door_str} if debug
#gets if debug
puts "on room #{current_room.room_str}" if debug
unvisited_rooms.delete(current_room)
accessible_remaining_doors += remaining_doors.values.flatten.select{|door| door.room == current_room}
accessible_remaining_doors.uniq!
accessible_remaining_doors = accessible_remaining_doors & remaining_doors.values.flatten
if accessible_remaining_doors.empty?
break
end
accessible_remaining_updown_doors = accessible_remaining_doors.select{|door| [:up, :down].include?(door.direction)}
if accessible_remaining_updown_doors.any?
# Always prioritize doing up and down doors first.
inside_door = accessible_remaining_updown_doors.sample(random: rng)
else
inside_door = accessible_remaining_doors.sample(random: rng)
end
remaining_doors[inside_door.direction].delete(inside_door)
accessible_remaining_doors.delete(inside_door)
accessible_remaining_leftright_doors = accessible_remaining_doors.select{|door| [:left, :right].include?(door.direction)}
puts "inside door chosen: #{inside_door.door_str}" if debug
inside_door_opposite_direction = case inside_door.direction
when :left
:right
when :right
:left
when :up
:down
when :down
:up
end
inaccessible_remaining_matching_doors = remaining_doors[inside_door_opposite_direction] - accessible_remaining_doors
#puts "REMAINING: #{remaining_doors[inside_door_opposite_direction].map{|x| " #{x.door_str}\n"}}"
inaccessible_remaining_matching_doors_with_other_exits = inaccessible_remaining_matching_doors.select do |door|
((door.room.doors & all_randomizable_doors) - transition_doors).length > 1 && unvisited_rooms.include?(door.room)
end
inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright = []
inaccessible_remaining_matching_doors_with_no_leftright_door_exits = []
inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits = []
if [:left, :right].include?(inside_door.direction)
# If we're on a left/right door, prioritize going to new rooms that have an up/down door so we don't get locked out of having any up/down doors to work with.
inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright = inaccessible_remaining_matching_doors_with_other_exits.select do |door|
if door.direction == :left || door.direction == :right
((door.room.doors & all_randomizable_doors) - transition_doors).any?{|x| x.direction == :up || x.direction == :down}
end
end
else
# If we're on an up/down door, prioritize going to new rooms that DON'T have any usable left/right doors in them.
# This is because those rooms with left/right doors are more easily accessible via left/right doors. We need to prioritize the ones that only have up/down doors as they're trickier to make the logic place.
inaccessible_remaining_matching_doors_with_no_leftright_door_exits = inaccessible_remaining_matching_doors.select do |door|
if door.direction == :up || door.direction == :down
((door.room.doors & all_randomizable_doors) - transition_doors).none?{|x| x.direction == :left || x.direction == :right}
end
end
inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits = inaccessible_remaining_matching_doors_with_other_exits.select do |door|
if door.direction == :up || door.direction == :down
((door.room.doors & all_randomizable_doors) - transition_doors).none?{|x| x.direction == :left || x.direction == :right}
end
end
if debug && inaccessible_remaining_matching_doors_with_no_leftright_door_exits.any?
puts "Found up/down doors with no left/right exits in destination:"
inaccessible_remaining_matching_doors_with_no_leftright_door_exits.each{|x| puts " #{x.door_str}"}
end
end
remaining_inaccessible_rooms_with_up_down_doors = (remaining_doors[:up] + remaining_doors[:down] - accessible_remaining_doors).map{|d| d.room}.uniq
if inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits.any? && remaining_inaccessible_rooms_with_up_down_doors.size > 1 && prioritize_up_down
# There are doors we can swap with that allow you to reach a new room which allows more progress, but only via up/down doors.
# We want to prioritize these because they can't be gotten into via left/right doors like rooms that have at least one left/right.
possible_dest_doors = inaccessible_remaining_matching_doors_with_no_leftright_door_exits_and_other_exits
puts "TYPE 1" if debug
elsif inaccessible_remaining_matching_doors_with_no_leftright_door_exits.any? && accessible_remaining_leftright_doors.size >= 1
# There are doors we can swap with that allow you to reach a new room which is a dead end, but is a dead end with only up/down doors.
# We want to prioritize these because they can't be gotten into via left/right doors like rooms that have at least one left/right.
# Note that we also only take this option if there's at least 1 accessible left/right door for us to still use. If there's not this would deadend us instantly.
possible_dest_doors = inaccessible_remaining_matching_doors_with_no_leftright_door_exits
puts "TYPE 2" if debug
elsif inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright.any? && remaining_inaccessible_rooms_with_up_down_doors.size > 1 && prioritize_up_down
# There are doors we can swap with that allow more progress, and also allow accessing a new up/down door from a left/right door.
possible_dest_doors = inaccessible_remaining_matching_doors_with_updown_door_exits_via_leftright
puts "TYPE 3" if debug
elsif inaccessible_remaining_matching_doors_with_other_exits.any?
# There are doors we can swap with that allow more progress.
possible_dest_doors = inaccessible_remaining_matching_doors_with_other_exits
puts "TYPE 4" if debug
elsif inaccessible_remaining_matching_doors.any?
# There are doors we can swap with that will allow you to reach one new room which is a dead end.
possible_dest_doors = inaccessible_remaining_matching_doors
puts "TYPE 5" if debug
elsif remaining_doors[inside_door_opposite_direction].any?
# This door direction doesn't have any more matching doors left to swap with that will result in progress.
# So just pick any matching door.
possible_dest_doors = remaining_doors[inside_door_opposite_direction]
puts "TYPE 6" if debug
else
# This door direction doesn't have any matching doors left.
# Don't do anything to this door.
puts "TYPE 7" if debug
#puts "#{inside_door.direction} empty"
#
#accessible_rooms = accessible_remaining_doors.map{|door| door.room}.uniq
#accessible_rooms -= [current_room]
#
#current_room = accessible_rooms.sample(random: rng)
#p accessible_remaining_doors.size
#gets
raise "No remaining matching doors to connect to! Door #{inside_door.door_str}, subsector #{subsector_index} of %02X-%02X" % [area.area_index, sector.sector_index]
current_room = unvisited_rooms.sample(random: rng)
if current_room.nil?
current_room = all_rooms.sample(random: rng)
end
if remaining_doors.values.flatten.empty?
break
end
next
end
if !@randomize_up_down_doors && [:up, :down].include?(inside_door.direction)
# Don't randomize up/down doors. This is a temporary hacky measure to greatly reduce failures at connecting all rooms in a subsector.
new_dest_door = inside_door.destination_door
# Also need to convert this door to a subroomdoor, if applicable.
new_dest_door = all_randomizable_doors.find{|subroomdoor| subroomdoor.door_str == new_dest_door.door_str}
else
new_dest_door = possible_dest_doors.sample(random: rng)
end
remaining_doors[new_dest_door.direction].delete(new_dest_door)
current_room = new_dest_door.room
if queued_door_changes[inside_door].any? || queued_door_changes[new_dest_door].any?
raise "Changed a door twice"
end
queued_door_changes[inside_door]["destination_room_metadata_ram_pointer"] = new_dest_door.room.room_metadata_ram_pointer
queued_door_changes[inside_door]["dest_x"] = new_dest_door.destination_door.dest_x
queued_door_changes[inside_door]["dest_y"] = new_dest_door.destination_door.dest_y
queued_door_changes[new_dest_door]["destination_room_metadata_ram_pointer"] = inside_door.room.room_metadata_ram_pointer
queued_door_changes[new_dest_door]["dest_x"] = inside_door.destination_door.dest_x
queued_door_changes[new_dest_door]["dest_y"] = inside_door.destination_door.dest_y
if debug
puts "inside_door: #{inside_door.door_str}"
#puts "old_outside_door: %08X" % old_outside_door.door_ram_pointer
#puts "inside_door_to_swap_with: %08X" % inside_door_to_swap_with.door_ram_pointer
puts "new_outside_door: #{new_dest_door.door_str}"
puts "dest room: #{new_dest_door.room.room_str}"
puts
#break
end
end
if unvisited_rooms.any?
puts "Failed to connect the following rooms:"
unvisited_rooms.each do |room|
puts " #{room.room_str}"
end
raise NotAllRoomsAreConnectedError.new("Room connections randomizer failed to connect some rooms.")
end
end
def get_subsectors(sector, include_transitions: false, use_subrooms: true)
subsectors = []
debug = false
#debug = (sector.area_index == 5 && sector.sector_index == 2)
transition_room_strs = @transition_rooms.map{|room| room.room_str}
if options[:randomize_rooms_map_friendly]
room_strs_unused_by_map_rando = @rooms_unused_by_map_rando.map{|room| room.room_str}
end
if room_rando? && use_subrooms
# First convert the rooms to subrooms.
sector_subrooms = checker.convert_rooms_to_subrooms(sector.rooms)
else
sector_subrooms = sector.rooms
end
remaining_rooms_to_check = sector_subrooms.dup
remaining_rooms_to_check -= @transition_rooms unless include_transitions
while remaining_rooms_to_check.any?
current_subsector = []
puts "STARTING NEW SUBSECTOR" if debug
current_room = remaining_rooms_to_check.first
while true
puts "Current room: #{current_room.room_str}" if debug
remaining_rooms_to_check.delete(current_room)
if options[:randomize_rooms_map_friendly] && room_strs_unused_by_map_rando.include?(current_room.room_str)
# Skip rooms not used by the map friendly room randomizer.
remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
break if remaining_subsector_rooms.empty?
current_room = remaining_subsector_rooms.first
next
end
current_room_doors = current_room.doors.reject{|door| checker.inaccessible_doors.include?(door.door_str)}
current_room_doors = current_room_doors.reject{|door| door.destination_room_metadata_ram_pointer == 0} # Door dummied out by the map-friendly room randomizer.
if current_room_doors.empty?
# Unused room with no door. Don't add it to the list of rooms in the subsector.
remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
break if remaining_subsector_rooms.empty?
current_room = remaining_subsector_rooms.first
next
end
current_subsector << current_room
connected_dest_door_strs = current_room_doors.map{|door| door.destination_door.door_str}
connected_rooms = sector_subrooms.select do |room|
(room.doors.map{|d| d.door_str} & connected_dest_door_strs).any?
end
if GAME == "dos" && current_room.sector.name == "Condemned Tower & Mine of Judgment"
# Need to split Condemned Tower from Mine of Judgement into separate subsectors.
if current_room.room_ypos_on_map >= 0x17
# Current subsector is Mine of Judgement, so remove Condemned Tower rooms.
connected_rooms.reject!{|room| room.room_ypos_on_map < 0x17}
else
# Current subsector is Condemned Tower, so remove Mine of Judgement rooms.
connected_rooms.reject!{|room| room.room_ypos_on_map >= 0x17}
end
end
unless include_transitions
connected_rooms.reject!{|connected_room| transition_room_strs.include?(connected_room.room_str)}
end
current_subsector += connected_rooms
current_subsector.uniq!
puts "Current subsector so far: #{current_subsector.map{|room| room.room_str}}" if debug
puts "Remaining rooms to check: #{remaining_rooms_to_check.map{|room| room.room_str}}" if debug
puts "A: #{current_subsector.map{|x| x.class.to_s}.join(", ")}" if debug
puts "B: #{remaining_rooms_to_check.map{|x| x.class.to_s}.join(", ")}" if debug
remaining_subsector_rooms = current_subsector & remaining_rooms_to_check
break if remaining_subsector_rooms.empty?
current_room = remaining_subsector_rooms.first
end
subsectors += [current_subsector]
end
return subsectors
end
def get_valid_doors(rooms, sector)
remaining_doors = {
left: [],
up: [],
right: [],
down: []
}
map = game.get_map(sector.area_index, sector.sector_index)
rooms.each do |room|
next if @transition_rooms.include?(room)
room.doors.each do |door|
next if @transition_rooms.include?(door.destination_door.room)
next if checker.inaccessible_doors.include?(door.door_str)
if GAME == "dos" && ["00-01-1C_001", "00-01-20_000"].include?(door.door_str)
# Don't randomize the door connecting Paranoia and Mini-Paranoia.
next
end
if GAME == "dos" && ["00-05-0C_001", "00-05-18_000"].include?(door.door_str)
# Don't randomize the up/down door connecting Condemned Tower and Mine of Judgement.
next
end
if GAME == "por" && ["00-01-04_001", "00-01-03_005", "00-01-03_000", "00-01-18_000", "00-01-03_004", "00-01-06_000", "00-01-06_001", "00-01-09_000"].include?(door.door_str)
# Don't randomize the rooms around the Entrance hub (warp room, Wind, shop, etc).
next
end
if GAME == "ooe" && ["0C-00-0E_000", "0C-00-0F_000", "0C-00-0F_001", "0C-00-10_000"].include?(door.door_str)
# Don't randomize the doors in Large Cavern connecting the warp room, one-way room, and boss room.
next
end
map_tile_x_pos = room.room_xpos_on_map
map_tile_y_pos = room.room_ypos_on_map
if door.x_pos == 0xFF
# Do nothing
elsif door.x_pos >= room.main_layer_width
map_tile_x_pos += room.main_layer_width - 1
else
map_tile_x_pos += door.x_pos
end
if door.y_pos == 0xFF
# Do nothing
elsif door.y_pos >= room.main_layer_height
map_tile_y_pos += room.main_layer_height - 1
else
map_tile_y_pos += door.y_pos
end
map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos && tile.y_pos == map_tile_y_pos}
if map_tile.nil?
# Door that's not on the map, just an unused door.
next
end
# If the door is shown on the map as a wall, skip it.
# Those are leftover doors not intended to be used, and are inaccessible (except with warp glitches).
case door.direction
when :left
next if map_tile.left_wall
when :right
next if map_tile.right_wall
if GAME == "dos" || GAME == "aos"
# Right walls in DoS are handled as the left wall of the tile to the right.
right_map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos+1 && tile.y_pos == map_tile_y_pos}
next if right_map_tile.left_wall
end
when :up
next if map_tile.top_wall
when :down
next if map_tile.bottom_wall
if GAME == "dos" || GAME == "aos"
# Bottom walls in DoS are handled as the top wall of the tile below.
below_map_tile = map.tiles.find{|tile| tile.x_pos == map_tile_x_pos && tile.y_pos == map_tile_y_pos+1}
next if below_map_tile.top_wall
end
end
remaining_doors[door.direction] << door
end
end
return remaining_doors
end
def remove_door_blockers
obj_subtypes_to_remove = case GAME
when "dos"
[0x43, 0x44, 0x46, 0x57, 0x1E, 0x2B, 0x26, 0x2A, 0x29, 0x45, 0x24, 0x37, 0x04]
when "por"
[0x37, 0x30, 0x89, 0x38, 0x2F, 0x36, 0x32, 0x31, 0x88, 0x26, 0x46, 0x41, 0xB2, 0xB3, 0x2E, 0x40, 0x83]
when "ooe"
[0x5B, 0x5A, 0x59]
end
breakable_wall_subtype = case GAME
when "dos"
nil # All breakable walls are path blocking in DoS, so they're instead included in obj_subtypes_to_remove.
when "por"
0x3B
when "ooe"
0x5C
end
game.each_room do |room|
room.entities.each do |entity|
if entity.is_special_object? && obj_subtypes_to_remove.include?(entity.subtype)
entity.type = 0
entity.write_to_rom()
elsif entity.is_special_object? && entity.subtype == breakable_wall_subtype
case GAME
when "por"
PATH_BLOCKING_BREAKABLE_WALLS.each do |wall_data|
if entity.var_a == wall_data[:var_a] && entity.room.area_index == wall_data[:area_index] && (entity.room.area_index != 0 || entity.room.sector_index == wall_data[:sector_index])
entity.type = 0
entity.write_to_rom()
break
end
end
when "ooe"
PATH_BLOCKING_BREAKABLE_WALLS.each do |wall_vars|
if entity.var_a == wall_vars[:var_a] && entity.var_b == wall_vars[:var_b]
entity.type = 0
entity.write_to_rom()
break
end
end
end
end
end
end
case GAME
when "dos"
# Remove the water from the drawbridge room.
drawbridge_room_waterlevel = game.entity_by_str("00-00-15_04")
drawbridge_room_waterlevel.type = 0
drawbridge_room_waterlevel.write_to_rom()
# Remove the cutscene with Yoko before Flying Armor since it doesn't work properly if you don't have Yoko with you.
pre_flying_armor_room = game.room_by_str("00-00-0E")
[9, 0xA, 0xB].each do |entity_index|
entity = pre_flying_armor_room.entities[entity_index]
entity.type = 0
entity.write_to_rom()
end
# Remove the cutscene with Dmitrii because it doesn't work properly when you enter from the left side.
dmitrii_room = game.room_by_str("00-04-10")
[3, 4, 6, 7].each do |entity_index|
entity = dmitrii_room.entities[entity_index]
entity.type = 0
entity.write_to_rom()
end
# And change Dmitrii to boss rush Dmitrii so he doesn't crash when there's no event.
dmitrii = dmitrii_room.entities[5]
dmitrii.var_a = 0
dmitrii.write_to_rom()
# Remove the cutscene with Dario because it doesn't work properly when you enter from the left side.
dario_room = game.room_by_str("00-03-0B")
[2, 3, 4, 6].each do |entity_index|
entity = dario_room.entities[entity_index]
entity.type = 0
entity.write_to_rom()
end
when "por"
# The whole drill cart puzzle from City of Haze was removed.
# So let's also remove the entity hider so that the enemies from the alternate game modes appear.
drill_room_entity_hider = game.entity_by_str("01-01-04_02")
drill_room_entity_hider.type = 0
drill_room_entity_hider.write_to_rom()
when "ooe"
# Remove the small gate in Forsaken Cloiser, but not the big gate in Final Approach.
forsaken_cloister_gate = game.entity_by_str("00-09-06_00")
forsaken_cloister_gate.type = 0
forsaken_cloister_gate.write_to_rom()
end
# Remove breakable walls from the map as well so it matches visually with the level design.
maps = []
if GAME == "dos"
maps << game.get_map(0, 0)
maps << game.get_map(0, 0xA)
else
AREA_INDEX_TO_OVERLAY_INDEX.keys.each do |area_index|
maps << game.get_map(area_index, 0)
end
end
maps.each do |map|
map.tiles.each do |tile|
if tile.left_secret
tile.left_secret = false
tile.left_door = true
end
if tile.top_secret
tile.top_secret = false
tile.top_door = true
end
if tile.right_secret
tile.right_secret = false
tile.right_door = true
end
if tile.bottom_secret
tile.bottom_secret = false
tile.bottom_door = true
end
end
map.write_to_rom()
end
end
def replace_outer_boss_doors
boss_rooms = []
if GAME == "dos"
boss_rooms << game.room_by_str("00-03-0E") # Doppelganger event room
end
# Make a list of boss rooms to fix the boss doors for.
game.each_room do |room|
next if room.area.name == "Ecclesia"
next if room.area.name == "Nest of Evil"
next if room.area.name == "Large Cavern"
next if room.area.name == "Unused Boss Rush"
if options[:randomize_rooms_map_friendly] && @rooms_unused_by_map_rando.include?(room)
# Skip rooms not used by the map friendly room randomizer.
next
end
room.entities.each do |entity|
if entity.is_boss_door? && entity.var_a == 0
# Boss door outside a boss room. Remove it.
entity.type = 0
entity.write_to_rom()
end
if entity.is_boss_door? && entity.var_a != 0
boss_rooms << room
end
end
end
# Replace boss doors.
boss_rooms.uniq.each do |boss_room|
if GAME == "dos" && boss_room.room_str == "00-03-0E"
# Doppelganger event room
boss_index = 0xE
num_boss_doors_on_each_side = 2
else
boss_index = boss_room.entities.find{|e| e.is_boss_door?}.var_b
num_boss_doors_on_each_side = 1
end
doors = boss_room.doors
if GAME == "dos" && boss_index == 7 # Gergoth
doors = doors[0..1] # Only do the top two doors in the tower.
end
doors.each do |door|
next unless [:left, :right].include?(door.direction)
next if checker.inaccessible_doors.include?(door.door_str)
dest_door = door.destination_door
dest_room = dest_door.room
# Don't add a boss door when two boss rooms are connected to each other, that would result in overlapping boss doors.
if boss_rooms.include?(dest_room)
if GAME == "dos" && dest_room.room_str == "00-05-07"
# Special exception for Gergoth's boss room connecting back in on another lower part of his tower.
# We do add the boss door in this case.
else
next
end
end
gap_start_index, gap_end_index, tiles_in_biggest_gap = get_biggest_door_gap(dest_door)
gap_end_offset = gap_end_index * 0x10 + 0x10
num_boss_doors_on_each_side.times do |dup_boss_door_num|
new_boss_door = Entity.new(dest_room, game.fs)
new_boss_door.x_pos = door.dest_x
new_boss_door.y_pos = door.dest_y + gap_end_offset
if door.direction == :left
new_boss_door.x_pos += 0xF0 - dup_boss_door_num*0x10
else
new_boss_door.x_pos += dup_boss_door_num*0x10
end
new_boss_door.type = 2
new_boss_door.subtype = BOSS_DOOR_SUBTYPE
new_boss_door.var_a = 0
new_boss_door.var_b = boss_index
dest_room.entities << new_boss_door
dest_room.write_entities_to_rom()
end
end
end
# Fix Nest of Evil/Large Cavern boss doors.
game.each_room do |room|
next unless room.area.name == "Nest of Evil" || room.area.name == "Large Cavern"
room_has_enemies = room.entities.find{|e| e.type == 1}
room.entities.each do |entity|
if entity.is_boss_door? && entity.var_a == 2
# Boss door in Nest of Evil/Large Cavern that never opens.
if room_has_enemies
# We switch these to the normal ones that open when the room is cleared so the player can progress even with rooms randomized.
entity.var_a = 1
else
# This is one of the vertical corridors with no enemies. A normal boss door wouldn't open here since there's no way to clear a room with no enemies.
# So instead just delete the door.
entity.type = 0
end
entity.write_to_rom()
end
end
end
end
def center_bosses_for_room_rando
# Move some bosses to the center of their room so the player doesn't get hit by them as soon as they enter from the wrong side.
case GAME
when "dos"
abaddon = game.entity_by_str("00-0B-13_00")
abaddon.x_pos = 0x80
abaddon.write_to_rom()
when "por"
behemoth = game.entity_by_str("00-00-09_02")
behemoth.x_pos = 0x100
behemoth.write_to_rom()
creature = game.entity_by_str("08-00-04_00")
creature.x_pos = 0x100
creature.write_to_rom()
when "ooe"
arthroverta = game.entity_by_str("12-00-13_00")
arthroverta.x_pos = 0x80
arthroverta.write_to_rom()
end
end
# Aligns a randomized door with its destination door so the player is never
# placed inside a solid wall when transitioning between rooms.
# Two fixups are applied:
#   1. The unused dest_x_2/dest_y_2 offsets are set so the two openings line up.
#   2. If one opening is larger than the other, the excess tiles of the larger
#      opening are blocked off so both gaps end up the same size.
def line_up_door(door)
  # Sometimes two doors don't line up perfectly. For example if the opening is at the bottom of one room but the middle of the other.
  # We change the dest_x/dest_y of these so they line up correctly.
  dest_door = door.destination_door
  dest_room = dest_door.room
  
  # Normalize the door pair so we always know which side is which.
  case door.direction
  when :left
    left_door = door
    right_door = dest_door
  when :right
    right_door = door
    left_door = dest_door
  when :up
    up_door = door
    down_door = dest_door
  when :down
    down_door = door
    up_door = dest_door
  end
  
  # NOTE: for vertical doors left_door is nil (not undefined) here, because
  # Ruby defines a local for any assignment parsed earlier in the method body.
  if GAME == "por" && left_door && left_door.destination_room.room_str == "00-01-07"
    # Left door that leads into the Behemoth chase sequence room.
    # If the player enters through this door first instead of from the normal direction, they can get stuck in the gate at the right side of the room.
    # Give it an x offset 2 blocks to the left so the player gets past the gate.
    left_door.dest_x_2 -= 0x20
  end
  
  case door.direction
  when :left, :right
    left_first_tile_i, left_last_tile_i, left_tiles_in_biggest_gap = get_biggest_door_gap(left_door)
    right_first_tile_i, right_last_tile_i, right_tiles_in_biggest_gap = get_biggest_door_gap(right_door)
    
    # The openings end at different heights: shift each door's destination so
    # the bottoms of the two gaps coincide (offsets are in pixels, 0x10/tile).
    unless left_last_tile_i == right_last_tile_i
      left_door_dest_y_offset = (right_last_tile_i - left_last_tile_i) * 0x10
      right_door_dest_y_offset = (left_last_tile_i - right_last_tile_i) * 0x10
      
      # We use the unused dest offsets because they still work fine and this way we don't mess up the code Door#destination_door uses to guess the destination door, since that's based off the used dest_x and dest_y.
      left_door.dest_y_2 = left_door_dest_y_offset
      right_door.dest_y_2 = right_door_dest_y_offset
    end
    
    # If the gaps are not the same size we need to block off part of the bigger gap so that they are the same size.
    # Otherwise the player could enter a room inside a solid wall, and get thrown out of bounds.
    if left_tiles_in_biggest_gap.size < right_tiles_in_biggest_gap.size
      num_tiles_to_remove = right_tiles_in_biggest_gap.size - left_tiles_in_biggest_gap.size
      tiles_to_remove = right_tiles_in_biggest_gap[0, num_tiles_to_remove]
      
      # For those huge doorways that take up the entire screen (e.g. Kalidus), we want to make sure the bottommost tile of that screen is solid so it's properly delineated from the doorway of the screen below.
      if right_tiles_in_biggest_gap.size == SCREEN_HEIGHT_IN_TILES
        # We move up the gap by one block.
        tiles_to_remove = tiles_to_remove[0..-2]
        tiles_to_remove << right_tiles_in_biggest_gap.last
        
        # Then we also have to readjust the dest y offsets.
        # NOTE(review): this assumes dest_y_2 holds a sane value when the
        # `unless` above didn't assign it — confirm against the Door class.
        left_door.dest_y_2 -= 0x10
        right_door.dest_y_2 += 0x10
      end
      
      block_off_tiles(right_door.room, tiles_to_remove)
    elsif right_tiles_in_biggest_gap.size < left_tiles_in_biggest_gap.size
      num_tiles_to_remove = left_tiles_in_biggest_gap.size - right_tiles_in_biggest_gap.size
      tiles_to_remove = left_tiles_in_biggest_gap[0, num_tiles_to_remove]
      
      # For those huge doorways that take up the entire screen (e.g. Kalidus), we want to make sure the bottommost tile of that screen is solid so it's properly delineated from the doorway of the screen below.
      if left_tiles_in_biggest_gap.size == SCREEN_HEIGHT_IN_TILES
        # We move up the gap by one block.
        tiles_to_remove = tiles_to_remove[0..-2]
        tiles_to_remove << left_tiles_in_biggest_gap.last
        
        # Then we also have to readjust the dest y offsets.
        right_door.dest_y_2 -= 0x10
        left_door.dest_y_2 += 0x10
      end
      
      block_off_tiles(left_door.room, tiles_to_remove)
    end
    
    left_door.write_to_rom()
    right_door.write_to_rom()
  when :up, :down
    # Same idea as the horizontal case, but aligning on x instead of y.
    up_first_tile_i, up_last_tile_i, up_tiles_in_biggest_gap = get_biggest_door_gap(up_door)
    down_first_tile_i, down_last_tile_i, down_tiles_in_biggest_gap = get_biggest_door_gap(down_door)
    
    unless up_last_tile_i == down_last_tile_i
      up_door_dest_x_offset = (down_last_tile_i - up_last_tile_i) * 0x10
      down_door_dest_x_offset = (up_last_tile_i - down_last_tile_i) * 0x10
      
      # We use the unused dest offsets because they still work fine and this way we don't mess up the code Door#destination_door uses to guess the destination door, since that's based off the used dest_x and dest_y.
      up_door.dest_x_2 = up_door_dest_x_offset
      down_door.dest_x_2 = down_door_dest_x_offset
    end
    
    # If the gaps are not the same size we need to block off part of the bigger gap so that they are the same size.
    # Otherwise the player could enter a room inside a solid wall, and get thrown out of bounds.
    if up_tiles_in_biggest_gap.size < down_tiles_in_biggest_gap.size
      num_tiles_to_remove = down_tiles_in_biggest_gap.size - up_tiles_in_biggest_gap.size
      tiles_to_remove = down_tiles_in_biggest_gap[0, num_tiles_to_remove]
      block_off_tiles(down_door.room, tiles_to_remove)
    elsif down_tiles_in_biggest_gap.size < up_tiles_in_biggest_gap.size
      num_tiles_to_remove = up_tiles_in_biggest_gap.size - down_tiles_in_biggest_gap.size
      tiles_to_remove = up_tiles_in_biggest_gap[0, num_tiles_to_remove]
      block_off_tiles(up_door.room, tiles_to_remove)
    end
    
    up_door.write_to_rom()
    down_door.write_to_rom()
  end
end
# Scans the room's collision data along the edge the door sits on and returns
# [first_tile_i, last_tile_i, tiles_in_biggest_gap] for the largest contiguous
# run of non-solid tiles (the walkable opening). The i indices are relative to
# the door's own screen, in tiles. Gaps touching the room's outer edge are
# ignored whenever an interior gap exists. Raises if the door has no gap.
def get_biggest_door_gap(door)
  coll = RoomCollision.new(door.room, game.fs)
  tiles = []
  
  case door.direction
  when :left, :right
    # Sample the single tile column the door occupies (0x10 px per tile).
    if door.direction == :left
      x = 0
    else
      x = door.x_pos*SCREEN_WIDTH_IN_TILES - 1
    end
    y_start = door.y_pos*SCREEN_HEIGHT_IN_TILES
    (y_start..y_start+SCREEN_HEIGHT_IN_TILES-1).each_with_index do |y, i|
      is_solid = coll[x*0x10,y*0x10].is_solid?
      tiles << {is_solid: is_solid, i: i, x: x, y: y}
    end
    
    # Keep track of gaps that extend all the way to the top/bottom of the room so we can ignore these gaps later.
    bottom_y_of_top_edge_gap = -1
    (0..door.room.height*SCREEN_HEIGHT_IN_TILES-1).each do |y|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        bottom_y_of_top_edge_gap = y
      end
    end
    top_y_of_bottom_edge_gap = door.room.height*SCREEN_HEIGHT_IN_TILES
    (0..door.room.height*SCREEN_HEIGHT_IN_TILES-1).reverse_each do |y|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        top_y_of_bottom_edge_gap = y
      end
    end
  when :up, :down
    # Sample the single tile row the door occupies.
    if door.direction == :up
      y = 0
    else
      y = door.y_pos*SCREEN_HEIGHT_IN_TILES - 1
      if GAME == "por" && door.room.room_str == "03-01-01"
        # One specific room in sandy grave doesn't have any tiles at the very bottom row. Instead we use the row second closest to the bottom.
        y -= 1
      end
    end
    x_start = door.x_pos*SCREEN_WIDTH_IN_TILES
    (x_start..x_start+SCREEN_WIDTH_IN_TILES-1).each_with_index do |x, i|
      is_solid = coll[x*0x10,y*0x10].is_solid?
      tiles << {is_solid: is_solid, i: i, x: x, y: y}
    end
    
    # Keep track of gaps that extend all the way to the left/right of the room so we can ignore these gaps later.
    right_x_of_left_edge_gap = -1
    (0..door.room.width*SCREEN_WIDTH_IN_TILES-1).each do |x|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        right_x_of_left_edge_gap = x
      end
    end
    left_x_of_right_edge_gap = door.room.width*SCREEN_WIDTH_IN_TILES
    (0..door.room.width*SCREEN_WIDTH_IN_TILES-1).reverse_each do |x|
      if coll[x*0x10,y*0x10].is_solid?
        break
      else
        left_x_of_right_edge_gap = x
      end
    end
  end
  
  # Group consecutive tiles by solidity; the non-solid runs are the gaps.
  chunks = tiles.chunk{|tile| tile[:is_solid]}
  gaps = chunks.select{|is_solid, tiles| !is_solid}.map{|is_solid, tiles| tiles}
  
  # Try to limit to gaps that aren't touching the edge of the room if possible.
  case door.direction
  when :left, :right
    gaps_not_on_room_edge = gaps.reject do |tiles|
      if tiles.first[:y] <= bottom_y_of_top_edge_gap
        true
      elsif tiles.last[:y] >= top_y_of_bottom_edge_gap
        true
      else
        false
      end
    end
  when :up, :down
    gaps_not_on_room_edge = gaps.reject do |tiles|
      if tiles.first[:x] <= right_x_of_left_edge_gap
        true
      elsif tiles.last[:x] >= left_x_of_right_edge_gap
        true
      else
        false
      end
    end
  end
  if gaps_not_on_room_edge.any?
    gaps = gaps_not_on_room_edge
  end
  
  if gaps.empty?
    raise "Door #{door.door_str} has no gaps."
  end
  
  # On a tie, max_by keeps the first (topmost/leftmost) gap.
  tiles_in_biggest_gap = gaps.max_by{|tiles| tiles.length}
  first_tile_i = tiles_in_biggest_gap.first[:i]
  last_tile_i = tiles_in_biggest_gap.last[:i]
  
  return [first_tile_i, last_tile_i, tiles_in_biggest_gap]
end
# Overwrites the given tiles (hashes with :x/:y keys, in tile coordinates)
# with the tileset's solid blockade tile so the player can't pass through
# them, then persists the modified layer back to the ROM.
def block_off_tiles(room, tiles)
  room.sector.load_necessary_overlay()
  
  layer = room.layers.first
  solid_index = SOLID_BLOCKADE_TILE_INDEX_FOR_TILESET[room.overlay_id][layer.collision_tileset_pointer]
  row_stride = SCREEN_WIDTH_IN_TILES*layer.width
  
  tiles.each do |tile|
    target = layer.tiles[tile[:x] + tile[:y]*row_stride]
    target.index_on_tileset = solid_index
    target.horizontal_flip = false
    target.vertical_flip = false
  end
  
  layer.write_to_rom()
end
end
|
#
# Cookbook Name:: bcpc
# Recipe:: mysql
#
# Copyright 2013, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'bcpc::default'
include_recipe 'bcpc::mysql_client'
include_recipe 'bcpc::mysql_data_bags'
#
# These data bags and vault items are pre-populated at compile time by
# the bcpc::mysql_data_bags recipe.
#
check_user = get_config!('mysql-check-user')
check_password = get_config!('password', 'mysql-check', 'os')
galera_user = get_config!('mysql-galera-user')
galera_password = get_config!('password', 'mysql-galera', 'os')
root_user = get_config!('mysql-root-user')
#
# The value in the databag should ALWAYS be root.
# If it is not, warn the user and bail out.
#
if root_user != 'root'
raise 'mysql-root-user is not "root" ! ' \
'This would have unpredictable effects in this version of chef-bach!'
end
root_password = get_config!('password', 'mysql-root', 'os')
#
# Since the root password is set in debconf before the package is
# installed, Percona XtraDB will come up with the password already
# set for the root user.
#
package 'debconf-utils'
[
'root_password',
'root_password_again'
].each do |preseed_item|
execute "percona-preseed-#{preseed_item}" do
command 'echo "percona-xtradb-cluster-server-5.6 ' \
"percona-xtradb-cluster-server/#{preseed_item} " \
"password #{root_password}\" | debconf-set-selections"
sensitive true if respond_to?(:sensitive)
end
end
directory '/etc/mysql' do
owner 'root'
group 'root'
mode 00755
end
template '/etc/mysql/my.cnf' do
source 'mysql/my.cnf.erb'
mode 00644
notifies :reload, 'service[mysql]', :delayed
end
template '/etc/mysql/debian.cnf' do
source 'mysql/my-debian.cnf.erb'
mode 00644
notifies :reload, 'service[mysql]', :delayed
end
directory '/etc/mysql/conf.d' do
owner 'root'
group 'root'
mode 00755
end
apt_package 'percona-xtradb-cluster-56' do
#
# This is an ":install" and not an ":upgrade" to avoid momentary
# disruptions in the event of a chef run when only a bare quorum is
# available.
#
# In theory, all 5.6.x revisions should be compatible, so adding new
# cluster members with a different subversion should be OK.
#
action :install
# These dpkg options allow apt to ignore the pre-existing my.cnf file.
options '-o Dpkg::Options::="--force-confdef" ' \
'-o Dpkg::Options::="--force-confold"'
end
service 'mysql' do
action [:enable, :start]
end
[
'localhost',
'%'
].each do |host_name|
mysql_database_user galera_user do
connection mysql_local_connection_info
host host_name
password galera_password
action :create
end
mysql_database_user galera_user do
connection mysql_local_connection_info
host host_name
privileges ['ALL PRIVILEGES']
action :grant
end
mysql_database_user galera_user do
connection mysql_local_connection_info
host host_name
database_name '*.*'
privileges ['ALL PRIVILEGES']
action :grant
end
end
mysql_database_user check_user do
connection mysql_local_connection_info
host 'localhost'
password check_password
action :create
end
mysql_database_user check_user do
connection mysql_local_connection_info
privileges ['PROCESS']
action :grant
end
#
# We re-create the root user with host '%' so that it is usable over
# remote TCP sessions.
#
mysql_database_user root_user do
connection mysql_local_connection_info
host '%'
password root_password
action :create
end
mysql_database_user root_user do
connection mysql_local_connection_info
privileges ['ALL PRIVILEGES']
grant_option true
action :grant
end
mysql_database_user root_user do
connection mysql_local_connection_info
database_name '*.*'
privileges ['ALL PRIVILEGES']
grant_option true
action :grant
end
mysql_nodes = get_nodes_for('mysql', 'bcpc')
all_nodes = get_all_nodes
max_connections =
[
(mysql_nodes.length * 50 + all_nodes.length * 5),
200
].max
pool_size = node['bcpc']['mysql']['innodb_buffer_pool_size']
template '/etc/mysql/conf.d/wsrep.cnf' do
source 'mysql/wsrep.cnf.erb'
mode 00644
variables(max_connections: max_connections,
innodb_buffer_pool_size: pool_size,
servers: mysql_nodes)
notifies :stop, 'service[mysql]', :immediate
notifies :start, 'service[mysql]', :immediate
end
# #
# # I can't tell what this code was meant to do. The bare gcomm://
# # will only exist as long as no other cluster members have
# # converged, so why would I want to replace it?
# #
# # Additionally, wsrep_urls has been replaced in modern versions.
# # I don't think this code has functioned for some years.
# #
# bash "remove-bare-gcomm" do
# action :nothing
# user "root"
# code <<-EOH
# sed --in-place 's/^\\(wsrep_urls=.*\\),gcomm:\\/\\/"/\\1"/' \
# /etc/mysql/conf.d/wsrep.cnf
# EOH
# end
ruby_block 'Check MySQL Quorum Status' do
block do
require 'mysql2'
require 'timeout'
# Returns 'ON' if wsrep is ready.
# Returns 'nil' if we time out or get an error.
def wsrep_ready_value(client_options)
Timeout.timeout(5) do
client = Mysql2::Client.new(client_options)
result = client.query("SHOW GLOBAL STATUS LIKE 'wsrep_ready'")
result.first['Value']
end
rescue
nil
end
mysql_status = nil
poll_attempts = 10
poll_attempts.times do |i|
mysql_status = wsrep_ready_value(mysql_local_connection_info)
if mysql_status == 'ON'
Chef::Log.info("MySQL is up after #{i} poll attempts")
break
else
Chef::Log.debug("MySQL status is #{mysql_status.inspect}, sleeping")
sleep(0.5)
end
end
unless mysql_status == 'ON'
raise 'MySQL wsrep status still not ready after ' \
"#{poll_attempts} poll attempts! (got: #{mysql_status.inspect})"
end
end
end
package 'xinetd' do
action :upgrade
end
service 'xinetd' do
action [:enable, :start]
end
replace_or_add 'add-mysqlchk-to-etc-services' do
path '/etc/services'
pattern '^mysqlchk'
line "mysqlchk\t3307/tcp"
end
template '/etc/xinetd.d/mysqlchk' do
source 'mysql/xinetd-mysqlchk.erb'
owner 'root'
group 'root'
mode 00440
notifies :restart, 'service[xinetd]', :immediately
end
#
# Now that we have a working Percona instance, we'll install a dummy
# metapackage to prevent any well-meaning packages from depending on
# mysql-server and mysql-server-5.5
#
package 'equivs'
control_file_path =
::File.join(Chef::Config.file_cache_path, 'mysql-server.control')
file control_file_path do
content <<-EOM.gsub(/^ {4}/,'')
Section: database
Priority: optional
Standards-Version: 3.9.2
Package: mysql-server
Version: 5.6
Maintainer: Andrew Jones <ajones291@bloomberg.net>
Architecture: all
Description: Dummy package to prevent the installation of mysql-server
EOM
end
deb_file_path =
::File.join(Chef::Config.file_cache_path,'mysql-server_5.6_all.deb')
execute 'mysql-server-build' do
cwd ::File.dirname(deb_file_path)
command "equivs-build #{control_file_path}"
creates deb_file_path
end
dpkg_package deb_file_path
Update maintainer email on dummy packages
#
# Cookbook Name:: bcpc
# Recipe:: mysql
#
# Copyright 2013, Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# Installs and configures a Percona XtraDB (Galera) cluster node, sets up
# the galera/check/root MySQL users, renders wsrep.cnf, waits for cluster
# quorum, exposes an xinetd health check, and blocks stock mysql-server
# packages with a dummy metapackage.

include_recipe 'bcpc::default'
include_recipe 'bcpc::mysql_client'
include_recipe 'bcpc::mysql_data_bags'

#
# These data bags and vault items are pre-populated at compile time by
# the bcpc::mysql_data_bags recipe.
#
check_user = get_config!('mysql-check-user')
check_password = get_config!('password', 'mysql-check', 'os')
galera_user = get_config!('mysql-galera-user')
galera_password = get_config!('password', 'mysql-galera', 'os')
root_user = get_config!('mysql-root-user')

#
# The value in the databag should ALWAYS be root.
# If it is not, warn the user and bail out.
#
if root_user != 'root'
  raise 'mysql-root-user is not "root" ! ' \
    'This would have unpredictable effects in this version of chef-bach!'
end

root_password = get_config!('password', 'mysql-root', 'os')

#
# Since the root password is set in debconf before the package is
# installed, Percona XtraDB will come up with the password already
# set for the root user.
#
package 'debconf-utils'

[
  'root_password',
  'root_password_again'
].each do |preseed_item|
  execute "percona-preseed-#{preseed_item}" do
    command 'echo "percona-xtradb-cluster-server-5.6 ' \
      "percona-xtradb-cluster-server/#{preseed_item} " \
      "password #{root_password}\" | debconf-set-selections"
    # Hide the root password from chef-client logs where supported.
    sensitive true if respond_to?(:sensitive)
  end
end

# Config files are rendered before the package install so Percona starts
# with the correct settings on first boot.
directory '/etc/mysql' do
  owner 'root'
  group 'root'
  mode 00755
end

template '/etc/mysql/my.cnf' do
  source 'mysql/my.cnf.erb'
  mode 00644
  notifies :reload, 'service[mysql]', :delayed
end

template '/etc/mysql/debian.cnf' do
  source 'mysql/my-debian.cnf.erb'
  mode 00644
  notifies :reload, 'service[mysql]', :delayed
end

directory '/etc/mysql/conf.d' do
  owner 'root'
  group 'root'
  mode 00755
end

apt_package 'percona-xtradb-cluster-56' do
  #
  # This is an ":install" and not an ":upgrade" to avoid momentary
  # disruptions in the event of a chef run when only a bare quorum is
  # available.
  #
  # In theory, all 5.6.x revisions should be compatible, so adding new
  # cluster members with a different subversion should be OK.
  #
  action :install
  # These dpkg options allow apt to ignore the pre-existing my.cnf file.
  options '-o Dpkg::Options::="--force-confdef" ' \
    '-o Dpkg::Options::="--force-confold"'
end

service 'mysql' do
  action [:enable, :start]
end

# Create the Galera replication user for both local and remote hosts.
[
  'localhost',
  '%'
].each do |host_name|
  mysql_database_user galera_user do
    connection mysql_local_connection_info
    host host_name
    password galera_password
    action :create
  end

  mysql_database_user galera_user do
    connection mysql_local_connection_info
    host host_name
    privileges ['ALL PRIVILEGES']
    action :grant
  end

  mysql_database_user galera_user do
    connection mysql_local_connection_info
    host host_name
    database_name '*.*'
    privileges ['ALL PRIVILEGES']
    action :grant
  end
end

# The check user is used by the xinetd-based mysqlchk health check below.
mysql_database_user check_user do
  connection mysql_local_connection_info
  host 'localhost'
  password check_password
  action :create
end

mysql_database_user check_user do
  connection mysql_local_connection_info
  privileges ['PROCESS']
  action :grant
end

#
# We re-create the root user with host '%' so that it is usable over
# remote TCP sessions.
#
mysql_database_user root_user do
  connection mysql_local_connection_info
  host '%'
  password root_password
  action :create
end

mysql_database_user root_user do
  connection mysql_local_connection_info
  privileges ['ALL PRIVILEGES']
  grant_option true
  action :grant
end

mysql_database_user root_user do
  connection mysql_local_connection_info
  database_name '*.*'
  privileges ['ALL PRIVILEGES']
  grant_option true
  action :grant
end

# Size max_connections to the cluster: 50 per mysql head plus 5 per cluster
# node, with a floor of 200.
mysql_nodes = get_nodes_for('mysql', 'bcpc')
all_nodes = get_all_nodes
max_connections =
  [
    (mysql_nodes.length * 50 + all_nodes.length * 5),
    200
  ].max

pool_size = node['bcpc']['mysql']['innodb_buffer_pool_size']

template '/etc/mysql/conf.d/wsrep.cnf' do
  source 'mysql/wsrep.cnf.erb'
  mode 00644
  variables(max_connections: max_connections,
            innodb_buffer_pool_size: pool_size,
            servers: mysql_nodes)
  # A stop/start cycle is used (rather than restart) when the wsrep config
  # changes; presumably so the new cluster settings are fully re-read --
  # TODO confirm intent.
  notifies :stop, 'service[mysql]', :immediate
  notifies :start, 'service[mysql]', :immediate
end

# #
# # I can't tell what this code was meant to do. The bare gcomm://
# # will only exist as long as no other cluster members have
# # converged, so why would I want to replace it?
# #
# # Additionally, wsrep_urls has been replaced in modern versions.
# # I don't think this code has functioned for some years.
# #
# bash "remove-bare-gcomm" do
#   action :nothing
#   user "root"
#   code <<-EOH
#     sed --in-place 's/^\\(wsrep_urls=.*\\),gcomm:\\/\\/"/\\1"/' \
#       /etc/mysql/conf.d/wsrep.cnf
#   EOH
# end

# Block the converge until this node reports wsrep_ready = ON, i.e. it has
# successfully joined (or formed) the Galera cluster.
ruby_block 'Check MySQL Quorum Status' do
  block do
    require 'mysql2'
    require 'timeout'

    # Returns 'ON' if wsrep is ready.
    # Returns 'nil' if we time out or get an error.
    def wsrep_ready_value(client_options)
      Timeout.timeout(5) do
        client = Mysql2::Client.new(client_options)
        result = client.query("SHOW GLOBAL STATUS LIKE 'wsrep_ready'")
        result.first['Value']
      end
    rescue
      nil
    end

    # Poll up to 10 times, sleeping 0.5s between failed attempts.
    mysql_status = nil
    poll_attempts = 10

    poll_attempts.times do |i|
      mysql_status = wsrep_ready_value(mysql_local_connection_info)

      if mysql_status == 'ON'
        Chef::Log.info("MySQL is up after #{i} poll attempts")
        break
      else
        Chef::Log.debug("MySQL status is #{mysql_status.inspect}, sleeping")
        sleep(0.5)
      end
    end

    unless mysql_status == 'ON'
      raise 'MySQL wsrep status still not ready after ' \
        "#{poll_attempts} poll attempts! (got: #{mysql_status.inspect})"
    end
  end
end

# mysqlchk: an xinetd service load balancers can poll on TCP 3307.
package 'xinetd' do
  action :upgrade
end

service 'xinetd' do
  action [:enable, :start]
end

replace_or_add 'add-mysqlchk-to-etc-services' do
  path '/etc/services'
  pattern '^mysqlchk'
  line "mysqlchk\t3307/tcp"
end

template '/etc/xinetd.d/mysqlchk' do
  source 'mysql/xinetd-mysqlchk.erb'
  owner 'root'
  group 'root'
  mode 00440
  notifies :restart, 'service[xinetd]', :immediately
end

#
# Now that we have a working Percona instance, we'll install a dummy
# metapackage to prevent any well-meaning packages from depending on
# mysql-server and mysql-server-5.5
#
package 'equivs'

control_file_path =
  ::File.join(Chef::Config.file_cache_path, 'mysql-server.control')

file control_file_path do
  # The gsub strips the 4-space heredoc indent; the control body must keep
  # exactly that indentation.
  content <<-EOM.gsub(/^ {4}/,'')
    Section: database
    Priority: optional
    Standards-Version: 3.9.2
    Package: mysql-server
    Version: 5.6
    Maintainer: BACH <hadoop@bloomberg.net>
    Architecture: all
    Description: Dummy package to prevent the installation of mysql-server
  EOM
end

deb_file_path =
  ::File.join(Chef::Config.file_cache_path,'mysql-server_5.6_all.deb')

execute 'mysql-server-build' do
  cwd ::File.dirname(deb_file_path)
  command "equivs-build #{control_file_path}"
  creates deb_file_path
end

dpkg_package deb_file_path
|
name 'omnitruck'
maintainer 'Chef Software'
maintainer_email 'cookbooks@chef.io'
license 'Apache2'
description 'Installs/Configures omnitruck'
long_description 'Installs/Configures omnitruck'
version '0.4.22'
depends 'delivery-sugar'
depends 'cia_infra'
depends 'habitat', '~> 0.4.0'
depends 'build-essential'
issues_url 'https://github.com/chef/omnitruck/issues'
source_url 'https://github.com/chef/omnitruck'
Update habitat cookbook version
Signed-off-by: Patrick Wright <cbb7353e6d953ef360baf960c122346276c6e320@chef.io>
# Chef cookbook metadata for the omnitruck cookbook.
name 'omnitruck'
maintainer 'Chef Software'
maintainer_email 'cookbooks@chef.io'
license 'Apache2'
description 'Installs/Configures omnitruck'
long_description 'Installs/Configures omnitruck'
version '0.4.23'

depends 'delivery-sugar'
depends 'cia_infra'
# Pessimistically pinned to the habitat cookbook 0.38.x series.
depends 'habitat', '~> 0.38.0'
depends 'build-essential'

issues_url 'https://github.com/chef/omnitruck/issues'
source_url 'https://github.com/chef/omnitruck'
|
ver = ESP32::System.sdk_version
puts "SDK Version: #{ver}"
mem = ESP32::System.available_memory / 1000
puts "Memory free: #{mem}K"
puts "Delaying 10 seconds"
ESP32::System.delay(10 * 1000)
#ESP32::System.restart()
puts "Deep sleeping for 10 seconds"
ESP32::System.deep_sleep_for(10 * 1000000)
Update example script
# Example mruby-esp32 script: print interpreter and SDK information, then
# exercise ESP32::System.delay and deep sleep.

# Interpreter identification constants provided by mruby.
puts "RUBY_VERSION: #{RUBY_VERSION}"
puts "RUBY_ENGINE: #{RUBY_ENGINE}"
puts "RUBY_ENGINE_VERSION: #{RUBY_ENGINE_VERSION}"
puts "MRUBY_VERSION: #{MRUBY_VERSION}"
puts "MRUBY_RELEASE_NO: #{MRUBY_RELEASE_NO}"
puts "MRUBY_RELEASE_DATE: #{MRUBY_RELEASE_DATE}"
puts "MRUBY_DESCRIPTION: #{MRUBY_DESCRIPTION}"
puts "MRUBY_COPYRIGHT: #{MRUBY_COPYRIGHT}"
puts

# ESP-IDF SDK version and free heap (reported in kilobytes).
ver = ESP32::System.sdk_version
puts "ESP32 SDK Version: #{ver}"

mem = ESP32::System.available_memory / 1000
puts "Available Memory: #{mem}K"
puts

# delay takes milliseconds; deep_sleep_for takes microseconds.
puts "Delaying 10 seconds..."
ESP32::System.delay(10 * 1000)

puts "Deep sleeping for 10 seconds..."
ESP32::System.deep_sleep_for(10 * 1000000)
|
require 'chef/mixin/deep_merge'
module Merb
module ApplicationHelper
end
end
[CHEF-1194] fix cookbook screen in WebUI
require 'chef/mixin/deep_merge'

# Intentionally empty placeholder so the Merb::ApplicationHelper constant
# exists; presumably Merb or the WebUI mixes helper methods in elsewhere
# (added for CHEF-1194) -- confirm against the WebUI code.
module Merb
  module ApplicationHelper
  end
end
|
Regenerate gemspec for version 0.1.0
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = %q{html_inspector}
  s.version = "0.1.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Rusty Burchfield"]
  s.date = %q{2010-12-10}
  s.description = %q{Pretty-print your objects into HTML}
  s.email = %q{GICodeWarrior@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/html_inspector.rb",
    "lib/html_inspector/core_ext/array.rb",
    "lib/html_inspector/core_ext/enumerable.rb",
    "lib/html_inspector/core_ext/hash.rb",
    "lib/html_inspector/core_ext/object.rb",
    "lib/html_inspector/core_ext/string.rb",
    "lib/html_inspector/escape.rb",
    "lib/html_inspector/load.rb"
  ]
  s.homepage = %q{http://github.com/GICodeWarrior/html_inspector}
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{Core extension providing .html_inspect}

  # The duplicated dependency branches below are jeweler's compatibility
  # shims for pre-1.2 RubyGems, which lacked add_development_dependency.
  if s.respond_to? :specification_version then
    # NOTE: current_version is assigned but never used -- an artifact of
    # jeweler's generator template, left as-is since this file is generated.
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.5.1"])
    else
      s.add_dependency(%q<bundler>, ["~> 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.5.1"])
    end
  else
    s.add_dependency(%q<bundler>, ["~> 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.5.1"])
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
s.name = "i18n_namespace"
s.version = "0.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Matthias Zirnstein"]
s.date = "2012-01-25"
s.description = "I18n key injection with fallback functionality"
s.email = "matthias.zirnstein@googlemail.com"
s.extra_rdoc_files = [
"LICENSE.txt",
"README.rdoc"
]
s.files = [
".document",
"Gemfile",
"Gemfile.lock",
"LICENSE.txt",
"README.rdoc",
"Rakefile",
"VERSION",
"i18n_namespace.gemspec",
"lib/i18n_namespace.rb",
"lib/i18n_namespace/config.rb",
"lib/i18n_namespace/fallbacks.rb",
"lib/i18n_namespace/helper.rb",
"lib/i18n_namespace/key_value.rb",
"lib/i18n_namespace/storing.rb",
"spec/lib/i18n_namespace_spec.rb",
"test/helper.rb",
"test/test_i18n_namespace.rb"
]
s.homepage = "http://github.com/avarteqgmbh/i18n_namespace"
s.licenses = ["MIT"]
s.require_paths = ["lib"]
s.rubygems_version = "1.8.10"
s.summary = "I18n key injection"
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<i18n>, [">= 0"])
s.add_runtime_dependency(%q<yajl-ruby>, [">= 0"])
s.add_runtime_dependency(%q<active_support>, [">= 0"])
s.add_development_dependency(%q<shoulda>, [">= 0"])
s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_development_dependency(%q<rspec>, [">= 0"])
s.add_development_dependency(%q<rdoc>, ["> 2.4.2"])
else
s.add_dependency(%q<i18n>, [">= 0"])
s.add_dependency(%q<yajl-ruby>, [">= 0"])
s.add_dependency(%q<active_support>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<rdoc>, ["> 2.4.2"])
end
else
s.add_dependency(%q<i18n>, [">= 0"])
s.add_dependency(%q<yajl-ruby>, [">= 0"])
s.add_dependency(%q<active_support>, [">= 0"])
s.add_dependency(%q<shoulda>, [">= 0"])
s.add_dependency(%q<bundler>, [">= 1.0.0"])
s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
s.add_dependency(%q<rspec>, [">= 0"])
s.add_dependency(%q<rdoc>, ["> 2.4.2"])
end
end
use activesupport
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-

Gem::Specification.new do |s|
  s.name = "i18n_namespace"
  s.version = "0.0.2"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["Matthias Zirnstein"]
  s.date = "2012-01-25"
  s.description = "I18n key injection with fallback functionality"
  s.email = "matthias.zirnstein@googlemail.com"
  s.extra_rdoc_files = [
    "LICENSE.txt",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    "Gemfile",
    "Gemfile.lock",
    "LICENSE.txt",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "i18n_namespace.gemspec",
    "lib/i18n_namespace.rb",
    "lib/i18n_namespace/config.rb",
    "lib/i18n_namespace/fallbacks.rb",
    "lib/i18n_namespace/helper.rb",
    "lib/i18n_namespace/key_value.rb",
    "lib/i18n_namespace/storing.rb",
    "spec/lib/i18n_namespace_spec.rb",
    "test/helper.rb",
    "test/test_i18n_namespace.rb"
  ]
  s.homepage = "http://github.com/avarteqgmbh/i18n_namespace"
  s.licenses = ["MIT"]
  s.require_paths = ["lib"]
  s.rubygems_version = "1.8.10"
  s.summary = "I18n key injection"

  # The duplicated dependency branches below are jeweler's compatibility
  # shims for pre-1.2 RubyGems, which lacked add_runtime_dependency /
  # add_development_dependency. Note the runtime dep is the published gem
  # name "activesupport" (not "active_support").
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<i18n>, [">= 0"])
      s.add_runtime_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_runtime_dependency(%q<activesupport>, [">= 0"])
      s.add_development_dependency(%q<shoulda>, [">= 0"])
      s.add_development_dependency(%q<bundler>, [">= 1.0.0"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rspec>, [">= 0"])
      s.add_development_dependency(%q<rdoc>, ["> 2.4.2"])
    else
      s.add_dependency(%q<i18n>, [">= 0"])
      s.add_dependency(%q<yajl-ruby>, [">= 0"])
      s.add_dependency(%q<activesupport>, [">= 0"])
      s.add_dependency(%q<shoulda>, [">= 0"])
      s.add_dependency(%q<bundler>, [">= 1.0.0"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rspec>, [">= 0"])
      s.add_dependency(%q<rdoc>, ["> 2.4.2"])
    end
  else
    s.add_dependency(%q<i18n>, [">= 0"])
    s.add_dependency(%q<yajl-ruby>, [">= 0"])
    s.add_dependency(%q<activesupport>, [">= 0"])
    s.add_dependency(%q<shoulda>, [">= 0"])
    s.add_dependency(%q<bundler>, [">= 1.0.0"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rspec>, [">= 0"])
    s.add_dependency(%q<rdoc>, ["> 2.4.2"])
  end
end
|
# Homebrew formula for ignition-transport 3.0.1 (revision 8): the Ignition
# robotics transport middleware (ZeroMQ/protobuf based pub-sub).
class IgnitionTransport3 < Formula
  desc "Transport middleware for robotics"
  homepage "https://ignitionrobotics.org"
  url "http://gazebosim.org/distributions/ign-transport/releases/ignition-transport3-3.0.1.tar.bz2"
  sha256 "c2b8dd5f391a30f1239893b51d4ea487fd47bfe12ccdb3876a83df192df666be"
  revision 8
  head "https://bitbucket.org/ignitionrobotics/ign-transport", :branch => "default", :using => :hg

  # Pre-built bottles hosted on the project's own server rather than the
  # default Homebrew bottle domain.
  bottle do
    root_url "http://gazebosim.org/distributions/ign-transport/releases"
    cellar :any
    sha256 "026e29c0ecc4dfee72a5dbb87c176e457e3cb9475b28c6b1fe484b31afa48036" => :high_sierra
    sha256 "e34e5d4e76d3f80051772c38a4a571798361f6c7ed3cb424e817d8167acae294" => :sierra
    sha256 "3fe31e2fb3146b69a1b2d1e6b9ff1129709b9265fa32ca53c66d8e8c3b53cd38" => :el_capitan
  end

  depends_on "cmake" => :build
  depends_on "doxygen" => [:build, :optional]
  # NOTE(review): pkg-config as a :run dependency is unusual (it is normally
  # :build); presumably kept at runtime because downstream builds and the test
  # block below shell out to pkg-config — confirm.
  depends_on "pkg-config" => :run
  depends_on "ignition-msgs0"
  depends_on "ignition-tools"
  depends_on "protobuf"
  depends_on "protobuf-c" => :build
  depends_on "ossp-uuid"
  depends_on "zeromq"
  depends_on "cppzmq"

  patch do
    # Fix compiler warning
    url "https://bitbucket.org/ignitionrobotics/ign-transport/commits/3e5a61a5dadae573c23ba8185bb120cdbaff2d36/raw"
    sha256 "66570f0dec49e572c8687fc0819cefc5707ccb591e0a4923c48fbebe53b521c9"
  end

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # Compile and run a minimal program that instantiates a transport Node,
    # using the pkg-config flags the formula installs.
    (testpath/"test.cpp").write <<-EOS.undent
      #include <iostream>
      #include <ignition/transport.hh>
      int main() {
        ignition::transport::Node node;
        return 0;
      }
    EOS
    system "pkg-config", "ignition-transport3"
    cflags = `pkg-config --cflags ignition-transport3`.split(" ")
    system ENV.cc, "test.cpp",
                   *cflags,
                   "-L#{lib}",
                   "-lignition-transport3",
                   "-lc++",
                   "-o", "test"
    system "./test"
  end
end
ignition-transport3 3.1.0 (#324)
* ignition-transport3 3.1.0
* remove patch
* ignition-transport3 3.1.0 high_sierra bottle
* ignition-transport3 3.1.0 sierra bottle
* ignition-transport3 3.1.0 el_capitan bottle
# Homebrew formula for ignition-transport 3.1.0 (the upstream warning-fix
# patch of 3.0.1 is now merged, so no patch block is needed).
class IgnitionTransport3 < Formula
  desc "Transport middleware for robotics"
  homepage "https://ignitionrobotics.org"
  url "http://gazebosim.org/distributions/ign-transport/releases/ignition-transport3-3.1.0.tar.bz2"
  sha256 "bc8ac5bbb1cfadda857f748ba6467f9512b37a2b8395121586c459166ae45703"
  head "https://bitbucket.org/ignitionrobotics/ign-transport", :branch => "default", :using => :hg

  # Pre-built bottles hosted on the project's own server rather than the
  # default Homebrew bottle domain.
  bottle do
    root_url "http://gazebosim.org/distributions/ign-transport/releases"
    cellar :any
    sha256 "d645c27a90958e0c97fb6b012541b35bfe4bcc2b6e8b9604c2d71c973d029aa3" => :high_sierra
    sha256 "4673ae38021accff0116fc9bc2dc68c52c8a5ba25daa8c5d4cf80363caa5dc6a" => :sierra
    sha256 "ed86c8108eae4c6195c21f815da5f0a6c387378ccd67f561a0fd7aec233c760f" => :el_capitan
  end

  depends_on "cmake" => :build
  depends_on "doxygen" => [:build, :optional]
  # NOTE(review): pkg-config as a :run dependency is unusual (normally :build);
  # presumably needed because consumers and the test block use pkg-config — confirm.
  depends_on "pkg-config" => :run
  depends_on "ignition-msgs0"
  depends_on "ignition-tools"
  depends_on "protobuf"
  depends_on "protobuf-c" => :build
  depends_on "ossp-uuid"
  depends_on "zeromq"
  depends_on "cppzmq"

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # Compile and run a minimal program that instantiates a transport Node,
    # using the pkg-config flags the formula installs.
    (testpath/"test.cpp").write <<-EOS.undent
      #include <iostream>
      #include <ignition/transport.hh>
      int main() {
        ignition::transport::Node node;
        return 0;
      }
    EOS
    system "pkg-config", "ignition-transport3"
    cflags = `pkg-config --cflags ignition-transport3`.split(" ")
    system ENV.cc, "test.cpp",
                   *cflags,
                   "-L#{lib}",
                   "-lignition-transport3",
                   "-lc++",
                   "-o", "test"
    system "./test"
  end
end
|
# Rake tasks for running the stock Rails ActiveRecord test suite against
# AR-JDBC's adapters (rails:test_mysql / rails:test_sqlite3 /
# rails:test_postgresql), plus rails:db:* helpers that (re)build the test
# databases and a legacy rails:test:* namespace.
namespace :rails do
  %w(MySQL SQLite3 PostgreSQL).each do |adapter|
    desc "Run Rails ActiveRecord tests with #{adapter} (JDBC)"
    # NOTE: the interpolation below also re-assigns the block variable to its
    # down-cased form; the task body relies on that lower-case value.
    task "test_#{adapter = adapter.downcase}" do
      puts "Use TESTOPTS=\"--verbose\" to pass --verbose to runners." if ARGV.include? '--verbose'
      # Locate the installed activerecord gem; its bundled test/ dir is only
      # present when activerecord points at a source checkout (see Gemfile).
      require 'active_record/version'; ar_path = Gem.loaded_specs['activerecord'].full_gem_path
      unless File.exist? ar_test_dir = File.join(ar_path, 'test')
        raise "can not directly load Rails tests;" +
          " try setting a local repository path e.g. export RAILS=`pwd`/../rails && bundle install"
      end
      driver = "jdbc-#{adapter =~ /postgres/i ? 'postgres' : adapter}"
      adapter = 'mysql2' if adapter.eql?('mysql')
      root_dir = File.expand_path('..', File.dirname(__FILE__))
      # Environment exported to the spawned test process:
      env = {}
      env['ARCONFIG'] = File.join(root_dir, 'test/rails', 'config.yml')
      env['ARCONN'] = adapter
      env['BUNDLE_GEMFILE'] = ENV['BUNDLE_GEMFILE'] || File.join(root_dir, 'Gemfile') # use AR-JDBC's with Rails tests
      env['EXCLUDE_DIR'] = File.join(root_dir, 'test/rails/excludes', adapter) # minitest-excludes
      libs = [
        File.join(root_dir, 'lib'),
        File.join(root_dir, driver, 'lib'),
        File.join(root_dir, 'test/rails'),
        ar_test_dir
      ]
      # Mirrors Rails' own test-file selection: all cases except adapter
      # specific ones, plus the cases for the selected adapter.
      test_files_finder = lambda do
        Dir.chdir(ar_path) do # taken from Rails' *activerecord/Rakefile* :
          ( Dir.glob("test/cases/**/*_test.rb").reject { |x| x =~ /\/adapters\// } +
            Dir.glob("test/cases/adapters/#{adapter}/**/*_test.rb") )
        end
      end
      # Borrow Rake::TestTask's test-loader command line without actually
      # defining a task (define is stubbed to a no-op).
      task_stub = Class.new(Rake::TestTask) { def define; end }.new # no-op define
      test_loader_code = task_stub.run_code # :rake test-loader
      ruby_opts_string = "-I\"#{libs.join(File::PATH_SEPARATOR)}\""
      ruby_opts_string += " -C \"#{ar_path}\""
      ruby_opts_string += " -rbundler/setup"
      ruby_opts_string += " -rminitest/excludes"
      file_list_string = ENV["TEST"] ? FileList[ ENV["TEST"] ] : test_files_finder.call
      file_list_string = file_list_string.map { |fn| "\"#{fn}\"" }.join(' ')
      # test_loader_code = "-e \"ARGV.each{|f| require f}\"" # :direct
      option_list = ( ENV["TESTOPTS"] || ENV["TESTOPT"] || ENV["TEST_OPTS"] || '' )
      args = "#{ruby_opts_string} #{test_loader_code} #{file_list_string} #{option_list}"
      env_sh env, "#{FileUtils::RUBY} #{args}" do |ok, status|
        if !ok && status.respond_to?(:signaled?) && status.signaled?
          raise SignalException.new(status.termsig)
        elsif !ok
          fail "Command failed with status (#{status.exitstatus})"
        end
      end
    end
    # NOTE(review): everything from here to the end of the each-block runs
    # once per adapter iteration (3x): the prerequisite below is re-added and
    # env_sh/env_system are re-defined each time. Harmless, but presumably
    # intended to sit after the loop — confirm before moving.
    task :test_mysql2 => :test_mysql
    FileUtils.module_eval do
      # Like Rake's sh, but prefixes a hash of environment variables onto the
      # shell command line (KEY="value" ... cmd).
      def env_sh(env, *cmd, &block)
        options = (Hash === cmd.last) ? cmd.pop : {}
        shell_runner = block_given? ? block : create_shell_runner(cmd)
        set_verbose_option(options)
        options[:noop] ||= Rake::FileUtilsExt.nowrite_flag
        Rake.rake_check_options options, :noop, :verbose
        cmd = env.map { |k,v| "#{k}=\"#{v}\"" }.join(' ') + ' ' + cmd.join(' ')
        Rake.rake_output_message cmd if options[:verbose]
        unless options[:noop]
          res = Kernel.system(cmd)
          status = $?
          status = Rake::PseudoStatus.new(1) if !res && status.nil?
          shell_runner.call(res, status)
        end
      end
      # Bare Kernel.system with the same env-prefix convention; no error handling.
      def env_system(env, cmd)
        Kernel.system(env.map { |k,v| "#{k}=\"#{v}\"" }.join(' ') + ' ' + cmd)
      end
    end
  end
  # Helpers to create/drop the two test databases Rails' AR suite expects
  # (arunit and arunit2); run against a local mysql/postgres install.
  namespace :db do
    namespace :mysql do
      desc 'Build the MySQL test databases'
      task :build do
        config = ARTest.config['connections']['mysql2']
        %x( mysql --user=#{config['arunit']['username']} --password=#{config['arunit']['password']} -e "create DATABASE #{config['arunit']['database']} DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci ")
        %x( mysql --user=#{config['arunit2']['username']} --password=#{config['arunit2']['password']} -e "create DATABASE #{config['arunit2']['database']} DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci ")
      end
      desc 'Drop the MySQL test databases'
      task :drop do
        config = ARTest.config['connections']['mysql2']
        %x( mysqladmin --user=#{config['arunit']['username']} --password=#{config['arunit']['password']} -f drop #{config['arunit']['database']} )
        %x( mysqladmin --user=#{config['arunit2']['username']} --password=#{config['arunit2']['password']} -f drop #{config['arunit2']['database']} )
      end
      desc 'Rebuild the MySQL test databases'
      task :rebuild => [:drop, :build]
    end
    namespace :postgresql do
      desc 'Build the PostgreSQL test databases'
      task :build do
        config = ARTest.config['connections']['postgresql']
        %x( createdb -E UTF8 -T template0 #{config['arunit']['database']} )
        %x( createdb -E UTF8 -T template0 #{config['arunit2']['database']} )
        # prepare hstore: before PostgreSQL 9.1 the extension must be loaded
        # manually (the version is scraped from `createdb --version` output).
        if %x( createdb --version ).strip.gsub(/(.*)(\d\.\d\.\d)$/, "\\2") < "9.1.0"
          puts "Please prepare hstore data type. See http://www.postgresql.org/docs/current/static/hstore.html"
        end
      end
      desc 'Drop the PostgreSQL test databases'
      task :drop do
        config = ARTest.config['connections']['postgresql']
        %x( dropdb #{config['arunit']['database']} )
        %x( dropdb #{config['arunit2']['database']} )
      end
      desc 'Rebuild the PostgreSQL test databases'
      task :rebuild => [:drop, :build]
    end
  end
  # NOTE: we expect to, hopefully, not be using these anymore - delete at WILL!
  # Legacy path: shells out to the Rails checkout's own Rakefile instead of
  # driving the tests directly.
  namespace :test do
    task :all do
      driver = ENV['DRIVER'] || ENV['ADAPTER']
      raise "need a DRIVER (DRIVER=mysql)" unless driver
      rails_dir = _rails_dir
      ENV['ARCONFIG'] = File.join(_ar_jdbc_dir, 'test', 'rails', 'config.yml')
      sh "cd #{File.join(rails_dir, 'activerecord')}; #{FileUtils::RUBY} -S rake RUBYLIB=#{_ruby_lib(rails_dir, driver)} #{_target(driver)}"
    end
    %w(MySQL SQLite3 Postgres).each do |adapter|
      task adapter.downcase do
        ENV['ADAPTER'] = adapter
        Rake::Task['rails:test:all'].invoke
      end
      namespace adapter.downcase do
        task "base_test" do
          ENV['TEST'] ||= 'test/cases/base_test.rb'
          ENV['ADAPTER'] = adapter
          Rake::Task['rails:test:all'].invoke
        end
      end
    end
    private
    # Root of the AR-JDBC checkout (one level above this rakelib dir).
    def _ar_jdbc_dir
      @ar_jdbc_dir ||= File.expand_path('..', File.dirname(__FILE__))
    end
    # Resolve the Rails source checkout (ENV['RAILS'] or sibling ../rails).
    def _rails_dir
      rails_dir = ENV['RAILS'] || File.join('..', 'rails')
      unless File.directory? rails_dir
        raise "can't find RAILS source at '#{rails_dir}' (maybe set ENV['RAILS'])"
      end
      rails_dir = File.join(rails_dir, '..') if rails_dir =~ /activerecord$/
      File.expand_path(rails_dir)
    end
    # Build the RUBYLIB path for the shelled-out rake invocation.
    def _ruby_lib(rails_dir, driver)
      ar_jdbc_dir = _ar_jdbc_dir
      if driver =~ /postgres/i
        adapter, driver = 'postgresql', 'postgres'
      else
        # NOTE(review): `adapter` on the right-hand side is nil here (it is
        # first introduced on the LHS of this very statement), so `driver`
        # becomes nil and the "jdbc-#{driver}" path below degenerates to
        # "jdbc-". Presumably `driver` was meant to keep its original value —
        # TODO confirm and fix.
        adapter, driver = driver.downcase, adapter
      end
      [File.join(ar_jdbc_dir, 'lib'),
       File.join(ar_jdbc_dir, 'test', 'rails'),
       File.join(ar_jdbc_dir, "jdbc-#{driver}", 'lib'),
       File.join(ar_jdbc_dir, "activerecord-jdbc#{adapter}-adapter", 'lib'),
       File.expand_path('activesupport/lib', rails_dir),
       File.expand_path('activemodel/lib', rails_dir),
       File.expand_path('activerecord/lib', rails_dir)
      ].join(':')
    end
    # Map a driver name to the Rails rake task that runs its adapter tests.
    def _target(name)
      case name
      when /postgres/i
        'test_postgresql'
      else
        "test_jdbc#{name.downcase}"
      end
    end
  end
end
minitest/excludes disabled for now (the -rminitest/excludes require is removed) since it is broken on Travis
# Rake tasks for running the stock Rails ActiveRecord test suite against
# AR-JDBC's adapters (rails:test_mysql / rails:test_sqlite3 /
# rails:test_postgresql), plus rails:db:* helpers that (re)build the test
# databases and a legacy rails:test:* namespace.
namespace :rails do
  %w(MySQL SQLite3 PostgreSQL).each do |adapter|
    desc "Run Rails ActiveRecord tests with #{adapter} (JDBC)"
    # NOTE: the interpolation below also re-assigns the block variable to its
    # down-cased form; the task body relies on that lower-case value.
    task "test_#{adapter = adapter.downcase}" do
      puts "Use TESTOPTS=\"--verbose\" to pass --verbose to runners." if ARGV.include? '--verbose'
      # Locate the installed activerecord gem; its bundled test/ dir is only
      # present when activerecord points at a source checkout (see Gemfile).
      require 'active_record/version'; ar_path = Gem.loaded_specs['activerecord'].full_gem_path
      unless File.exist? ar_test_dir = File.join(ar_path, 'test')
        raise "can not directly load Rails tests;" +
          " try setting a local repository path e.g. export RAILS=`pwd`/../rails && bundle install"
      end
      driver = "jdbc-#{adapter =~ /postgres/i ? 'postgres' : adapter}"
      adapter = 'mysql2' if adapter.eql?('mysql')
      root_dir = File.expand_path('..', File.dirname(__FILE__))
      # Environment exported to the spawned test process:
      env = {}
      env['ARCONFIG'] = File.join(root_dir, 'test/rails', 'config.yml')
      env['ARCONN'] = adapter
      env['BUNDLE_GEMFILE'] = ENV['BUNDLE_GEMFILE'] || File.join(root_dir, 'Gemfile') # use AR-JDBC's with Rails tests
      # NOTE(review): EXCLUDE_DIR is consumed by minitest-excludes, which is
      # no longer force-required below — this export looks currently unused.
      env['EXCLUDE_DIR'] = File.join(root_dir, 'test/rails/excludes', adapter) # minitest-excludes
      libs = [
        File.join(root_dir, 'lib'),
        File.join(root_dir, driver, 'lib'),
        File.join(root_dir, 'test/rails'),
        ar_test_dir
      ]
      # Mirrors Rails' own test-file selection: all cases except adapter
      # specific ones, plus the cases for the selected adapter.
      test_files_finder = lambda do
        Dir.chdir(ar_path) do # taken from Rails' *activerecord/Rakefile* :
          ( Dir.glob("test/cases/**/*_test.rb").reject { |x| x =~ /\/adapters\// } +
            Dir.glob("test/cases/adapters/#{adapter}/**/*_test.rb") )
        end
      end
      # Borrow Rake::TestTask's test-loader command line without actually
      # defining a task (define is stubbed to a no-op).
      task_stub = Class.new(Rake::TestTask) { def define; end }.new # no-op define
      test_loader_code = task_stub.run_code # :rake test-loader
      ruby_opts_string = "-I\"#{libs.join(File::PATH_SEPARATOR)}\""
      ruby_opts_string += " -C \"#{ar_path}\""
      ruby_opts_string += " -rbundler/setup"
      file_list_string = ENV["TEST"] ? FileList[ ENV["TEST"] ] : test_files_finder.call
      file_list_string = file_list_string.map { |fn| "\"#{fn}\"" }.join(' ')
      # test_loader_code = "-e \"ARGV.each{|f| require f}\"" # :direct
      option_list = ( ENV["TESTOPTS"] || ENV["TESTOPT"] || ENV["TEST_OPTS"] || '' )
      args = "#{ruby_opts_string} #{test_loader_code} #{file_list_string} #{option_list}"
      env_sh env, "#{FileUtils::RUBY} #{args}" do |ok, status|
        if !ok && status.respond_to?(:signaled?) && status.signaled?
          raise SignalException.new(status.termsig)
        elsif !ok
          fail "Command failed with status (#{status.exitstatus})"
        end
      end
    end
    # NOTE(review): everything from here to the end of the each-block runs
    # once per adapter iteration (3x): the prerequisite below is re-added and
    # env_sh/env_system are re-defined each time. Harmless, but presumably
    # intended to sit after the loop — confirm before moving.
    task :test_mysql2 => :test_mysql
    FileUtils.module_eval do
      # Like Rake's sh, but prefixes a hash of environment variables onto the
      # shell command line (KEY="value" ... cmd).
      def env_sh(env, *cmd, &block)
        options = (Hash === cmd.last) ? cmd.pop : {}
        shell_runner = block_given? ? block : create_shell_runner(cmd)
        set_verbose_option(options)
        options[:noop] ||= Rake::FileUtilsExt.nowrite_flag
        Rake.rake_check_options options, :noop, :verbose
        cmd = env.map { |k,v| "#{k}=\"#{v}\"" }.join(' ') + ' ' + cmd.join(' ')
        Rake.rake_output_message cmd if options[:verbose]
        unless options[:noop]
          res = Kernel.system(cmd)
          status = $?
          status = Rake::PseudoStatus.new(1) if !res && status.nil?
          shell_runner.call(res, status)
        end
      end
      # Bare Kernel.system with the same env-prefix convention; no error handling.
      def env_system(env, cmd)
        Kernel.system(env.map { |k,v| "#{k}=\"#{v}\"" }.join(' ') + ' ' + cmd)
      end
    end
  end
  # Helpers to create/drop the two test databases Rails' AR suite expects
  # (arunit and arunit2); run against a local mysql/postgres install.
  namespace :db do
    namespace :mysql do
      desc 'Build the MySQL test databases'
      task :build do
        config = ARTest.config['connections']['mysql2']
        %x( mysql --user=#{config['arunit']['username']} --password=#{config['arunit']['password']} -e "create DATABASE #{config['arunit']['database']} DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci ")
        %x( mysql --user=#{config['arunit2']['username']} --password=#{config['arunit2']['password']} -e "create DATABASE #{config['arunit2']['database']} DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_unicode_ci ")
      end
      desc 'Drop the MySQL test databases'
      task :drop do
        config = ARTest.config['connections']['mysql2']
        %x( mysqladmin --user=#{config['arunit']['username']} --password=#{config['arunit']['password']} -f drop #{config['arunit']['database']} )
        %x( mysqladmin --user=#{config['arunit2']['username']} --password=#{config['arunit2']['password']} -f drop #{config['arunit2']['database']} )
      end
      desc 'Rebuild the MySQL test databases'
      task :rebuild => [:drop, :build]
    end
    namespace :postgresql do
      desc 'Build the PostgreSQL test databases'
      task :build do
        config = ARTest.config['connections']['postgresql']
        %x( createdb -E UTF8 -T template0 #{config['arunit']['database']} )
        %x( createdb -E UTF8 -T template0 #{config['arunit2']['database']} )
        # prepare hstore: before PostgreSQL 9.1 the extension must be loaded
        # manually (the version is scraped from `createdb --version` output).
        if %x( createdb --version ).strip.gsub(/(.*)(\d\.\d\.\d)$/, "\\2") < "9.1.0"
          puts "Please prepare hstore data type. See http://www.postgresql.org/docs/current/static/hstore.html"
        end
      end
      desc 'Drop the PostgreSQL test databases'
      task :drop do
        config = ARTest.config['connections']['postgresql']
        %x( dropdb #{config['arunit']['database']} )
        %x( dropdb #{config['arunit2']['database']} )
      end
      desc 'Rebuild the PostgreSQL test databases'
      task :rebuild => [:drop, :build]
    end
  end
  # NOTE: we expect to, hopefully, not be using these anymore - delete at WILL!
  # Legacy path: shells out to the Rails checkout's own Rakefile instead of
  # driving the tests directly.
  namespace :test do
    task :all do
      driver = ENV['DRIVER'] || ENV['ADAPTER']
      raise "need a DRIVER (DRIVER=mysql)" unless driver
      rails_dir = _rails_dir
      ENV['ARCONFIG'] = File.join(_ar_jdbc_dir, 'test', 'rails', 'config.yml')
      sh "cd #{File.join(rails_dir, 'activerecord')}; #{FileUtils::RUBY} -S rake RUBYLIB=#{_ruby_lib(rails_dir, driver)} #{_target(driver)}"
    end
    %w(MySQL SQLite3 Postgres).each do |adapter|
      task adapter.downcase do
        ENV['ADAPTER'] = adapter
        Rake::Task['rails:test:all'].invoke
      end
      namespace adapter.downcase do
        task "base_test" do
          ENV['TEST'] ||= 'test/cases/base_test.rb'
          ENV['ADAPTER'] = adapter
          Rake::Task['rails:test:all'].invoke
        end
      end
    end
    private
    # Root of the AR-JDBC checkout (one level above this rakelib dir).
    def _ar_jdbc_dir
      @ar_jdbc_dir ||= File.expand_path('..', File.dirname(__FILE__))
    end
    # Resolve the Rails source checkout (ENV['RAILS'] or sibling ../rails).
    def _rails_dir
      rails_dir = ENV['RAILS'] || File.join('..', 'rails')
      unless File.directory? rails_dir
        raise "can't find RAILS source at '#{rails_dir}' (maybe set ENV['RAILS'])"
      end
      rails_dir = File.join(rails_dir, '..') if rails_dir =~ /activerecord$/
      File.expand_path(rails_dir)
    end
    # Build the RUBYLIB path for the shelled-out rake invocation.
    def _ruby_lib(rails_dir, driver)
      ar_jdbc_dir = _ar_jdbc_dir
      if driver =~ /postgres/i
        adapter, driver = 'postgresql', 'postgres'
      else
        # NOTE(review): `adapter` on the right-hand side is nil here (it is
        # first introduced on the LHS of this very statement), so `driver`
        # becomes nil and the "jdbc-#{driver}" path below degenerates to
        # "jdbc-". Presumably `driver` was meant to keep its original value —
        # TODO confirm and fix.
        adapter, driver = driver.downcase, adapter
      end
      [File.join(ar_jdbc_dir, 'lib'),
       File.join(ar_jdbc_dir, 'test', 'rails'),
       File.join(ar_jdbc_dir, "jdbc-#{driver}", 'lib'),
       File.join(ar_jdbc_dir, "activerecord-jdbc#{adapter}-adapter", 'lib'),
       File.expand_path('activesupport/lib', rails_dir),
       File.expand_path('activemodel/lib', rails_dir),
       File.expand_path('activerecord/lib', rails_dir)
      ].join(':')
    end
    # Map a driver name to the Rails rake task that runs its adapter tests.
    def _target(name)
      case name
      when /postgres/i
        'test_postgresql'
      else
        "test_jdbc#{name.downcase}"
      end
    end
  end
end
|
# CocoaPods spec for the UIImage+MMLaunchImage category (v1.0.3), which
# resolves the app's launch image for the current device and orientation.
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  #
  #  These will help people to find your library, and whilst it
  #  can feel like a chore to fill in it's definitely to your advantage. The
  #  summary should be tweet-length, and the description more in depth.
  #
  s.name         = "UIImage+MMLaunchImage"
  s.version      = "1.0.3"
  s.summary      = "Returns the current launch image 'Default.png' for an iOS app, depending on OS version, device and orientation."
  s.description  = <<-DESC
                   Returns the current launch image 'Default.png' for an iOS app
                   * Can deal with iOS7 and iOS6 style launch images
                   * Deals with different orientations
                   * Deals with iPhone and iPad idioms
                   * Useful for displaying after launch, to create a fade into your app or waiting for an interstitial
                   DESC
  s.homepage     = "https://github.com/matthewmayer/MMLaunchImage"
  s.license      = 'MIT'
  s.author       = 'Matt Mayer'
  s.platform     = :ios, '5.0'
  # Source pinned to the git tag matching s.version for reproducible installs.
  s.source       = { :git => "https://github.com/matthewmayer/MMLaunchImage.git", :tag => "1.0.3" }
  s.source_files = '*.{h,m}'
  s.requires_arc = true
end
podspec
# CocoaPods spec for the UIImage+MMLaunchImage category, pointing at the
# pilot34 fork of the upstream repository.
Pod::Spec.new do |s|
  # ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
  #
  #  These will help people to find your library, and whilst it
  #  can feel like a chore to fill in it's definitely to your advantage. The
  #  summary should be tweet-length, and the description more in depth.
  #
  s.name         = "UIImage+MMLaunchImage"
  s.version      = "1.0.3"
  s.summary      = "Returns the current launch image 'Default.png' for an iOS app, depending on OS version, device and orientation."
  s.description  = <<-DESC
                   Returns the current launch image 'Default.png' for an iOS app
                   * Can deal with iOS7 and iOS6 style launch images
                   * Deals with different orientations
                   * Deals with iPhone and iPad idioms
                   * Useful for displaying after launch, to create a fade into your app or waiting for an interstitial
                   DESC
  s.homepage     = "https://github.com/matthewmayer/MMLaunchImage"
  s.license      = 'MIT'
  s.author       = 'Matt Mayer'
  s.platform     = :ios, '5.0'
  # NOTE(review): no :tag/:commit pin here — pod install will fetch the fork's
  # default-branch HEAD, which is not reproducible; confirm this is intended.
  s.source       = { :git => "https://github.com/pilot34/MMLaunchImage.git" }
  s.source_files = '*.{h,m}'
  s.requires_arc = true
end
|
Add a Module#const_added callback
[Feature #17881]
Works similarly to `method_added` but for constants.
```ruby
Foo::BAR = 42 # call Foo.const_added(:BAR)
class Foo::Baz; end # call Foo.const_added(:Baz)
Foo.autoload(:Something, "path") # call Foo.const_added(:Something)
```
require_relative '../../spec_helper'
require_relative 'fixtures/classes'

# Specs for the Module#const_added hook (Feature #17881): invoked on a module
# whenever a constant — including autoloads and nested modules/classes — is
# defined under it. Analogous to method_added, but for constants.
describe "Module#const_added" do
  ruby_version_is "3.1" do
    it "is a private instance method" do
      Module.should have_private_instance_method(:const_added)
    end

    it "returns nil in the default implementation" do
      # Inside Module.new's block self is the new module, so the private
      # default implementation can be called receiverless.
      Module.new do
        const_added(:TEST).should == nil
      end
    end

    it "is called when a new constant is assigned on self" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          ScratchPad << name
        end
      end

      mod.module_eval(<<-RUBY, __FILE__, __LINE__ + 1)
        TEST = 1
      RUBY

      ScratchPad.recorded.should == [:TEST]
    end

    it "is called when a new constant is assigned on self through const_set" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          ScratchPad << name
        end
      end

      mod.const_set(:TEST, 1)

      ScratchPad.recorded.should == [:TEST]
    end

    it "is called when a new module is defined under self" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          ScratchPad << name
        end
      end

      # The module is opened twice on purpose: the hook must fire only for
      # the first definition, not the re-opening.
      mod.module_eval(<<-RUBY, __FILE__, __LINE__ + 1)
        module SubModule
        end

        module SubModule
        end
      RUBY

      ScratchPad.recorded.should == [:SubModule]
    end

    it "is called when a new class is defined under self" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          ScratchPad << name
        end
      end

      # Re-opening the class must not fire the hook a second time.
      mod.module_eval(<<-RUBY, __FILE__, __LINE__ + 1)
        class SubClass
        end

        class SubClass
        end
      RUBY

      ScratchPad.recorded.should == [:SubClass]
    end

    it "is called when an autoload is defined" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          ScratchPad << name
        end
      end

      mod.autoload :Autoload, "foo"
      ScratchPad.recorded.should == [:Autoload]
    end

    it "is called with a precise caller location with the line of definition" do
      ScratchPad.record []

      mod = Module.new do
        def self.const_added(name)
          location = caller_locations(1, 1)[0]
          ScratchPad << location.lineno
        end
      end

      # The blank lines inside the heredoc are load-bearing: each expected
      # lineno below is an offset from `line`, with the heredoc's first line
      # reported as line + 2 (__LINE__ + 1).
      line = __LINE__
      mod.module_eval(<<-RUBY, __FILE__, __LINE__ + 1)
        TEST = 1

        module SubModule
        end

        class SubClass
        end
      RUBY

      mod.const_set(:CONST_SET, 1)

      ScratchPad.recorded.should == [line + 2, line + 4, line + 7, line + 11]
    end
  end
end
|
require 'rubygems'
require 'sqlite3'
require 'active_record'

# connect to database. This will create one if it doesn't exist
MY_DB_NAME = "oneenv.db"
MY_DB = SQLite3::Database.new(MY_DB_NAME)

# get active record set up: point ActiveRecord at the same SQLite file.
ActiveRecord::Base.establish_connection(:adapter => 'sqlite3', :database => MY_DB_NAME)
class Cookbook < ActiveRecord::Base
#has_one :enviroment
#self.primary_key= name
self.connection.create_table(:cookbooks,:force=>true) do |t|
t.column :name, :string, :null=>false, :unique=>true
t.column :path, :string
t.column :place, :string, :default=>'L'
end
validates_uniqueness_of :name
end
class Env_db < ActiveRecord::Base
belongs_to :cookbook
#set_table_name :enviroments
self.table_name= 'enviroments'
self.connection.create_table(:enviroments,:force=>true) do |t|
# El identificador autonumerado se crea automaticamente
t.column :name, :string, :default=>'env-' #+:id.to_s
t.column :ssh, :string, :default=>nil
t.column :cookbooks, :cookbook
end
end
#=begin
# Seed data: three sample cookbooks and three environments.
Cookbook.create(:name=>'emacs', :path=>'/ruta/hacia/emacs')
Cookbook.create(:name=>'vim', :path=>'/ruta/hacia/vim')
Cookbook.create(:name=>'apache', :path=>'/ruta/hacia/apache')
#=begin
Env_db.create(:name=>'nombre1', :ssh=>'clave1', :cookbooks => Cookbook.find(2))
Env_db.create(:ssh=>'clave2')
# NOTE(review): first(:conditions => ...) is the legacy AR 3 finder API
# (removed in AR 4); equivalent modern form is Cookbook.find_by(name: 'emacs').
Env_db.create(:name=>'nombre3', :ssh=>'clave3', :cookbooks => Cookbook.first(:conditions => {:name => 'emacs'}))
#=end
Añadida restricción para el campo place: solo podrá tener los valores R (Repository) o L (Local)
require 'rubygems'
require 'sqlite3'
require 'active_record'

# connect to database. This will create one if it doesn't exist
MY_DB_NAME = "oneenv.db"
MY_DB = SQLite3::Database.new(MY_DB_NAME)

# get active record set up: point ActiveRecord at the same SQLite file.
ActiveRecord::Base.establish_connection(:adapter => 'sqlite3', :database => MY_DB_NAME)
class Cookbook < ActiveRecord::Base
#has_one :enviroment
#self.primary_key= name
self.connection.create_table(:cookbooks,:force=>true) do |t|
t.column :name, :string, :null=>false, :unique=>true
t.column :path, :string
# Parece ser que type esta reservado por ruby, cambiado por place
t.column :place, :string, :default=>'L'
end
validates_uniqueness_of :name
# Obliga a que el campo :place sea R o L
validates :place, :inclusion => {:in=> ['R', 'L'], :message=> "%{value} no es un valor correcto" }
end
class Env_db < ActiveRecord::Base
belongs_to :cookbook
#set_table_name :enviroments
self.table_name= 'enviroments'
self.connection.create_table(:enviroments,:force=>true) do |t|
# El identificador autonumerado se crea automaticamente
t.column :name, :string, :default=>'env-' #+:id.to_s
t.column :ssh, :string, :default=>nil
t.column :cookbooks, :cookbook
end
end
#=begin
# Seed data: three sample cookbooks and three environments.
Cookbook.create(:name=>'emacs', :path=>'/ruta/hacia/emacs')
Cookbook.create(:name=>'vim', :path=>'/ruta/hacia/vim')
Cookbook.create(:name=>'apache', :path=>'/ruta/hacia/apache')
#=begin
Env_db.create(:name=>'nombre1', :ssh=>'clave1', :cookbooks => Cookbook.find(2))
Env_db.create(:ssh=>'clave2')
# NOTE(review): first(:conditions => ...) is the legacy AR 3 finder API
# (removed in AR 4); equivalent modern form is Cookbook.find_by(name: 'emacs').
Env_db.create(:name=>'nombre3', :ssh=>'clave3', :cookbooks => Cookbook.first(:conditions => {:name => 'emacs'}))
#=end
|
#
# Cookbook Name:: rabbitmq
# Recipe:: default
#
# Copyright 2009, Benjamin Black
# Copyright 2009-2013, Chef Software, Inc.
# Copyright 2012, Kevin Nuckolls <kevin.nuckolls@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Mix the cookbook's helper methods (format_kernel_parameters etc., from
# Opscode::RabbitMQ) into every Chef resource so the template resources
# below can call them directly.
class Chef::Resource # rubocop:disable all
  include Opscode::RabbitMQ # rubocop:enable all
end
include_recipe 'erlang'
## Install the package
case node['platform_family']
when 'debian'
template '/etc/apt/apt.conf.d/90forceyes' do
source '90forceyes.erb'
owner 'root'
group 'root'
mode '0644'
end
# logrotate is a package dependency of rabbitmq-server
package 'logrotate'
# => Prevent Debian systems from automatically starting RabbitMQ after dpkg install
dpkg_autostart node['rabbitmq']['service_name'] do
allow false
end
if node['rabbitmq']['use_distro_version']
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
else
# we need to download the package
deb_package = "#{node['rabbitmq']['deb_package_url']}#{node['rabbitmq']['deb_package']}"
remote_file "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['deb_package']}" do
source deb_package
action :create_if_missing
end
dpkg_package "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['deb_package']}" do
action :install
end
end
# Configure job control
if node['rabbitmq']['job_control'] == 'upstart' && node['rabbitmq']['manage_service']
# We start with stock init.d, remove it if we're not using init.d, otherwise leave it alone
service node['rabbitmq']['service_name'] do
action [:stop]
only_if { File.exist?('/etc/init.d/rabbitmq-server') }
end
execute 'remove rabbitmq init.d command' do
command 'update-rc.d -f rabbitmq-server remove'
end
file '/etc/init.d/rabbitmq-server' do
action :delete
end
template "/etc/init/#{node['rabbitmq']['service_name']}.conf" do
source 'rabbitmq.upstart.conf.erb'
owner 'root'
group 'root'
mode 0644
variables(:max_file_descriptors => node['rabbitmq']['max_file_descriptors'])
end
end
when 'rhel', 'fedora'
# This is needed since Erlang Solutions' packages provide "esl-erlang"; this package just requires "esl-erlang" and provides "erlang".
if node['erlang']['install_method'] == 'esl'
remote_file "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm" do
source "#{node['rabbitmq']['esl-erlang_package_url']}#{node['rabbitmq']['esl-erlang_package']}"
end
rpm_package "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm"
end
if node['rabbitmq']['use_distro_version']
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
else
# We need to download the rpm
rpm_package = "#{node['rabbitmq']['rpm_package_url']}#{node['rabbitmq']['rpm_package']}"
remote_file "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['rpm_package']}" do
source rpm_package
action :create_if_missing
end
rpm_package "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['rpm_package']}"
end
when 'suse'
# rabbitmq-server-plugins needs to be first so they both get installed
# from the right repository. Otherwise, zypper will stop and ask for a
# vendor change.
package 'rabbitmq-server-plugins' do
action :install
version node['rabbitmq']['version']
end
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
when 'smartos'
package 'rabbitmq'do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
service 'epmd' do
action :start
end
end
# Ensure the log directory (when configured) and the mnesia data directory
# exist and are owned by the rabbitmq user.
if node['rabbitmq']['logdir']
  directory node['rabbitmq']['logdir'] do
    owner 'rabbitmq'
    group 'rabbitmq'
    mode '775'
    recursive true
  end
end

directory node['rabbitmq']['mnesiadir'] do
  owner 'rabbitmq'
  group 'rabbitmq'
  mode '775'
  recursive true
end
# Environment file read by the rabbitmq startup scripts.
template "#{node['rabbitmq']['config_root']}/rabbitmq-env.conf" do
  source 'rabbitmq-env.conf.erb'
  owner 'root'
  group 'root'
  mode 00644
  notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end

# Main rabbitmq.config; marked sensitive (on Chef >= 11.14.2, where the
# property exists) so credentials are not echoed into converge logs.
template "#{node['rabbitmq']['config']}.config" do
  sensitive true if Gem::Version.new(Chef::VERSION.to_s) >= Gem::Version.new('11.14.2')
  source 'rabbitmq.config.erb'
  cookbook node['rabbitmq']['config_template_cookbook']
  owner 'root'
  group 'root'
  mode 00644
  # Helpers come from Opscode::RabbitMQ (mixed into Chef::Resource above).
  variables(
    :kernel => format_kernel_parameters,
    :ssl_versions => (format_ssl_versions if node['rabbitmq']['ssl_versions']),
    :ssl_ciphers => (format_ssl_ciphers if node['rabbitmq']['ssl_ciphers'])
  )
  notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end

template "/etc/default/#{node['rabbitmq']['service_name']}" do
  source 'default.rabbitmq-server.erb'
  owner 'root'
  group 'root'
  mode 00644
  notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end
# Read any Erlang cookie already on disk so we can detect whether the
# configured cookie differs.
if File.exist?(node['rabbitmq']['erlang_cookie_path']) && File.readable?((node['rabbitmq']['erlang_cookie_path']))
  existing_erlang_key = File.read(node['rabbitmq']['erlang_cookie_path']).strip
else
  existing_erlang_key = ''
end

# Clustered nodes must share the Erlang cookie; changing it requires a
# stop / rewrite / start / reset cycle of the broker.
if node['rabbitmq']['cluster'] && (node['rabbitmq']['erlang_cookie'] != existing_erlang_key)
  log "stop #{node['rabbitmq']['service_name']} to change erlang cookie" do
    notifies :stop, "service[#{node['rabbitmq']['service_name']}]", :immediately
  end
  template node['rabbitmq']['erlang_cookie_path'] do
    source 'doterlang.cookie.erb'
    owner 'rabbitmq'
    group 'rabbitmq'
    mode 00400
    notifies :start, "service[#{node['rabbitmq']['service_name']}]", :immediately
    notifies :run, 'execute[reset-node]', :immediately
  end
  # Need to reset for clustering #
  execute 'reset-node' do
    command 'rabbitmqctl stop_app && rabbitmqctl reset && rabbitmqctl start_app'
    action :nothing
  end
end
# Manage the rabbitmq service, honouring the configured job-control system;
# when manage_service is false, declare the service with :nothing so the
# notifications above still resolve without acting on it.
if node['rabbitmq']['manage_service']
  service node['rabbitmq']['service_name'] do
    action [:enable, :start]
    supports :status => true, :restart => true
    provider Chef::Provider::Service::Upstart if node['rabbitmq']['job_control'] == 'upstart'
    provider Chef::Provider::Service::Init if node['rabbitmq']['job_control'] == 'init'
  end
else
  service node['rabbitmq']['service_name'] do
    action :nothing
  end
end
Fixes/Enhancements:
- Make the Debian installation behave more like RedHat by allowing package upgrades
#
# Cookbook Name:: rabbitmq
# Recipe:: default
#
# Copyright 2009, Benjamin Black
# Copyright 2009-2013, Chef Software, Inc.
# Copyright 2012, Kevin Nuckolls <kevin.nuckolls@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Mix the cookbook's helper methods (format_kernel_parameters etc., from
# Opscode::RabbitMQ) into every Chef resource so the template resources
# below can call them directly.
class Chef::Resource # rubocop:disable all
  include Opscode::RabbitMQ # rubocop:enable all
end
include_recipe 'erlang'
## Install the package
case node['platform_family']
when 'debian'
template '/etc/apt/apt.conf.d/90forceyes' do
source '90forceyes.erb'
owner 'root'
group 'root'
mode '0644'
end
# logrotate is a package dependency of rabbitmq-server
package 'logrotate'
# => Prevent Debian systems from automatically starting RabbitMQ after dpkg install
dpkg_autostart node['rabbitmq']['service_name'] do
allow false
end
if node['rabbitmq']['use_distro_version']
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
else
# we need to download the package
deb_package = "#{node['rabbitmq']['deb_package_url']}#{node['rabbitmq']['deb_package']}"
remote_file "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['deb_package']}" do
source deb_package
action :create_if_missing
end
package 'rabbitmq-server' do
provider Chef::Provider::Package::Dpkg
source ::File.join(Chef::Config[:file_cache_path], node['rabbitmq']['deb_package'])
action :upgrade
end
end
# Configure job control.
# When upstart manages the service we must retire the distro's SysV init
# script first, otherwise both init systems can fight over the daemon.
if node['rabbitmq']['job_control'] == 'upstart' && node['rabbitmq']['manage_service']
# We start with stock init.d, remove it if we're not using init.d, otherwise leave it alone
service node['rabbitmq']['service_name'] do
action [:stop]
only_if { File.exist?('/etc/init.d/rabbitmq-server') }
end
execute 'remove rabbitmq init.d command' do
command 'update-rc.d -f rabbitmq-server remove'
end
file '/etc/init.d/rabbitmq-server' do
action :delete
end
# Render the upstart job definition for the service.
template "/etc/init/#{node['rabbitmq']['service_name']}.conf" do
source 'rabbitmq.upstart.conf.erb'
owner 'root'
group 'root'
# Quoted mode: bare integer modes are error-prone (decimal vs octal) and
# the rest of this recipe already uses the string form (e.g. '0644').
mode '0644'
variables(:max_file_descriptors => node['rabbitmq']['max_file_descriptors'])
end
end
when 'rhel', 'fedora'
# This is needed since Erlang Solutions' packages provide "esl-erlang"; this package just requires "esl-erlang" and provides "erlang".
if node['erlang']['install_method'] == 'esl'
remote_file "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm" do
source "#{node['rabbitmq']['esl-erlang_package_url']}#{node['rabbitmq']['esl-erlang_package']}"
end
# Install the compatibility shim rpm from the Chef file cache.
rpm_package "#{Chef::Config[:file_cache_path]}/esl-erlang-compat.rpm"
end
if node['rabbitmq']['use_distro_version']
# Distro package, optionally pinned to an exact version.
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
else
# We need to download the rpm
rpm_package = "#{node['rabbitmq']['rpm_package_url']}#{node['rabbitmq']['rpm_package']}"
remote_file "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['rpm_package']}" do
source rpm_package
action :create_if_missing
end
# Install the cached rpm.
rpm_package "#{Chef::Config[:file_cache_path]}/#{node['rabbitmq']['rpm_package']}"
end
when 'suse'
# rabbitmq-server-plugins needs to be first so they both get installed
# from the right repository. Otherwise, zypper will stop and ask for a
# vendor change.
package 'rabbitmq-server-plugins' do
action :install
version node['rabbitmq']['version']
end
package 'rabbitmq-server' do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
when 'smartos'
package 'rabbitmq'do
action :install
version node['rabbitmq']['version'] if node['rabbitmq']['pin_distro_version']
end
service 'epmd' do
action :start
end
end
# Optional dedicated log directory, owned by the rabbitmq user.
if node['rabbitmq']['logdir']
directory node['rabbitmq']['logdir'] do
owner 'rabbitmq'
group 'rabbitmq'
mode '775'
recursive true
end
end
# Mnesia database directory (RabbitMQ's persistent state).
directory node['rabbitmq']['mnesiadir'] do
owner 'rabbitmq'
group 'rabbitmq'
mode '775'
recursive true
end
# Environment file read by the rabbitmq-server startup scripts.
template "#{node['rabbitmq']['config_root']}/rabbitmq-env.conf" do
source 'rabbitmq-env.conf.erb'
owner 'root'
group 'root'
mode 00644
notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end
# Main rabbitmq.config; marked sensitive on Chef >= 11.14.2 so credentials
# are not printed in converge logs.
template "#{node['rabbitmq']['config']}.config" do
sensitive true if Gem::Version.new(Chef::VERSION.to_s) >= Gem::Version.new('11.14.2')
source 'rabbitmq.config.erb'
cookbook node['rabbitmq']['config_template_cookbook']
owner 'root'
group 'root'
mode 00644
variables(
:kernel => format_kernel_parameters,
:ssl_versions => (format_ssl_versions if node['rabbitmq']['ssl_versions']),
:ssl_ciphers => (format_ssl_ciphers if node['rabbitmq']['ssl_ciphers'])
)
notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end
template "/etc/default/#{node['rabbitmq']['service_name']}" do
source 'default.rabbitmq-server.erb'
owner 'root'
group 'root'
mode 00644
notifies :restart, "service[#{node['rabbitmq']['service_name']}]"
end
# Read the currently-installed Erlang cookie (if any) so we can detect a change.
if File.exist?(node['rabbitmq']['erlang_cookie_path']) && File.readable?((node['rabbitmq']['erlang_cookie_path']))
existing_erlang_key = File.read(node['rabbitmq']['erlang_cookie_path']).strip
else
existing_erlang_key = ''
end
# Cluster nodes must share one Erlang cookie; changing it requires a full
# stop, cookie rewrite, node reset, and restart — in that exact order.
if node['rabbitmq']['cluster'] && (node['rabbitmq']['erlang_cookie'] != existing_erlang_key)
log "stop #{node['rabbitmq']['service_name']} to change erlang cookie" do
notifies :stop, "service[#{node['rabbitmq']['service_name']}]", :immediately
end
template node['rabbitmq']['erlang_cookie_path'] do
source 'doterlang.cookie.erb'
owner 'rabbitmq'
group 'rabbitmq'
mode 00400
notifies :start, "service[#{node['rabbitmq']['service_name']}]", :immediately
notifies :run, 'execute[reset-node]', :immediately
end
# Need to reset for clustering #
execute 'reset-node' do
command 'rabbitmqctl stop_app && rabbitmqctl reset && rabbitmqctl start_app'
action :nothing
end
end
# Finally, enable/start the service (or declare it passively when unmanaged
# so the notifications above still have a target).
if node['rabbitmq']['manage_service']
service node['rabbitmq']['service_name'] do
action [:enable, :start]
supports :status => true, :restart => true
provider Chef::Provider::Service::Upstart if node['rabbitmq']['job_control'] == 'upstart'
provider Chef::Provider::Service::Init if node['rabbitmq']['job_control'] == 'init'
end
else
service node['rabbitmq']['service_name'] do
action :nothing
end
end
|
# Pin bash-it to the version recorded for this sprout run.
bash_it_version = version_string_for('bash_it')
bash_it_config = node['sprout']['bash_it']
bash_it_dir = bash_it_config['dir']
# Clone into the Chef file cache as the target user. The git resource's
# property for the account performing the checkout is `user`, not `owner`.
git "#{Chef::Config[:file_cache_path]}/bash_it" do
user node['current_user']
repository bash_it_config['repository']
revision bash_it_version
destination "#{Chef::Config[:file_cache_path]}/bash_it"
action :sync
end
directory bash_it_dir do
owner node['current_user']
mode '0777'
end
# rsync the cached checkout (including .git) into the final location.
execute "Copying bash-it's .git to #{node['bash_it']['dir']}" do
command "rsync -axSH #{Chef::Config[:file_cache_path]}/bash_it/ #{bash_it_dir}"
user node['current_user']
end
# Render the user's .bashrc pointing at the bash-it install and theme.
template bash_it_config['bashrc_path'] do
source 'bash_it/bashrc.erb'
cookbook 'sprout-bash-it'
owner node['current_user']
variables bash_it_dir: bash_it_dir, bash_it_theme: bash_it_config['theme']
mode '0777'
end
include_recipe 'sprout-bash-it::custom_plugins'
include_recipe 'sprout-bash-it::enabled_plugins'
Use `user`, not `owner`, in the git resource. :sob:
# Pin bash-it to the version recorded for this sprout run.
bash_it_version = version_string_for('bash_it')
bash_it_config = node['sprout']['bash_it']
bash_it_dir = bash_it_config['dir']
# Clone into the Chef file cache, running the checkout as the target user.
git "#{Chef::Config[:file_cache_path]}/bash_it" do
user node['current_user']
repository bash_it_config['repository']
revision bash_it_version
destination "#{Chef::Config[:file_cache_path]}/bash_it"
action :sync
end
directory bash_it_dir do
owner node['current_user']
mode '0777'
end
# rsync the cached checkout (including .git) into the final location.
# NOTE(review): the resource name reads node['bash_it']['dir'] while the
# command uses bash_it_config['dir'] — presumably the same path; confirm.
execute "Copying bash-it's .git to #{node['bash_it']['dir']}" do
command "rsync -axSH #{Chef::Config[:file_cache_path]}/bash_it/ #{bash_it_dir}"
user node['current_user']
end
# Render the user's .bashrc pointing at the bash-it install and theme.
template bash_it_config['bashrc_path'] do
source 'bash_it/bashrc.erb'
cookbook 'sprout-bash-it'
owner node['current_user']
variables bash_it_dir: bash_it_dir, bash_it_theme: bash_it_config['theme']
mode '0777'
end
include_recipe 'sprout-bash-it::custom_plugins'
include_recipe 'sprout-bash-it::enabled_plugins'
|
include_recipe "eol-docker::docker"
# Configuration for this node is looked up in the "eol-docker" data bag,
# keyed first by Chef environment, then by node name.
env = node.environment
return unless data_bag("eol-docker").include?(env)
dbag = data_bag_item("eol-docker", env)
node_conf = dbag[node.name]
log "node_conf: %s" % node_conf
group "docker" do
members dbag["docker_members"]
end
return unless node_conf
directory "/eol/shared" do
user "root"
group "docker"
mode "0775"
recursive true
end
names = []
# NOTE(review): bare `exec` here is Ruby's Kernel#exec, which replaces the
# running chef-client process — presumably an `execute` resource (or a
# `file` resource) was intended; confirm before relying on this recipe.
exec "> /eol/shared/containers"
node_conf["containers"].each do |c|
names << c["name"]
docker_container c["name"] do
config c
end
# NOTE(review): same Kernel#exec concern as above.
exec "echo #{ c["name"] } >> /eol/shared/containers"
end
# Install the helper scripts shipped with this cookbook.
%w(docker_clean docker_nuke docker_names).each do |f|
cookbook_file f do
path "/usr/local/bin/#{f}"
mode "0755"
action :create
end
end
# Script that restarts every configured container by name.
template "/usr/local/bin/restart_all" do
source "restart_all.erb"
variables names: names
mode "775"
user "root"
group "docker"
end
Cleaner creation of the containers file (use a file resource instead of shelling out).
include_recipe "eol-docker::docker"
# Configuration for this node is looked up in the "eol-docker" data bag,
# keyed first by Chef environment, then by node name.
env = node.environment
return unless data_bag("eol-docker").include?(env)
dbag = data_bag_item("eol-docker", env)
node_conf = dbag[node.name]
log "node_conf: %s" % node_conf
group "docker" do
members dbag["docker_members"]
end
return unless node_conf
directory "/eol/shared" do
user "root"
group "docker"
mode "0775"
recursive true
end
# Record the configured container names, one per line, via a file resource.
file "/eol/shared/containers" do
content node_conf["containers"].map { |c| c["name"] }.join("\n")
owner "root"
group "docker"
mode 00644
end
names = []
node_conf["containers"].each do |c|
names << c["name"]
docker_container c["name"] do
config c
end
end
# Install the helper scripts shipped with this cookbook.
%w(docker_clean docker_nuke docker_names).each do |f|
cookbook_file f do
path "/usr/local/bin/#{f}"
mode "0755"
action :create
end
end
# Script that restarts every configured container by name.
template "/usr/local/bin/restart_all" do
source "restart_all.erb"
variables names: names
mode "775"
user "root"
group "docker"
end
|
#
# Cookbook Name:: rsyslog
# Recipe:: default
#
# Copyright 2009-2013, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package 'rsyslog'
# RELP (reliable event logging protocol) support is optional.
package 'rsyslog-relp' if node['rsyslog']['use_relp']
directory "#{node['rsyslog']['config_prefix']}/rsyslog.d" do
owner 'root'
group 'root'
mode '0755'
end
# Spool directory used by rsyslog for queued/buffered messages.
directory '/var/spool/rsyslog' do
owner 'root'
group 'root'
mode '0755'
end
# Our main stub which then does its own rsyslog-specific
# include of things in /etc/rsyslog.d/*
template "#{node['rsyslog']['config_prefix']}/rsyslog.conf" do
source 'rsyslog.conf.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
template "#{node['rsyslog']['config_prefix']}/rsyslog.d/50-default.conf" do
source '50-default.conf.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
# syslog needs to be stopped before rsyslog can be started on RHEL versions before 6.0
if platform_family?('rhel') && node['platform_version'].to_i < 6
service 'syslog' do
action [:stop, :disable]
end
elsif platform_family?('smartos', 'omnios')
# syslog needs to be stopped before rsyslog can be started on SmartOS, OmniOS
service 'system-log' do
action :disable
end
end
if platform_family?('omnios')
# manage the SMF manifest on OmniOS
template '/var/svc/manifest/system/rsyslogd.xml' do
source 'omnios-manifest.xml.erb'
owner 'root'
group 'root'
mode '0644'
notifies :run, 'execute[import rsyslog manifest]', :immediately
end
execute 'import rsyslog manifest' do
action :nothing
command 'svccfg import /var/svc/manifest/system/rsyslogd.xml'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
end
service node['rsyslog']['service_name'] do
supports :restart => true, :reload => true, :status => true
action [:enable, :start]
end
# Replace the distro logrotate config for rsyslog with our (empty) stub.
file "logrotate_fix.conf" do
owner "root"
group "root"
# Quoted mode: a bare `mode 515` is read as decimal 515 (= 01003 octal),
# which is almost certainly not the intended permission bits.
mode "0515"
path "/etc/logrotate.d/rsyslog"
end
delete existing /etc/logrotate.d/rsyslog before writing a new one
#
# Cookbook Name:: rsyslog
# Recipe:: default
#
# Copyright 2009-2013, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
package 'rsyslog'
# RELP (reliable event logging protocol) support is optional.
package 'rsyslog-relp' if node['rsyslog']['use_relp']
directory "#{node['rsyslog']['config_prefix']}/rsyslog.d" do
owner 'root'
group 'root'
mode '0755'
end
# Spool directory used by rsyslog for queued/buffered messages.
directory '/var/spool/rsyslog' do
owner 'root'
group 'root'
mode '0755'
end
# Our main stub which then does its own rsyslog-specific
# include of things in /etc/rsyslog.d/*
template "#{node['rsyslog']['config_prefix']}/rsyslog.conf" do
source 'rsyslog.conf.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
template "#{node['rsyslog']['config_prefix']}/rsyslog.d/50-default.conf" do
source '50-default.conf.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
# syslog needs to be stopped before rsyslog can be started on RHEL versions before 6.0
if platform_family?('rhel') && node['platform_version'].to_i < 6
service 'syslog' do
action [:stop, :disable]
end
elsif platform_family?('smartos', 'omnios')
# syslog needs to be stopped before rsyslog can be started on SmartOS, OmniOS
service 'system-log' do
action :disable
end
end
if platform_family?('omnios')
# manage the SMF manifest on OmniOS
template '/var/svc/manifest/system/rsyslogd.xml' do
source 'omnios-manifest.xml.erb'
owner 'root'
group 'root'
mode '0644'
notifies :run, 'execute[import rsyslog manifest]', :immediately
end
execute 'import rsyslog manifest' do
action :nothing
command 'svccfg import /var/svc/manifest/system/rsyslogd.xml'
notifies :restart, "service[#{node['rsyslog']['service_name']}]"
end
end
service node['rsyslog']['service_name'] do
supports :restart => true, :reload => true, :status => true
action [:enable, :start]
end
# Remove any pre-existing distro logrotate config before writing our stub.
file "/etc/logrotate.d/rsyslog" do
action :delete
end
file "logrotate_fix.conf" do
owner "root"
group "root"
# Quoted mode: a bare `mode 515` is read as decimal 515 (= 01003 octal),
# which is almost certainly not the intended permission bits.
mode "0515"
path "/etc/logrotate.d/rsyslog"
end
|
# --- Install packages we need ---
package 'php5-fpm'
package 'php5'
package 'php5-cli'
package 'php-pear'
package 'phpunit'
# Calling `command` repeatedly on one execute resource just reassigns the
# attribute, so only the last pear command ever ran. Chain them with &&
# so every step runs (and the resource fails if any step fails).
execute "phpunit" do
command "pear upgrade pear && " \
"pear channel-discover pear.phpunit.de && " \
"pear channel-discover components.ez.no && " \
"pear install --alldeps phpunit/PHPUnit"
end
# Start php5-fpm via its init script.
execute "start-php5-fpm" do
command "/etc/init.d/php5-fpm start"
end
Perform the pear installs with root permissions (via sudo).
# --- Install packages we need ---
package 'php5-fpm'
package 'php5'
package 'php5-cli'
package 'php-pear'
package 'phpunit'
# Calling `command` repeatedly on one execute resource just reassigns the
# attribute, so only the last pear command ever ran. Chain them with &&
# so every step runs under sudo (and the resource fails if any step fails).
execute "phpunit" do
command "sudo pear upgrade pear && " \
"sudo pear channel-discover pear.phpunit.de && " \
"sudo pear channel-discover components.ez.no && " \
"sudo pear install --alldeps phpunit/PHPUnit"
end
# Start php5-fpm via its init script.
execute "start-php5-fpm" do
command "/etc/init.d/php5-fpm start"
end
|
#
# Cookbook Name:: laravel
# Recipe:: default
#
# Copyright 2014, Michael Beattie
#
# Licensed under the MIT License.
# You may obtain a copy of the License at
#
# http://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Collect the names of required node attributes that are unset.
missing_attrs = %w[project_name].select { |attr| node['laravel'][attr].nil? }.map { |attr| %Q{node['laravel']['#{attr}']} }
# Fail Chef if required attributes are missing
unless missing_attrs.empty?
Chef::Application.fatal! "You must set #{missing_attrs.join(', ')}." \
" For more information, see https://github.com/BeattieM/laravel#attributes"
end
include_recipe "php"
# Laravel requires mcrypt; skip the recipe if the extension is already
# configured. File.exist? replaces the deprecated File.exists? (removed in
# Ruby 3.2).
unless File.exist?("#{node['php']['ext_conf_dir']}/mcrypt.ini")
include_recipe "php-mcrypt"
end
include_recipe "mysql"
include_recipe "apache2"
include_recipe "composer"
include_recipe "laravel::laravel"
Fix installation of PHP for Apache
#
# Cookbook Name:: laravel
# Recipe:: default
#
# Copyright 2014, Michael Beattie
#
# Licensed under the MIT License.
# You may obtain a copy of the License at
#
# http://opensource.org/licenses/MIT
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Collect the names of required node attributes that are unset.
missing_attrs = %w[project_name].select { |attr| node['laravel'][attr].nil? }.map { |attr| %Q{node['laravel']['#{attr}']} }
# Fail Chef if required attributes are missing
unless missing_attrs.empty?
Chef::Application.fatal! "You must set #{missing_attrs.join(', ')}." \
" For more information, see https://github.com/BeattieM/laravel#attributes"
end
include_recipe "php"
# Laravel requires mcrypt; skip the recipe if the extension is already
# configured. File.exist? replaces the deprecated File.exists? (removed in
# Ruby 3.2).
unless File.exist?("#{node['php']['ext_conf_dir']}/mcrypt.ini")
include_recipe "php-mcrypt"
end
include_recipe "mysql"
include_recipe "apache2"
# mod_php5 is required so Apache can actually serve the PHP application.
include_recipe "apache2::mod_php5"
include_recipe "composer"
include_recipe "laravel::laravel"
|
#
# Cookbook Name:: monit
# Recipe:: default
#
# NOTE(review): method-style attribute access (node.monit.install_method)
# is deprecated in modern Chef; node['monit']['install_method'] is the
# supported form — confirm target Chef version before changing.
case node.monit.install_method
when 'repo'
include_recipe "yum::epel" if platform_family?("rhel")
include_recipe "ubuntu" if platform?("ubuntu")
package 'monit'
include_recipe "monit::_common"
when 'source'
include_recipe "monit::source"
else
raise ArgumentError, "Unknown install_method '#{node.monit.install_method}' passed to monit cookbook"
end
enable monit from bbg-koji-edge on bbg network
#
# Cookbook Name:: monit
# Recipe:: default
#
case node.monit.install_method
when 'repo'
if platform_family?("rhel")
include_recipe "yum::epel"
# On the Bluebox network, install monit from the bbg-koji-edge repo.
if node.bluebox.network
yum_package "monit" do
action :install
options "--enablerepo=bbg-koji-edge"
end
end
end
include_recipe "ubuntu" if platform?("ubuntu")
package 'monit'
include_recipe "monit::_common"
when 'source'
include_recipe "monit::source"
else
raise ArgumentError, "Unknown install_method '#{node.monit.install_method}' passed to monit cookbook"
end
|
# -*- coding: UTF-8 -*-
#
# Cookbook Name:: valhalla
# Recipe:: get_elevation_tiles
#
# stop everything from running
# Stops the HTTP frontend, the skadi proxy, and every worker instance
# before replacing the elevation tiles underneath them.
execute 'stop service' do
action :run
command <<-EOH
service prime-httpd stop
count=$((#{node[:valhalla][:workers][:count]} - 1))
service proxyd-skadi stop
for j in $(seq 0 ${count}); do
service workerd-skadi-${j} stop
done
EOH
cwd node[:valhalla][:base_dir]
notifies :run, 'execute[sync tiles]', :immediately
end
# get them from s3
# Whole-world extract (-180..180, -90..90) using 2x the CPU count.
execute 'sync tiles' do
action :run
user node[:valhalla][:user][:name]
cwd node[:valhalla][:src_dir]
command "skadi/scripts/elevation_extract.sh -180 180 -90 90 #{node[:valhalla][:elevation_dir]} $(($(nproc)*2))"
timeout 32_000
end
# turn everything back on
include_recipe 'valhalla::_restart'
add retry to elevation data
# -*- coding: UTF-8 -*-
#
# Cookbook Name:: valhalla
# Recipe:: get_elevation_tiles
#
# stop everything from running
# Stops the HTTP frontend, the skadi proxy, and every worker instance
# before replacing the elevation tiles underneath them.
execute 'stop service' do
action :run
command <<-EOH
service prime-httpd stop
count=$((#{node[:valhalla][:workers][:count]} - 1))
service proxyd-skadi stop
for j in $(seq 0 ${count}); do
service workerd-skadi-${j} stop
done
EOH
cwd node[:valhalla][:base_dir]
notifies :run, 'execute[sync tiles]', :immediately
end
# get them from s3
# Whole-world extract (-180..180, -90..90) using 2x the CPU count.
# The download is flaky, so retry up to 10 times before failing.
execute 'sync tiles' do
action :run
user node[:valhalla][:user][:name]
cwd node[:valhalla][:src_dir]
command "skadi/scripts/elevation_extract.sh -180 180 -90 90 #{node[:valhalla][:elevation_dir]} $(($(nproc)*2))"
retries 10
timeout 32_000
end
# turn everything back on
include_recipe 'valhalla::_restart'
|
#
# Cookbook Name:: nodejs
# Recipe:: default
#
case node["platform"]
when "ubuntu"
# Chris Lea's PPA provides newer Node.js builds for Ubuntu.
apt_repository "chris-lea-node.js" do
uri "http://ppa.launchpad.net/chris-lea/node.js/ubuntu"
distribution node["lsb"]["codename"]
components ["main"]
key "C7917B12"
keyserver "keyserver.ubuntu.com"
action :add
notifies :run, "execute[apt-get update]", :immediately
end
when "debian"
# backports for initial support
apt_repository "sid-unstable" do
uri "http://ftp.us.debian.org/debian"
distribution "sid"
components ["main"]
action :add
notifies :run, "execute[apt-get update]", :immediately
end
# Pin sid so only the packages we need come from unstable.
cookbook_file "/etc/apt/preferences.d/sid.pref" do
source "sid.pref"
end
# compatibility for Debian 6
package "libv8-3.8.9.20"
end
# install primary package
package "nodejs"
no need to rerun `apt-get update`
#
# Cookbook Name:: nodejs
# Recipe:: default
#
case node["platform"]
when "ubuntu"
# Chris Lea's PPA provides newer Node.js builds for Ubuntu. The
# apt_repository resource updates the package index itself, so no
# explicit apt-get update notification is needed.
apt_repository "chris-lea-node.js" do
uri "http://ppa.launchpad.net/chris-lea/node.js/ubuntu"
distribution node["lsb"]["codename"]
components ["main"]
key "C7917B12"
keyserver "keyserver.ubuntu.com"
action :add
end
when "debian"
# backports for initial support
apt_repository "sid-unstable" do
uri "http://ftp.us.debian.org/debian"
distribution "sid"
components ["main"]
action :add
end
# Pin sid so only the packages we need come from unstable.
cookbook_file "/etc/apt/preferences.d/sid.pref" do
source "sid.pref"
end
# compatibility for Debian 6
package "libv8-3.8.9.20"
end
# install primary package
package "nodejs"
|
#
# Cookbook Name:: owncloud
# Recipe:: default
#
# Copyright 2013, Onddo Labs, Sl.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==============================================================================
# Calculate dependencies for different distros
#==============================================================================
# Pick the PHP extension package list per platform family and the
# configured database backend (sqlite / mysql / pgsql).
dbtype = node['owncloud']['config']['dbtype']
download_url =
node['owncloud']['download_url'] % { version: node['owncloud']['version'] }
case node['platform_family']
when 'debian'
# Sync apt package index
include_recipe 'apt'
php_pkgs = %w(php5-gd php5-intl php5-curl php5-json smbclient)
php_pkgs << 'php5-sqlite' if dbtype == 'sqlite'
php_pkgs << 'php5-mysql' if dbtype == 'mysql'
php_pkgs << 'php5-pgsql' if dbtype == 'pgsql'
when 'rhel'
# Pre-RHEL-6 (but not Amazon Linux) uses the php53-* package names.
if node['platform'] != 'amazon' && node['platform_version'].to_f < 6
php_pkgs = %w(php53-gd php53-mbstring php53-xml php53-intl samba-client)
php_pkgs << 'php53-mysql' if dbtype == 'mysql'
php_pkgs << 'php53-pgsql' if dbtype == 'pgsql'
if dbtype == 'sqlite'
fail(
"SQLite database type not supported on #{node['platform']}"\
" #{node['platform_version']}"
)
end
else
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
end
when 'fedora'
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
else
Chef::Log.warn('Unsupported platform, trying to guess packages.')
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
end
#==============================================================================
# Initialize autogenerated passwords
#==============================================================================
# In chef-solo mode passwords cannot be persisted back to the server, so
# they must be set explicitly; otherwise generate and save them once.
::Chef::Recipe.send(:include, Opscode::OpenSSL::Password)
if Chef::Config[:solo]
if node['owncloud']['config']['dbpassword'].nil? &&
node['owncloud']['config']['dbtype'] != 'sqlite'
fail 'You must set ownCloud\'s database password in chef-solo mode.'
end
if node['owncloud']['database']['rootpassword'].nil? &&
node['owncloud']['config']['dbtype'] != 'sqlite'
fail 'You must set the database admin password in chef-solo mode.'
end
if node['owncloud']['admin']['pass'].nil?
fail 'You must set ownCloud\'s admin password in chef-solo mode.'
end
else
# NOTE(review): node.set_unless is deprecated in modern Chef
# (node.normal_unless replaces it) — confirm target Chef version.
unless node['owncloud']['config']['dbtype'] == 'sqlite'
node.set_unless['owncloud']['config']['dbpassword'] = secure_password
node.set_unless['owncloud']['database']['rootpassword'] = secure_password
end
node.set_unless['owncloud']['admin']['pass'] = secure_password
node.save
end
#==============================================================================
# Install PHP
#==============================================================================
# ownCloud requires PHP >= 5.4.0, so in older ubuntu versions we need to add an
# extra repository in order to provide it
apt_repository 'ondrej-php5-oldstable' do
uri 'http://ppa.launchpad.net/ondrej/php5-oldstable/ubuntu'
distribution node['lsb']['codename'] if node['lsb'].is_a?(Hash)
components %w(main)
keyserver 'keyserver.ubuntu.com'
key 'E5267A6C'
deb_src true
# Only Ubuntu <= 12.04 needs the PPA; newer releases ship PHP >= 5.4.
only_if do
node['platform'] == 'ubuntu' &&
Chef::VersionConstraint.new('<= 12.04').include?(node['platform_version'])
end
end
include_recipe 'php'
# Install the per-platform extension list computed above.
php_pkgs.each do |pkg|
package pkg do
action :install
end
end
#==============================================================================
# Set up database
#==============================================================================
# For mysql/pgsql a local server is installed only when dbhost points at
# localhost; otherwise an external database is assumed.
case node['owncloud']['config']['dbtype']
when 'sqlite'
# With SQLite the table prefix must be oc_
node.default['owncloud']['config']['dbtableprefix'] = 'oc_'
when 'mysql'
if node['owncloud']['config']['dbport'].nil?
node.default['owncloud']['config']['dbport'] = '3306'
end
if %w(localhost 127.0.0.1).include?(node['owncloud']['config']['dbhost'])
# Install MySQL
dbinstance = node['owncloud']['database']['instance']
mysql2_chef_gem dbinstance do
action :install
end
mysql_service dbinstance do
data_dir node['owncloud']['database']['data_dir']
version node['owncloud']['database']['version']
bind_address '127.0.0.1'
port node['owncloud']['config']['dbport'].to_s
initial_root_password node['owncloud']['database']['rootpassword']
action [:create, :start]
end
mysql_connection_info = {
host: '127.0.0.1',
port: node['owncloud']['config']['dbport'],
username: 'root',
password: node['owncloud']['database']['rootpassword']
}
# Create the application database and its dedicated user.
mysql_database node['owncloud']['config']['dbname'] do
connection mysql_connection_info
action :create
end
mysql_database_user node['owncloud']['config']['dbuser'] do
connection mysql_connection_info
database_name node['owncloud']['config']['dbname']
host 'localhost'
password node['owncloud']['config']['dbpassword']
privileges [:all]
action :grant
end
end
when 'pgsql'
# Keep the owncloud dbport and the postgresql config port in sync,
# whichever one was set explicitly.
if node['owncloud']['config']['dbport'].nil?
node.default['owncloud']['config']['dbport'] =
node['postgresql']['config']['port']
else
node.default['postgresql']['config']['port'] =
node['owncloud']['config']['dbport']
end
if %w(localhost 127.0.0.1).include?(node['owncloud']['config']['dbhost'])
# Install PostgreSQL
node.set_unless['postgresql']['password']['postgres'] =
node['owncloud']['database']['rootpassword']
include_recipe 'postgresql::server'
include_recipe 'database::postgresql'
postgresql_connection_info = {
host: 'localhost',
port: node['owncloud']['config']['dbport'],
username: 'postgres',
password: node['postgresql']['password']['postgres']
}
postgresql_database node['owncloud']['config']['dbname'] do
connection postgresql_connection_info
action :create
end
# Create the user first, then grant it privileges on the database.
postgresql_database_user node['owncloud']['config']['dbuser'] do
connection postgresql_connection_info
host 'localhost'
password node['owncloud']['config']['dbpassword']
action :create
end
postgresql_database_user node['owncloud']['config']['dbuser'] do
connection postgresql_connection_info
database_name node['owncloud']['config']['dbname']
host 'localhost'
password node['owncloud']['config']['dbpassword']
privileges [:all]
action :grant
end
end
else
fail "Unsupported database type: #{node['owncloud']['config']['dbtype']}"
end
#==============================================================================
# Set up mail transfer agent
#==============================================================================
# Install postfix only when ownCloud is configured to send via sendmail.
if node['owncloud']['config']['mail_smtpmode'].eql?('sendmail') &&
node['owncloud']['install_postfix']
include_recipe 'postfix::default'
# Fix Ubuntu 15.04 support:
# Force the Debian service provider (Ubuntu 15.04 switched to systemd).
if node['platform'] == 'ubuntu' && node['platform_version'].to_i >= 15
r = resources(service: 'postfix')
r.provider(Chef::Provider::Service::Debian)
end
end
#==============================================================================
# Download and extract ownCloud
#==============================================================================
directory node['owncloud']['www_dir']
if node['owncloud']['deploy_from_git'] != true
# Tarball deployment path.
basename = ::File.basename(download_url)
local_file = ::File.join(Chef::Config[:file_cache_path], basename)
# Prior to Chef 11.6, remote_file does not support conditional get
# so we do a HEAD http_request to mimic it
http_request 'HEAD owncloud' do
message ''
url download_url
if Gem::Version.new(Chef::VERSION) < Gem::Version.new('11.6.0')
action :head
else
action :nothing
end
if File.exist?(local_file)
headers 'If-Modified-Since' => File.mtime(local_file).httpdate
end
notifies :create, 'remote_file[download owncloud]', :immediately
end
remote_file 'download owncloud' do
source download_url
path local_file
if Gem::Version.new(Chef::VERSION) < Gem::Version.new('11.6.0')
action :nothing
else
action :create
end
notifies :run, 'bash[extract owncloud]', :immediately
end
# Extract the tarball, preserving the data and config directories of any
# previous installation.
bash 'extract owncloud' do
code <<-EOF
# remove previous installation if any
if [ -d ./owncloud ]
then
pushd ./owncloud >/dev/null
ls | grep -v 'data\\|config' | xargs rm -r
popd >/dev/null
fi
# extract tar file
tar xfj '#{local_file}' --no-same-owner
EOF
cwd node['owncloud']['www_dir']
action :nothing
end
else
# Git deployment path: explicit ref wins, 'latest' maps to master,
# otherwise use the version tag.
if node['owncloud']['git_ref']
git_ref = node['owncloud']['git_ref']
elsif node['owncloud']['version'].eql?('latest')
git_ref = 'master'
else
git_ref = "v#{node['owncloud']['version']}"
end
git 'clone owncloud' do
destination node['owncloud']['dir']
repository node['owncloud']['git_repo']
reference git_ref
enable_submodules true
action :sync
end
end
#==============================================================================
# Set up webserver
#==============================================================================
# Get the webserver used
web_server = node['owncloud']['web_server']
# include the recipe for installing the webserver
# web_services lists the service resources that later templates restart.
case web_server
when 'apache'
include_recipe 'owncloud::_apache'
web_services = %w(apache2)
when 'nginx'
include_recipe 'owncloud::_nginx'
web_services = %w(nginx php-fpm)
else
fail "Web server not supported: #{web_server}"
end
#==============================================================================
# Initialize configuration file and install ownCloud
#==============================================================================
# create required directories
[
::File.join(node['owncloud']['dir'], 'apps'),
::File.join(node['owncloud']['dir'], 'config'),
node['owncloud']['data_dir']
].each do |dir|
directory dir do
if node['owncloud']['skip_permissions'] == false
owner node[web_server]['user']
group node[web_server]['group']
mode 00750
end
action :create
end
end
# Combine host and optional port into the dbhost value ownCloud expects.
dbhost =
if node['owncloud']['config']['dbport'].nil?
node['owncloud']['config']['dbhost']
else
[
node['owncloud']['config']['dbhost'],
node['owncloud']['config']['dbport']
].join(':')
end
# create autoconfig.php for the installation
# Only rendered on first install — skipped once config.php exists.
template 'autoconfig.php' do
path ::File.join(node['owncloud']['dir'], 'config', 'autoconfig.php')
source 'autoconfig.php.erb'
unless node['owncloud']['skip_permissions']
owner node[web_server]['user']
group node[web_server]['group']
mode 00640
end
variables(
dbtype: node['owncloud']['config']['dbtype'],
dbname: node['owncloud']['config']['dbname'],
dbuser: node['owncloud']['config']['dbuser'],
dbpass: node['owncloud']['config']['dbpassword'],
dbhost: dbhost,
dbprefix: node['owncloud']['config']['dbtableprefix'],
admin_user: node['owncloud']['admin']['user'],
admin_pass: node['owncloud']['admin']['pass'],
data_dir: node['owncloud']['data_dir']
)
not_if do
::File.exist?(::File.join(node['owncloud']['dir'], 'config', 'config.php'))
end
web_services.each do |web_service|
notifies :restart, "service[#{web_service}]", :immediately
end
notifies :run, 'execute[run setup]', :immediately
end
# install ownCloud
# Runs the installer as the webserver user; the grep pipeline fails the
# resource if the installer output contains an error.
execute 'run setup' do
cwd node['owncloud']['dir']
command(
"sudo -u '#{node[web_server]['user']}' php -f index.php "\
'| { ! grep -iA2 error; }'
)
action :nothing
end
# Apply the configuration on attributes to config.php
ruby_block 'apply config' do
block do
self.class.send(:include, OwncloudCookbook::CookbookHelpers)
apply_owncloud_configuration
end
only_if do
::File.exist?(::File.join(node['owncloud']['dir'], 'config', 'config.php'))
end
end
#==============================================================================
# Enable cron for background jobs
#==============================================================================
include_recipe 'cron'
cron_command =
"php -f '#{node['owncloud']['dir']}/cron.php' "\
">> '#{node['owncloud']['data_dir']}/cron.log' 2>&1"
cron 'owncloud cron' do
user node[web_server]['user']
minute node['owncloud']['cron']['min']
hour node['owncloud']['cron']['hour']
day node['owncloud']['cron']['day']
month node['owncloud']['cron']['month']
weekday node['owncloud']['cron']['weekday']
action node['owncloud']['cron']['enabled'] == true ? :create : :delete
command cron_command
end
Fix postgresql_database_user[owncloud] resource duplication
#
# Cookbook Name:: owncloud
# Recipe:: default
#
# Copyright 2013, Onddo Labs, Sl.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==============================================================================
# Calculate dependencies for different distros
#==============================================================================
dbtype = node['owncloud']['config']['dbtype']
download_url =
node['owncloud']['download_url'] % { version: node['owncloud']['version'] }
case node['platform_family']
when 'debian'
# Sync apt package index
include_recipe 'apt'
php_pkgs = %w(php5-gd php5-intl php5-curl php5-json smbclient)
php_pkgs << 'php5-sqlite' if dbtype == 'sqlite'
php_pkgs << 'php5-mysql' if dbtype == 'mysql'
php_pkgs << 'php5-pgsql' if dbtype == 'pgsql'
when 'rhel'
if node['platform'] != 'amazon' && node['platform_version'].to_f < 6
php_pkgs = %w(php53-gd php53-mbstring php53-xml php53-intl samba-client)
php_pkgs << 'php53-mysql' if dbtype == 'mysql'
php_pkgs << 'php53-pgsql' if dbtype == 'pgsql'
if dbtype == 'sqlite'
fail(
"SQLite database type not supported on #{node['platform']}"\
" #{node['platform_version']}"
)
end
else
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
end
when 'fedora'
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
else
Chef::Log.warn('Unsupported platform, trying to guess packages.')
php_pkgs = %w(php-gd php-mbstring php-xml php-intl samba-client)
php_pkgs << 'php-pdo' if dbtype == 'sqlite'
php_pkgs << 'php-mysql' if dbtype == 'mysql'
php_pkgs << 'php-pgsql' if dbtype == 'pgsql'
end
#==============================================================================
# Initialize autogenerated passwords
#==============================================================================
::Chef::Recipe.send(:include, Opscode::OpenSSL::Password)
if Chef::Config[:solo]
if node['owncloud']['config']['dbpassword'].nil? &&
node['owncloud']['config']['dbtype'] != 'sqlite'
fail 'You must set ownCloud\'s database password in chef-solo mode.'
end
if node['owncloud']['database']['rootpassword'].nil? &&
node['owncloud']['config']['dbtype'] != 'sqlite'
fail 'You must set the database admin password in chef-solo mode.'
end
if node['owncloud']['admin']['pass'].nil?
fail 'You must set ownCloud\'s admin password in chef-solo mode.'
end
else
unless node['owncloud']['config']['dbtype'] == 'sqlite'
node.set_unless['owncloud']['config']['dbpassword'] = secure_password
node.set_unless['owncloud']['database']['rootpassword'] = secure_password
end
node.set_unless['owncloud']['admin']['pass'] = secure_password
node.save
end
#==============================================================================
# Install PHP
#==============================================================================
# ownCloud requires PHP >= 5.4.0, so in older ubuntu versions we need to add an
# extra repository in order to provide it
apt_repository 'ondrej-php5-oldstable' do
uri 'http://ppa.launchpad.net/ondrej/php5-oldstable/ubuntu'
distribution node['lsb']['codename'] if node['lsb'].is_a?(Hash)
components %w(main)
keyserver 'keyserver.ubuntu.com'
key 'E5267A6C'
deb_src true
only_if do
node['platform'] == 'ubuntu' &&
Chef::VersionConstraint.new('<= 12.04').include?(node['platform_version'])
end
end
include_recipe 'php'
php_pkgs.each do |pkg|
package pkg do
action :install
end
end
#==============================================================================
# Set up database
#==============================================================================
case node['owncloud']['config']['dbtype']
when 'sqlite'
# With SQLite the table prefix must be oc_
node.default['owncloud']['config']['dbtableprefix'] = 'oc_'
when 'mysql'
if node['owncloud']['config']['dbport'].nil?
node.default['owncloud']['config']['dbport'] = '3306'
end
if %w(localhost 127.0.0.1).include?(node['owncloud']['config']['dbhost'])
# Install MySQL
dbinstance = node['owncloud']['database']['instance']
mysql2_chef_gem dbinstance do
action :install
end
mysql_service dbinstance do
data_dir node['owncloud']['database']['data_dir']
version node['owncloud']['database']['version']
bind_address '127.0.0.1'
port node['owncloud']['config']['dbport'].to_s
initial_root_password node['owncloud']['database']['rootpassword']
action [:create, :start]
end
mysql_connection_info = {
host: '127.0.0.1',
port: node['owncloud']['config']['dbport'],
username: 'root',
password: node['owncloud']['database']['rootpassword']
}
mysql_database node['owncloud']['config']['dbname'] do
connection mysql_connection_info
action :create
end
mysql_database_user node['owncloud']['config']['dbuser'] do
connection mysql_connection_info
database_name node['owncloud']['config']['dbname']
host 'localhost'
password node['owncloud']['config']['dbpassword']
privileges [:all]
action :grant
end
end
when 'pgsql'
if node['owncloud']['config']['dbport'].nil?
node.default['owncloud']['config']['dbport'] =
node['postgresql']['config']['port']
else
node.default['postgresql']['config']['port'] =
node['owncloud']['config']['dbport']
end
if %w(localhost 127.0.0.1).include?(node['owncloud']['config']['dbhost'])
# Install PostgreSQL
node.set_unless['postgresql']['password']['postgres'] =
node['owncloud']['database']['rootpassword']
include_recipe 'postgresql::server'
include_recipe 'database::postgresql'
postgresql_connection_info = {
host: 'localhost',
port: node['owncloud']['config']['dbport'],
username: 'postgres',
password: node['postgresql']['password']['postgres']
}
postgresql_database node['owncloud']['config']['dbname'] do
connection postgresql_connection_info
action :create
end
postgresql_database_user node['owncloud']['config']['dbuser'] do
connection postgresql_connection_info
database_name node['owncloud']['config']['dbname']
host 'localhost'
password node['owncloud']['config']['dbpassword']
privileges [:all]
action [:create, :grant]
end
end
else
fail "Unsupported database type: #{node['owncloud']['config']['dbtype']}"
end
#==============================================================================
# Set up mail transfer agent
#==============================================================================
if node['owncloud']['config']['mail_smtpmode'].eql?('sendmail') &&
node['owncloud']['install_postfix']
include_recipe 'postfix::default'
# Fix Ubuntu 15.04 support:
if node['platform'] == 'ubuntu' && node['platform_version'].to_i >= 15
r = resources(service: 'postfix')
r.provider(Chef::Provider::Service::Debian)
end
end
#==============================================================================
# Download and extract ownCloud
#==============================================================================
directory node['owncloud']['www_dir']
if node['owncloud']['deploy_from_git'] != true
basename = ::File.basename(download_url)
local_file = ::File.join(Chef::Config[:file_cache_path], basename)
# Prior to Chef 11.6, remote_file does not support conditional get
# so we do a HEAD http_request to mimic it
http_request 'HEAD owncloud' do
message ''
url download_url
if Gem::Version.new(Chef::VERSION) < Gem::Version.new('11.6.0')
action :head
else
action :nothing
end
if File.exist?(local_file)
headers 'If-Modified-Since' => File.mtime(local_file).httpdate
end
notifies :create, 'remote_file[download owncloud]', :immediately
end
remote_file 'download owncloud' do
source download_url
path local_file
if Gem::Version.new(Chef::VERSION) < Gem::Version.new('11.6.0')
action :nothing
else
action :create
end
notifies :run, 'bash[extract owncloud]', :immediately
end
bash 'extract owncloud' do
code <<-EOF
# remove previous installation if any
if [ -d ./owncloud ]
then
pushd ./owncloud >/dev/null
ls | grep -v 'data\\|config' | xargs rm -r
popd >/dev/null
fi
# extract tar file
tar xfj '#{local_file}' --no-same-owner
EOF
cwd node['owncloud']['www_dir']
action :nothing
end
else
if node['owncloud']['git_ref']
git_ref = node['owncloud']['git_ref']
elsif node['owncloud']['version'].eql?('latest')
git_ref = 'master'
else
git_ref = "v#{node['owncloud']['version']}"
end
git 'clone owncloud' do
destination node['owncloud']['dir']
repository node['owncloud']['git_repo']
reference git_ref
enable_submodules true
action :sync
end
end
#==============================================================================
# Set up webserver
#==============================================================================
# Get the webserver used
web_server = node['owncloud']['web_server']
# include the recipe for installing the webserver
case web_server
when 'apache'
include_recipe 'owncloud::_apache'
web_services = %w(apache2)
when 'nginx'
include_recipe 'owncloud::_nginx'
web_services = %w(nginx php-fpm)
else
fail "Web server not supported: #{web_server}"
end
#==============================================================================
# Initialize configuration file and install ownCloud
#==============================================================================
# create required directories
[
::File.join(node['owncloud']['dir'], 'apps'),
::File.join(node['owncloud']['dir'], 'config'),
node['owncloud']['data_dir']
].each do |dir|
directory dir do
if node['owncloud']['skip_permissions'] == false
owner node[web_server]['user']
group node[web_server]['group']
mode 00750
end
action :create
end
end
dbhost =
if node['owncloud']['config']['dbport'].nil?
node['owncloud']['config']['dbhost']
else
[
node['owncloud']['config']['dbhost'],
node['owncloud']['config']['dbport']
].join(':')
end
# create autoconfig.php for the installation
template 'autoconfig.php' do
path ::File.join(node['owncloud']['dir'], 'config', 'autoconfig.php')
source 'autoconfig.php.erb'
unless node['owncloud']['skip_permissions']
owner node[web_server]['user']
group node[web_server]['group']
mode 00640
end
variables(
dbtype: node['owncloud']['config']['dbtype'],
dbname: node['owncloud']['config']['dbname'],
dbuser: node['owncloud']['config']['dbuser'],
dbpass: node['owncloud']['config']['dbpassword'],
dbhost: dbhost,
dbprefix: node['owncloud']['config']['dbtableprefix'],
admin_user: node['owncloud']['admin']['user'],
admin_pass: node['owncloud']['admin']['pass'],
data_dir: node['owncloud']['data_dir']
)
not_if do
::File.exist?(::File.join(node['owncloud']['dir'], 'config', 'config.php'))
end
web_services.each do |web_service|
notifies :restart, "service[#{web_service}]", :immediately
end
notifies :run, 'execute[run setup]', :immediately
end
# install ownCloud
execute 'run setup' do
cwd node['owncloud']['dir']
command(
"sudo -u '#{node[web_server]['user']}' php -f index.php "\
'| { ! grep -iA2 error; }'
)
action :nothing
end
# Apply the configuration on attributes to config.php
ruby_block 'apply config' do
block do
self.class.send(:include, OwncloudCookbook::CookbookHelpers)
apply_owncloud_configuration
end
only_if do
::File.exist?(::File.join(node['owncloud']['dir'], 'config', 'config.php'))
end
end
#==============================================================================
# Enable cron for background jobs
#==============================================================================
include_recipe 'cron'
cron_command =
"php -f '#{node['owncloud']['dir']}/cron.php' "\
">> '#{node['owncloud']['data_dir']}/cron.log' 2>&1"
cron 'owncloud cron' do
user node[web_server]['user']
minute node['owncloud']['cron']['min']
hour node['owncloud']['cron']['hour']
day node['owncloud']['cron']['day']
month node['owncloud']['cron']['month']
weekday node['owncloud']['cron']['weekday']
action node['owncloud']['cron']['enabled'] == true ? :create : :delete
command cron_command
end
|
#
# Author:: Lance Powell (<lanceraymondpowell@gmail.com>)
# Cookbook Name:: filezilla
# Recipe:: default
#
# Copyright (c) 2015 The Authors, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Installs the client
# Add _server.rb to install the server
case node['platform']
when 'windows'
windows_package "FileZilla Client" do
source node['filezilla']['url']
installer_type :custom
options "/S /user=all"
action :install
end
else
Chef::Log.warn('FileZilla Client can only be installed on the Windows at this time.')
end
removed installer_type option
#
# Author:: Lance Powell (<lanceraymondpowell@gmail.com>)
# Cookbook Name:: filezilla
# Recipe:: default
#
# Copyright (c) 2015 The Authors, All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Installs the client
# Add _server.rb to install the server
case node['platform']
when 'windows'
windows_package "FileZilla Client" do
source node['filezilla']['url']
options "/S /user=all"
action :install
end
else
Chef::Log.warn('FileZilla Client can only be installed on the Windows at this time.')
end
|
#
# Cookbook Name:: sensu-client-wrapper
# Recipe:: default
#
# Copyright 2013, Ryutaro YOSHIBA
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
include_recipe "sensu::default"
if node["sensu-client-wrapper"]["ipaddress"] then
ipaddress = node["sensu-client-wrapper"]["ipaddress"]
else
ipaddress = node["ipaddress"]
end
node_name = node_name() # from helper
sensu_client node_name do
address ipaddress
if node["sensu-client-wrapper"]["roles"] then
subscriptions node["sensu-client-wrapper"]["roles"] + ["all"]
else
subscriptions ["all"]
end
end
execute "chmod 644 /etc/sensu/conf.d/client.json" do
action :run
end
include_recipe "sensu::client_service"
# Installing check-procs
remote_file "/etc/sensu/plugins/check-procs.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/processes/check-procs.rb"
mode 0755
end
# Installing check-disk
remote_file "/etc/sensu/plugins/check-disk.rb" do
source "https://github.com/sensu/sensu-community-plugins/raw/master/plugins/system/check-disk.rb"
mode 0755
end
# Installing check-cpu
remote_file "/etc/sensu/plugins/check-cpu.rb" do
source "https://github.com/sensu/sensu-community-plugins/raw/master/plugins/system/check-cpu.rb"
mode 0755
end
# Installing check-ram
remote_file "/etc/sensu/plugins/check-ram.rb" do
source "https://github.com/sensu/sensu-community-plugins/raw/master/plugins/system/check-ram.rb"
mode 0755
end
# Installing load-metric
remote_file "/etc/sensu/plugins/load-metrics.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/load-metrics.rb"
mode 0755
end
# Installing vmstat-metrics.rb
remote_file "/etc/sensu/plugins/vmstat-metrics.rb" do
source "https://github.com/sensu/sensu-community-plugins/raw/master/plugins/system/vmstat-metrics.rb"
mode 0755
end
service "sensu-client" do
action :restart
end
# vim: filetype=ruby.chef
fix url to avoid redirection
#
# Cookbook Name:: sensu-client-wrapper
# Recipe:: default
#
# Copyright 2013, Ryutaro YOSHIBA
#
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
include_recipe "sensu::default"
if node["sensu-client-wrapper"]["ipaddress"] then
ipaddress = node["sensu-client-wrapper"]["ipaddress"]
else
ipaddress = node["ipaddress"]
end
node_name = node_name() # from helper
sensu_client node_name do
address ipaddress
if node["sensu-client-wrapper"]["roles"] then
subscriptions node["sensu-client-wrapper"]["roles"] + ["all"]
else
subscriptions ["all"]
end
end
execute "chmod 644 /etc/sensu/conf.d/client.json" do
action :run
end
include_recipe "sensu::client_service"
# Installing check-procs
remote_file "/etc/sensu/plugins/check-procs.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/processes/check-procs.rb"
mode 0755
end
# Installing check-disk
remote_file "/etc/sensu/plugins/check-disk.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/check-disk.rb"
mode 0755
end
# Installing check-cpu
remote_file "/etc/sensu/plugins/check-cpu.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/check-cpu.rb"
mode 0755
end
# Installing check-ram
remote_file "/etc/sensu/plugins/check-ram.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/check-ram.rb"
mode 0755
end
# Installing load-metric
remote_file "/etc/sensu/plugins/load-metrics.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/load-metrics.rb"
mode 0755
end
# Installing vmstat-metrics.rb
remote_file "/etc/sensu/plugins/vmstat-metrics.rb" do
source "https://raw.github.com/sensu/sensu-community-plugins/master/plugins/system/vmstat-metrics.rb"
mode 0755
end
service "sensu-client" do
action :restart
end
# vim: filetype=ruby.chef
|
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
describe "Thread#[]=" do
it "raises exceptions on the wrong type of keys" do
lambda { Thread.current[nil] = true }.should raise_error(TypeError)
lambda { Thread.current[5] = true }.should raise_error(ArgumentError)
end
end
Thread#[]=: 1.9 raises TypeError for invalid keys.
require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
describe "Thread#[]=" do
ruby_version_is ""..."1.9.1" do
it "raises exceptions on the wrong type of keys" do
lambda { Thread.current[nil] = true }.should raise_error(TypeError)
lambda { Thread.current[5] = true }.should raise_error(ArgumentError)
end
end
ruby_version_is "1.9.1" do
it "raises exceptions on the wrong type of keys" do
lambda { Thread.current[nil] = true }.should raise_error(TypeError)
lambda { Thread.current[5] = true }.should raise_error(TypeError)
end
end
end
|
require 'json'
bash 'fix_java_path_for_glassfish_cookbook' do
user "root"
code <<-EOF
# upstart job in glassfish expects java to be installed in /bin/java
test -f /usr/bin/java && ln -sf /usr/bin/java /bin/java
EOF
end
private_ip=my_private_ip()
hopsworks_db = "hopsworks"
realmname="kthfsrealm"
mysql_user=node[:mysql][:user]
mysql_pwd=node[:mysql][:password]
# hopsworks_grants "create_hopsworks_db" do
# action :create_db
# end
tables_path = "#{Chef::Config[:file_cache_path]}/tables.sql"
rows_path = "#{Chef::Config[:file_cache_path]}/rows.sql"
hopsworks_grants "creds" do
tables_path "#{tables_path}"
rows_path "#{rows_path}"
action :nothing
end
Chef::Log.info("Could not find previously defined #{tables_path} resource")
template tables_path do
source File.basename("#{tables_path}") + ".erb"
owner node[:glassfish][:user]
mode 0750
action :create
variables({
:private_ip => private_ip
})
notifies :create_tables, 'hopsworks_grants[creds]', :immediately
end
template "#{rows_path}" do
source File.basename("#{rows_path}") + ".erb"
owner node[:glassfish][:user]
mode 0755
action :create
notifies :insert_rows, 'hopsworks_grants[creds]', :immediately
end
###############################################################################
# config glassfish
###############################################################################
# group node[:glassfish][:group] do
# end
# user node['glassfish']['user'] do
# supports :manage_home => true
# home "/home/#{node['glassfish']['user']}"
# shell '/bin/bash'
# action :create
# system true
# not_if "getent passwd #{node['glassfish']['user']}"
# end
# group node[:glassfish][:group] do
# action :modify
# members node[:glassfish][:user]
# append true
# end
username="adminuser"
password="adminpw"
domain_name="domain1"
domains_dir = '/usr/local/glassfish/glassfish/domains'
admin_port = 4848
mysql_host = private_recipe_ip("ndb","mysqld")
mysql_user = "kthfs"
mysql_password = "kthfs"
node.override = {
'java' => {
'install_flavor' => 'oracle',
'jdk_version' => 7,
'oracle' => {
'accept_oracle_download_terms' => true
}
},
'glassfish' => {
'version' => '4.1',
'base_dir' => '/usr/local/glassfish',
'domains_dir' => domains_dir,
'domains' => {
domain_name => {
'config' => {
'min_memory' => 1024,
'max_memory' => 1024,
'max_perm_size' => 256,
'port' => 8080,
'admin_port' => admin_port,
'username' => username,
'password' => password,
'master_password' => 'mykeystorepassword',
'remote_access' => false,
'jvm_options' => ['-DMYAPP_CONFIG_DIR=/usr/local/myapp/config', '-Dcom.sun.enterprise.tools.admingui.NO_NETWORK=true'],
'secure' => false
},
'extra_libraries' => {
'jdbcdriver' => {
'type' => 'common',
'url' => 'http://snurran.sics.se/hops/mysql-connector-java-5.1.29-bin.jar'
}
},
'threadpools' => {
'thread-pool-1' => {
'maxthreadpoolsize' => 200,
'minthreadpoolsize' => 5,
'idletimeout' => 900,
'maxqueuesize' => 4096
},
'http-thread-pool' => {
'maxthreadpoolsize' => 200,
'minthreadpoolsize' => 5,
'idletimeout' => 900,
'maxqueuesize' => 4096
},
'admin-pool' => {
'maxthreadpoolsize' => 50,
'minthreadpoolsize' => 5,
'maxqueuesize' => 256
}
},
'iiop_listeners' => {
'orb-listener-1' => {
'enabled' => true,
'iiopport' => 1072,
'securityenabled' => false
}
},
'jdbc_connection_pools' => {
'hopsworksPool' => {
'config' => {
'datasourceclassname' => 'com.mysql.jdbc.jdbc2.optional.MysqlDataSource',
'restype' => 'javax.sql.DataSource',
'isconnectvalidatereq' => 'true',
'validationmethod' => 'auto-commit',
'ping' => 'true',
'description' => 'App Pool',
'properties' => {
'Url' => "jdbc:mysql://#{mysql_host}:3306/",
'User' => mysql_user,
'Password' => mysql_password
}
},
'resources' => {
'jdbc/hopsworks' => {
'description' => 'Resource for App Pool',
}
}
}
},
'deployables' => {
'HopsWorks' => {
'url' => 'http://snurran.sics.se/hops/hopsworks.war',
'context_root' => '/hopsworks'
}
},
}
}
}
}
include_recipe 'glassfish::default'
include_recipe 'glassfish::attribute_driven_domain'
glassfish_secure_admin domain_name do
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
action :enable
end
# glassfish_deployable "hopsworks" do
# component_name "hopsworks"
# url "http://snurran.sics.se/hops/hopsworks.war"
# context_root "/"
# domain_name domain_name
# password_file "#{domains_dir}/#{domain_name}_admin_passwd"
# username username
# admin_port admin_port
# secure false
# availability_enabled true
# action :deploy
# end
props = {
'datasource-jndi' => 'jdbc/hopsworks',
'password-column' => 'password',
'group-table' => 'hopsworks.users_groups',
'user-table' => 'hopsworks.users',
'group-name-column' => 'group_name',
'user-name-column' => 'email',
'group-table-user-name-column' => 'email',
'encoding' => 'hex',
'digestrealm-password-enc-algorithm' => 'SHA-256',
'digest-algorithm' => 'SHA-256'
}
glassfish_auth_realm "#{realmname}" do
realm_name "#{realmname}"
jaas_context "jdbcRealm"
properties props
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
classname "com.sun.enterprise.security.ee.auth.realm.jdbc.JDBCRealm"
end
# Avoid empty property values - glassfish will crash otherwise
if node[:hopsworks][:gmail][:email].empty?
node.default[:hopsworks][:gmail][:email]="none"
end
if node[:hopsworks][:gmail][:password].empty?
node.default[:hopsworks][:gmail][:password]="empty"
end
gmailProps = {
'mail-smtp-host' => 'smtp.gmail.com',
'mail-smtp-user' => "#{node[:hopsworks][:gmail][:email]}",
'mail-smtp-password' => "#{node[:hopsworks][:gmail][:password]}",
'mail-smtp-auth' => 'true',
'mail-smtp-port' => '587',
'mail-smtp-socketFactory-port' => '465',
'mail-smtp-socketFactory-class' => 'javax.net.ssl.SSLSocketFactory',
'mail-smtp-starttls-enable' => 'true',
'mail.smtp.ssl.enable' => 'false',
'mail-smtp-socketFactory-fallback' => 'false'
}
glassfish_javamail_resource "gmail" do
jndi_name "mail/BBCMail"
mailuser node[:hopsworks][:gmail][:email]
mailhost "smtp.gmail.com"
fromaddress node[:hopsworks][:gmail][:email]
properties gmailProps
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
action :create
end
No 'ee' in jdbc classname
require 'json'
bash 'fix_java_path_for_glassfish_cookbook' do
user "root"
code <<-EOF
# upstart job in glassfish expects java to be installed in /bin/java
test -f /usr/bin/java && ln -sf /usr/bin/java /bin/java
EOF
end
private_ip=my_private_ip()
hopsworks_db = "hopsworks"
realmname="kthfsrealm"
mysql_user=node[:mysql][:user]
mysql_pwd=node[:mysql][:password]
# hopsworks_grants "create_hopsworks_db" do
# action :create_db
# end
tables_path = "#{Chef::Config[:file_cache_path]}/tables.sql"
rows_path = "#{Chef::Config[:file_cache_path]}/rows.sql"
hopsworks_grants "creds" do
tables_path "#{tables_path}"
rows_path "#{rows_path}"
action :nothing
end
Chef::Log.info("Could not find previously defined #{tables_path} resource")
template tables_path do
source File.basename("#{tables_path}") + ".erb"
owner node[:glassfish][:user]
mode 0750
action :create
variables({
:private_ip => private_ip
})
notifies :create_tables, 'hopsworks_grants[creds]', :immediately
end
template "#{rows_path}" do
source File.basename("#{rows_path}") + ".erb"
owner node[:glassfish][:user]
mode 0755
action :create
notifies :insert_rows, 'hopsworks_grants[creds]', :immediately
end
###############################################################################
# config glassfish
###############################################################################
# group node[:glassfish][:group] do
# end
# user node['glassfish']['user'] do
# supports :manage_home => true
# home "/home/#{node['glassfish']['user']}"
# shell '/bin/bash'
# action :create
# system true
# not_if "getent passwd #{node['glassfish']['user']}"
# end
# group node[:glassfish][:group] do
# action :modify
# members node[:glassfish][:user]
# append true
# end
username="adminuser"
password="adminpw"
domain_name="domain1"
domains_dir = '/usr/local/glassfish/glassfish/domains'
admin_port = 4848
mysql_host = private_recipe_ip("ndb","mysqld")
mysql_user = "kthfs"
mysql_password = "kthfs"
node.override = {
'java' => {
'install_flavor' => 'oracle',
'jdk_version' => 7,
'oracle' => {
'accept_oracle_download_terms' => true
}
},
'glassfish' => {
'version' => '4.1',
'base_dir' => '/usr/local/glassfish',
'domains_dir' => domains_dir,
'domains' => {
domain_name => {
'config' => {
'min_memory' => 1024,
'max_memory' => 1024,
'max_perm_size' => 256,
'port' => 8080,
'admin_port' => admin_port,
'username' => username,
'password' => password,
'master_password' => 'mykeystorepassword',
'remote_access' => false,
'jvm_options' => ['-DMYAPP_CONFIG_DIR=/usr/local/myapp/config', '-Dcom.sun.enterprise.tools.admingui.NO_NETWORK=true'],
'secure' => false
},
'extra_libraries' => {
'jdbcdriver' => {
'type' => 'common',
'url' => 'http://snurran.sics.se/hops/mysql-connector-java-5.1.29-bin.jar'
}
},
'threadpools' => {
'thread-pool-1' => {
'maxthreadpoolsize' => 200,
'minthreadpoolsize' => 5,
'idletimeout' => 900,
'maxqueuesize' => 4096
},
'http-thread-pool' => {
'maxthreadpoolsize' => 200,
'minthreadpoolsize' => 5,
'idletimeout' => 900,
'maxqueuesize' => 4096
},
'admin-pool' => {
'maxthreadpoolsize' => 50,
'minthreadpoolsize' => 5,
'maxqueuesize' => 256
}
},
'iiop_listeners' => {
'orb-listener-1' => {
'enabled' => true,
'iiopport' => 1072,
'securityenabled' => false
}
},
'jdbc_connection_pools' => {
'hopsworksPool' => {
'config' => {
'datasourceclassname' => 'com.mysql.jdbc.jdbc2.optional.MysqlDataSource',
'restype' => 'javax.sql.DataSource',
'isconnectvalidatereq' => 'true',
'validationmethod' => 'auto-commit',
'ping' => 'true',
'description' => 'App Pool',
'properties' => {
'Url' => "jdbc:mysql://#{mysql_host}:3306/",
'User' => mysql_user,
'Password' => mysql_password
}
},
'resources' => {
'jdbc/hopsworks' => {
'description' => 'Resource for App Pool',
}
}
}
},
'deployables' => {
'HopsWorks' => {
'url' => 'http://snurran.sics.se/hops/hopsworks.war',
'context_root' => '/hopsworks'
}
},
}
}
}
}
include_recipe 'glassfish::default'
include_recipe 'glassfish::attribute_driven_domain'
glassfish_secure_admin domain_name do
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
action :enable
end
# glassfish_deployable "hopsworks" do
# component_name "hopsworks"
# url "http://snurran.sics.se/hops/hopsworks.war"
# context_root "/"
# domain_name domain_name
# password_file "#{domains_dir}/#{domain_name}_admin_passwd"
# username username
# admin_port admin_port
# secure false
# availability_enabled true
# action :deploy
# end
props = {
'datasource-jndi' => 'jdbc/hopsworks',
'password-column' => 'password',
'group-table' => 'hopsworks.users_groups',
'user-table' => 'hopsworks.users',
'group-name-column' => 'group_name',
'user-name-column' => 'email',
'group-table-user-name-column' => 'email',
'encoding' => 'hex',
'digestrealm-password-enc-algorithm' => 'SHA-256',
'digest-algorithm' => 'SHA-256'
}
glassfish_auth_realm "#{realmname}" do
realm_name "#{realmname}"
jaas_context "jdbcRealm"
properties props
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
classname "com.sun.enterprise.security.auth.realm.jdbc.JDBCRealm"
end
# Avoid empty property values - glassfish will crash otherwise
if node[:hopsworks][:gmail][:email].empty?
node.default[:hopsworks][:gmail][:email]="none"
end
if node[:hopsworks][:gmail][:password].empty?
node.default[:hopsworks][:gmail][:password]="empty"
end
gmailProps = {
'mail-smtp-host' => 'smtp.gmail.com',
'mail-smtp-user' => "#{node[:hopsworks][:gmail][:email]}",
'mail-smtp-password' => "#{node[:hopsworks][:gmail][:password]}",
'mail-smtp-auth' => 'true',
'mail-smtp-port' => '587',
'mail-smtp-socketFactory-port' => '465',
'mail-smtp-socketFactory-class' => 'javax.net.ssl.SSLSocketFactory',
'mail-smtp-starttls-enable' => 'true',
'mail.smtp.ssl.enable' => 'false',
'mail-smtp-socketFactory-fallback' => 'false'
}
glassfish_javamail_resource "gmail" do
jndi_name "mail/BBCMail"
mailuser node[:hopsworks][:gmail][:email]
mailhost "smtp.gmail.com"
fromaddress node[:hopsworks][:gmail][:email]
properties gmailProps
domain_name domain_name
password_file "#{domains_dir}/#{domain_name}_admin_passwd"
username username
admin_port admin_port
secure false
action :create
end
|
require_relative '../../spec_helper'
require_relative 'fixtures/classes'
# Specs for UnboundMethod#bind: re-binding unbound methods onto compatible
# receivers, and the TypeError raised for incompatible ones.
describe "UnboundMethod#bind" do
  before :each do
    @normal_um = UnboundMethodSpecs::Methods.new.method(:foo).unbind
    @parent_um = UnboundMethodSpecs::Parent.new.method(:foo).unbind
    @child1_um = UnboundMethodSpecs::Child1.new.method(:foo).unbind
    @child2_um = UnboundMethodSpecs::Child2.new.method(:foo).unbind
  end

  it "raises TypeError if object is not kind_of? the Module the method defined in" do
    lambda { @normal_um.bind(UnboundMethodSpecs::B.new) }.should raise_error(TypeError)
  end

  it "returns Method for any object that is kind_of? the Module method was extracted from" do
    @normal_um.bind(UnboundMethodSpecs::Methods.new).should be_kind_of(Method)
  end

  it "returns Method on any object when UnboundMethod is unbound from a module" do
    UnboundMethodSpecs::Mod.instance_method(:from_mod).bind(Object.new).should be_kind_of(Method)
  end

  it "the returned Method is equal to the one directly returned by obj.method" do
    obj = UnboundMethodSpecs::Methods.new
    @normal_um.bind(obj).should == obj.method(:foo)
  end

  it "returns Method for any object kind_of? the Module the method is defined in" do
    @parent_um.bind(UnboundMethodSpecs::Child1.new).should be_kind_of(Method)
    @child1_um.bind(UnboundMethodSpecs::Parent.new).should be_kind_of(Method)
    @child2_um.bind(UnboundMethodSpecs::Child1.new).should be_kind_of(Method)
  end

  it "returns a callable method" do
    obj = UnboundMethodSpecs::Methods.new
    @normal_um.bind(obj).call.should == obj.foo
  end

  it "binds a Parent's class method to any Child's class methods" do
    m = UnboundMethodSpecs::Parent.method(:class_method).unbind.bind(UnboundMethodSpecs::Child1)
    m.should be_an_instance_of(Method)
    m.call.should == "I am UnboundMethodSpecs::Child1"
  end

  # Description typo fixed: "binding a an object singleton's method" ->
  # "binding an object's singleton method".
  it "will raise when binding an object's singleton method to another object" do
    other = UnboundMethodSpecs::Parent.new
    p = UnboundMethodSpecs::Parent.new
    class << p
      def singleton_method
        :single
      end
    end
    um = p.method(:singleton_method).unbind
    lambda{ um.bind(other) }.should raise_error(TypeError)
  end
end
Add spec for binding Kernel#instance_of? on BasicObject
require_relative '../../spec_helper'
require_relative 'fixtures/classes'
# Specs for UnboundMethod#bind: re-binding unbound methods onto compatible
# receivers, and the TypeError raised for incompatible ones.
describe "UnboundMethod#bind" do
  before :each do
    @normal_um = UnboundMethodSpecs::Methods.new.method(:foo).unbind
    @parent_um = UnboundMethodSpecs::Parent.new.method(:foo).unbind
    @child1_um = UnboundMethodSpecs::Child1.new.method(:foo).unbind
    @child2_um = UnboundMethodSpecs::Child2.new.method(:foo).unbind
  end

  it "raises TypeError if object is not kind_of? the Module the method defined in" do
    lambda { @normal_um.bind(UnboundMethodSpecs::B.new) }.should raise_error(TypeError)
  end

  it "returns Method for any object that is kind_of? the Module method was extracted from" do
    @normal_um.bind(UnboundMethodSpecs::Methods.new).should be_kind_of(Method)
  end

  it "returns Method on any object when UnboundMethod is unbound from a module" do
    UnboundMethodSpecs::Mod.instance_method(:from_mod).bind(Object.new).should be_kind_of(Method)
  end

  it "the returned Method is equal to the one directly returned by obj.method" do
    obj = UnboundMethodSpecs::Methods.new
    @normal_um.bind(obj).should == obj.method(:foo)
  end

  it "returns Method for any object kind_of? the Module the method is defined in" do
    @parent_um.bind(UnboundMethodSpecs::Child1.new).should be_kind_of(Method)
    @child1_um.bind(UnboundMethodSpecs::Parent.new).should be_kind_of(Method)
    @child2_um.bind(UnboundMethodSpecs::Child1.new).should be_kind_of(Method)
  end

  it "allows binding a Kernel method retrieved from Object on BasicObject" do
    Object.instance_method(:instance_of?).bind(BasicObject.new).call(BasicObject).should == true
  end

  it "returns a callable method" do
    obj = UnboundMethodSpecs::Methods.new
    @normal_um.bind(obj).call.should == obj.foo
  end

  it "binds a Parent's class method to any Child's class methods" do
    m = UnboundMethodSpecs::Parent.method(:class_method).unbind.bind(UnboundMethodSpecs::Child1)
    m.should be_an_instance_of(Method)
    m.call.should == "I am UnboundMethodSpecs::Child1"
  end

  # Description typo fixed: "binding a an object singleton's method" ->
  # "binding an object's singleton method".
  it "will raise when binding an object's singleton method to another object" do
    other = UnboundMethodSpecs::Parent.new
    p = UnboundMethodSpecs::Parent.new
    class << p
      def singleton_method
        :single
      end
    end
    um = p.method(:singleton_method).unbind
    lambda{ um.bind(other) }.should raise_error(TypeError)
  end
end
|
include_recipe "java::oracle"

# Install Tomcat 7 and its companion packages.
%w{ libtomcat7-java tomcat7-common tomcat7-user tomcat7-docs tomcat7-admin tomcat7 }.each do |package_name|
  package package_name do
    action :install
  end
end

# configure tomcat users
template "/etc/tomcat7/tomcat-users.xml" do
  source "tomcat-users.xml.erb"
  owner "root"
  group "tomcat7"
  mode "775"
end

# Fetch the Solr distribution tarball once.
remote_file "/tmp/apache-solr-#{node[:solr][:version]}.tgz" do
  source node[:solr][:url]
  mode "755"
  action :create_if_missing
end

# bash unpack solr
bash 'unpack solr' do
  user 'root'
  cwd '/tmp'
  code "tar -xzvf apache-solr-#{node[:solr][:version]}.tgz"
  not_if "test -d #{node[:solr][:unpack_path]}"
end

# Deploy the Solr webapp and example config into Tomcat.
bash "install solr on tomcat" do
  user "root"
  cwd node[:solr][:unpack_path]
  code <<-EOH
cp -f dist/solr-#{node[:solr][:version]}.war /var/lib/tomcat7/webapps/solr.war
cp -Rf example/solr/ /var/lib/tomcat7/solr/
  EOH
  # Implements the old "TODO: not if case": only copy on first converge,
  # i.e. while the solr directory does not exist yet.
  not_if "test -d /var/lib/tomcat7/solr/"
end

# copy the solr config
cookbook_file "/var/lib/tomcat7/conf/Catalina/localhost/solr.xml" do
  cookbook "tomcat-solr"
  source "solr.xml"
  owner "root"
  group "root"
  mode 0775
end

directory "/var/lib/tomcat7/solr/data" do
  owner "tomcat7"
  group "tomcat7"
  mode "0755"
  action :create
end

# give the respective permissions
# Renamed from "install solr on tomcat": a second bash resource with the
# same type+name shadows the first for notifications and muddles the run log.
bash "configure solr permissions and restart tomcat" do
  user "root"
  code <<-EOH
cp /tmp/solr-#{node[:solr][:version]}/example/lib/ext/* /usr/share/tomcat7/lib/
cp /tmp/solr-#{node[:solr][:version]}/example/resources/log4j.properties /usr/share/tomcat7/lib/
chown -R tomcat7:tomcat7 /var/lib/tomcat7/
chmod 775 /var/lib/tomcat7/conf/tomcat-users.xml
service tomcat7 restart
EOH
end
Guard the Solr install step with a not_if check so it only runs when the Solr directory has not been created yet.
include_recipe "java::oracle"

# Install Tomcat 7 and its companion packages.
%w{ libtomcat7-java tomcat7-common tomcat7-user tomcat7-docs tomcat7-admin tomcat7 }.each do |package_name|
  package package_name do
    action :install
  end
end

# configure tomcat users
template "/etc/tomcat7/tomcat-users.xml" do
  source "tomcat-users.xml.erb"
  owner "root"
  group "tomcat7"
  mode "775"
end

# Fetch the Solr distribution tarball once.
remote_file "/tmp/apache-solr-#{node[:solr][:version]}.tgz" do
  source node[:solr][:url]
  mode "755"
  action :create_if_missing
end

# bash unpack solr
bash 'unpack solr' do
  user 'root'
  cwd '/tmp'
  code "tar -xzvf apache-solr-#{node[:solr][:version]}.tgz"
  not_if "test -d #{node[:solr][:unpack_path]}"
end

# Deploy the Solr webapp and example config into Tomcat (first run only).
bash "install solr on tomcat" do
  user "root"
  cwd node[:solr][:unpack_path]
  code <<-EOH
cp -f dist/solr-#{node[:solr][:version]}.war /var/lib/tomcat7/webapps/solr.war
cp -Rf example/solr/ /var/lib/tomcat7/solr/
  EOH
  not_if "test -d /var/lib/tomcat7/solr/"
end

# copy the solr config
cookbook_file "/var/lib/tomcat7/conf/Catalina/localhost/solr.xml" do
  cookbook "tomcat-solr"
  source "solr.xml"
  owner "root"
  group "root"
  mode 0775
end

directory "/var/lib/tomcat7/solr/data" do
  owner "tomcat7"
  group "tomcat7"
  mode "0755"
  action :create
end

# give the respective permissions
# Renamed from "install solr on tomcat": a second bash resource with the
# same type+name shadows the first for notifications and muddles the run log.
bash "configure solr permissions and restart tomcat" do
  user "root"
  code <<-EOH
cp /tmp/solr-#{node[:solr][:version]}/example/lib/ext/* /usr/share/tomcat7/lib/
cp /tmp/solr-#{node[:solr][:version]}/example/resources/log4j.properties /usr/share/tomcat7/lib/
chown -R tomcat7:tomcat7 /var/lib/tomcat7/
chmod 775 /var/lib/tomcat7/conf/tomcat-users.xml
service tomcat7 restart
EOH
end
|
# remote_syslog daemon, managed through its SysV init script; Chef only
# drives it via notifications (action :nothing).
service "remote_syslog" do
  init_command "/etc/init.d/remote_syslog"
  reload_command "/etc/init.d/remote_syslog reload"
  action :nothing
  supports :status => true, :start => true, :stop => true, :restart => true
end

service 'monit' do
  action :nothing
end

src_filename = node['remote_syslog2']['filename']
src_filepath = "#{Chef::Config['file_cache_path']}/#{src_filename}"
extract_path = "#{Chef::Config['file_cache_path']}/remote_syslog2"

# One pass per OpsWorks deploy entry; installed files are owned by the
# deploy user/group.
node[:deploy].each do |application, deploy|
  remote_file src_filepath do
    source "http://github.com/papertrail/remote_syslog2/releases/download/v0.17/remote_syslog_linux_amd64.tar.gz"
    owner deploy[:user]
    group deploy[:group]
    mode "0644"
  end

  bash 'extract and copy executable' do
    cwd ::File.dirname(src_filepath)
    code <<-EOH
mkdir -p #{extract_path}
tar xzf #{src_filename} -C #{extract_path}
mv #{extract_path}/remote_syslog/remote_syslog #{node['remote_syslog2']['install_dir']}
    EOH
    # File.exist? -- File.exists? is deprecated (removed in Ruby 3.2).
    not_if { ::File.exist?(extract_path) }
  end

  file "#{node['remote_syslog2']['install_dir']}/remote_syslog" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  file "/tmp/remote_syslog.log" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  file "/etc/remote_syslog.pid" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  # List of log files to ship; rendered from logs.yml.erb.
  template "/etc/log_files.yml" do
    action :create
    owner deploy[:user]
    group deploy[:group]
    mode '0644'
    source 'logs.yml.erb'
  end

  cookbook_file '/etc/init.d/remote_syslog' do
    action :create
    owner deploy[:user]
    group deploy[:group]
    mode '0755'
    source 'remote_syslog.init'
    notifies :reload, "service[remote_syslog]", :immediately
  end
end

template "#{node[:monit][:conf_dir]}/remote_syslog.monitrc" do
  source 'remote_syslog.monitrc.erb'
  owner 'root'
  group 'root'
  mode 0644
  notifies :restart, "service[monit]"
end

# NOTE(review): re-declares service[remote_syslog] (same type+name as the
# resource at the top) to force a reload every run -- confirm intended.
service "remote_syslog" do
  action [:reload]
end
Update default.rb: fetch the remote_syslog2 tarball from our S3 bucket instead of the GitHub release URL.
# remote_syslog daemon, managed through its SysV init script; Chef only
# drives it via notifications (action :nothing).
service "remote_syslog" do
  init_command "/etc/init.d/remote_syslog"
  reload_command "/etc/init.d/remote_syslog reload"
  action :nothing
  supports :status => true, :start => true, :stop => true, :restart => true
end

service 'monit' do
  action :nothing
end

src_filename = node['remote_syslog2']['filename']
src_filepath = "#{Chef::Config['file_cache_path']}/#{src_filename}"
extract_path = "#{Chef::Config['file_cache_path']}/remote_syslog2"

# One pass per OpsWorks deploy entry; installed files are owned by the
# deploy user/group.
node[:deploy].each do |application, deploy|
  remote_file src_filepath do
    source "https://kit-production-ad-images.s3.amazonaws.com/remote_syslog_linux_amd64.tar.gz"
    owner deploy[:user]
    group deploy[:group]
    mode "0644"
  end

  bash 'extract and copy executable' do
    cwd ::File.dirname(src_filepath)
    code <<-EOH
mkdir -p #{extract_path}
tar xzf #{src_filename} -C #{extract_path}
mv #{extract_path}/remote_syslog/remote_syslog #{node['remote_syslog2']['install_dir']}
    EOH
    # File.exist? -- File.exists? is deprecated (removed in Ruby 3.2).
    not_if { ::File.exist?(extract_path) }
  end

  file "#{node['remote_syslog2']['install_dir']}/remote_syslog" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  file "/tmp/remote_syslog.log" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  file "/etc/remote_syslog.pid" do
    owner deploy[:user]
    group deploy[:group]
    mode "0755"
    action :touch
  end

  # List of log files to ship; rendered from logs.yml.erb.
  template "/etc/log_files.yml" do
    action :create
    owner deploy[:user]
    group deploy[:group]
    mode '0644'
    source 'logs.yml.erb'
  end

  cookbook_file '/etc/init.d/remote_syslog' do
    action :create
    owner deploy[:user]
    group deploy[:group]
    mode '0755'
    source 'remote_syslog.init'
    notifies :reload, "service[remote_syslog]", :immediately
  end
end

template "#{node[:monit][:conf_dir]}/remote_syslog.monitrc" do
  source 'remote_syslog.monitrc.erb'
  owner 'root'
  group 'root'
  mode 0644
  notifies :restart, "service[monit]"
end

# NOTE(review): re-declares service[remote_syslog] (same type+name as the
# resource at the top) to force a reload every run -- confirm intended.
service "remote_syslog" do
  action [:reload]
end
|
#
# Cookbook Name:: nginx-additions
# Recipe:: default
#
# Copyright (c) 2013 Nick Charlton <nick@nickcharlton.net>
#
# MIT Licensed.
#

# create the web root
# see also: http://wiki.apache.org/httpd/FileSystemPermissions
directory '/var/www' do
  owner 'www-data'
  # Set the group explicitly: mode 0750 denies "other", so without the
  # matching group the directory inherits whatever group root has and the
  # web server may be unable to read the docroot.
  group 'www-data'
  mode 00750
  action :create
end
/var/www should also have the group set.
#
# Cookbook Name:: nginx-additions
# Recipe:: default
#
# Copyright (c) 2013 Nick Charlton <nick@nickcharlton.net>
#
# MIT Licensed.
#
# create the web root
# see also: http://wiki.apache.org/httpd/FileSystemPermissions
# Owner and group are both www-data; mode 0750 keeps the docroot readable
# by the web server while denying access to other users.
directory '/var/www' do
owner 'www-data'
group 'www-data'
mode 00750
action :create
end
|
elasticsearch = "elasticsearch-#{node.elasticsearch[:version]}"

# Include the `curl` recipe, needed by `service status`
#
include_recipe "elasticsearch::curl"

# Create user and group
#
group node.elasticsearch[:user] do
  action :create
end

user node.elasticsearch[:user] do
  comment "ElasticSearch User"
  home "#{node.elasticsearch[:dir]}/elasticsearch"
  shell "/bin/bash"
  gid node.elasticsearch[:user]
  supports :manage_home => false
  action :create
end

# FIX: Work around the fact that Chef creates the directory even for `manage_home: false`
bash "remove the elasticsearch user home" do
  user 'root'
  code "rm -rf #{node.elasticsearch[:dir]}/elasticsearch"
  only_if "test -d #{node.elasticsearch[:dir]}/elasticsearch"
end

# Create ES directories
#
# Attributes are set one per line: the previous `owner ... and group ...`
# chaining only worked because each setter returns a truthy value -- `and`
# is a well-known Ruby precedence trap.
%w| conf_path data_path log_path pid_path |.each do |path|
  directory node.elasticsearch[path.to_sym] do
    owner node.elasticsearch[:user]
    group node.elasticsearch[:user]
    mode 0755
    recursive true
    action :create
  end
end

# Create service
#
template "/etc/init.d/elasticsearch" do
  source "elasticsearch.init.erb"
  owner 'root'
  mode 0755
end

service "elasticsearch" do
  supports :status => true, :restart => true
  action [ :enable ]
end

# Increase open file limits
#
bash "enable user limits" do
  user 'root'
  code <<-END.gsub(/^ /, '')
echo 'session required pam_limits.so' >> /etc/pam.d/su
END
  # Use ::File explicitly: inside a resource block a bare `File` resolves
  # to Chef::Resource::File, not Ruby's File class.
  not_if { ::File.read("/etc/pam.d/su").match(/^session required pam_limits\.so/) }
end

bash "increase limits for the elasticsearch user" do
  user 'root'
  code <<-END.gsub(/^ /, '')
echo '#{node.elasticsearch.fetch(:user, "elasticsearch")} - nofile #{node.elasticsearch[:limits][:nofile]}' >> /etc/security/limits.conf
echo '#{node.elasticsearch.fetch(:user, "elasticsearch")} - memlock #{node.elasticsearch[:limits][:memlock]}' >> /etc/security/limits.conf
END
  not_if { ::File.read("/etc/security/limits.conf").include?("#{node.elasticsearch.fetch(:user, "elasticsearch")} - nofile") }
end

# Download ES
#
remote_file "/tmp/elasticsearch-#{node.elasticsearch[:version]}.tar.gz" do
  source "https://github.com/downloads/elasticsearch/elasticsearch/#{elasticsearch}.tar.gz"
  action :create_if_missing
end

# Move to ES dir
#
bash "Move elasticsearch to #{node.elasticsearch[:dir]}/#{elasticsearch}" do
  user "root"
  cwd "/tmp"
  code <<-EOS
tar xfz /tmp/#{elasticsearch}.tar.gz
mv --force /tmp/#{elasticsearch} #{node.elasticsearch[:dir]}
  EOS
  # NOTE(review): `creates` holds a single value, so the second call below
  # overrides the first -- confirm which guard file is intended.
  creates "#{node.elasticsearch[:dir]}/#{elasticsearch}/lib/#{elasticsearch}.jar"
  creates "#{node.elasticsearch[:dir]}/#{elasticsearch}/bin/elasticsearch"
end

# Ensure proper permissions
#
bash "Ensure proper permissions for #{node.elasticsearch[:dir]}/#{elasticsearch}" do
  user "root"
  code <<-EOS
chown -R #{node.elasticsearch[:user]}:#{node.elasticsearch[:user]} #{node.elasticsearch[:dir]}/#{elasticsearch}
chmod -R 775 #{node.elasticsearch[:dir]}/#{elasticsearch}
  EOS
end

# Symlink binaries
#
%w| elasticsearch plugin |.each do |f|
  link "/usr/local/bin/#{f}" do
    owner node.elasticsearch[:user]
    group node.elasticsearch[:user]
    to "#{node.elasticsearch[:dir]}/#{elasticsearch}/bin/#{f}"
  end
end

# Create file with ES environment variables
#
template "elasticsearch-env.sh" do
  path "#{node.elasticsearch[:conf_path]}/elasticsearch-env.sh"
  source "elasticsearch-env.sh.erb"
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  mode 0755
  notifies :restart, resources(:service => 'elasticsearch')
end

# Create ES config file
#
template "elasticsearch.yml" do
  path "#{node.elasticsearch[:conf_path]}/elasticsearch.yml"
  source "elasticsearch.yml.erb"
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  mode 0755
  notifies :restart, resources(:service => 'elasticsearch')
end

# Symlink current version to main directory
#
link "#{node.elasticsearch[:dir]}/elasticsearch" do
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  to "#{node.elasticsearch[:dir]}/#{elasticsearch}"
end

# Add Monit configuration file
#
monitrc("elasticsearch", :pidfile => "#{node.elasticsearch[:pid_path]}/#{node.elasticsearch[:node_name].to_s.gsub(/\W/, '_')}.pid") \
  if node.recipes.include?('monit')
[FIX] Fix an error where Chef thinks we're using the `Chef::Resource::File` class instead of Ruby's built-in `File` inside resource guard blocks; qualify the calls as `::File`.
elasticsearch = "elasticsearch-#{node.elasticsearch[:version]}"

# Include the `curl` recipe, needed by `service status`
#
include_recipe "elasticsearch::curl"

# Create user and group
#
group node.elasticsearch[:user] do
  action :create
end

user node.elasticsearch[:user] do
  comment "ElasticSearch User"
  home "#{node.elasticsearch[:dir]}/elasticsearch"
  shell "/bin/bash"
  gid node.elasticsearch[:user]
  supports :manage_home => false
  action :create
end

# FIX: Work around the fact that Chef creates the directory even for `manage_home: false`
bash "remove the elasticsearch user home" do
  user 'root'
  code "rm -rf #{node.elasticsearch[:dir]}/elasticsearch"
  only_if "test -d #{node.elasticsearch[:dir]}/elasticsearch"
end

# Create ES directories
#
# Attributes are set one per line: the previous `owner ... and group ...`
# chaining only worked because each setter returns a truthy value -- `and`
# is a well-known Ruby precedence trap.
%w| conf_path data_path log_path pid_path |.each do |path|
  directory node.elasticsearch[path.to_sym] do
    owner node.elasticsearch[:user]
    group node.elasticsearch[:user]
    mode 0755
    recursive true
    action :create
  end
end

# Create service
#
template "/etc/init.d/elasticsearch" do
  source "elasticsearch.init.erb"
  owner 'root'
  mode 0755
end

service "elasticsearch" do
  supports :status => true, :restart => true
  action [ :enable ]
end

# Increase open file limits
#
# ::File is used deliberately: a bare `File` inside a resource block
# resolves to Chef::Resource::File.
bash "enable user limits" do
  user 'root'
  code <<-END.gsub(/^ /, '')
echo 'session required pam_limits.so' >> /etc/pam.d/su
END
  not_if { ::File.read("/etc/pam.d/su").match(/^session required pam_limits\.so/) }
end

bash "increase limits for the elasticsearch user" do
  user 'root'
  code <<-END.gsub(/^ /, '')
echo '#{node.elasticsearch.fetch(:user, "elasticsearch")} - nofile #{node.elasticsearch[:limits][:nofile]}' >> /etc/security/limits.conf
echo '#{node.elasticsearch.fetch(:user, "elasticsearch")} - memlock #{node.elasticsearch[:limits][:memlock]}' >> /etc/security/limits.conf
END
  not_if { ::File.read("/etc/security/limits.conf").include?("#{node.elasticsearch.fetch(:user, "elasticsearch")} - nofile") }
end

# Download ES
#
remote_file "/tmp/elasticsearch-#{node.elasticsearch[:version]}.tar.gz" do
  source "https://github.com/downloads/elasticsearch/elasticsearch/#{elasticsearch}.tar.gz"
  action :create_if_missing
end

# Move to ES dir
#
bash "Move elasticsearch to #{node.elasticsearch[:dir]}/#{elasticsearch}" do
  user "root"
  cwd "/tmp"
  code <<-EOS
tar xfz /tmp/#{elasticsearch}.tar.gz
mv --force /tmp/#{elasticsearch} #{node.elasticsearch[:dir]}
  EOS
  # NOTE(review): `creates` holds a single value, so the second call below
  # overrides the first -- confirm which guard file is intended.
  creates "#{node.elasticsearch[:dir]}/#{elasticsearch}/lib/#{elasticsearch}.jar"
  creates "#{node.elasticsearch[:dir]}/#{elasticsearch}/bin/elasticsearch"
end

# Ensure proper permissions
#
bash "Ensure proper permissions for #{node.elasticsearch[:dir]}/#{elasticsearch}" do
  user "root"
  code <<-EOS
chown -R #{node.elasticsearch[:user]}:#{node.elasticsearch[:user]} #{node.elasticsearch[:dir]}/#{elasticsearch}
chmod -R 775 #{node.elasticsearch[:dir]}/#{elasticsearch}
  EOS
end

# Symlink binaries
#
%w| elasticsearch plugin |.each do |f|
  link "/usr/local/bin/#{f}" do
    owner node.elasticsearch[:user]
    group node.elasticsearch[:user]
    to "#{node.elasticsearch[:dir]}/#{elasticsearch}/bin/#{f}"
  end
end

# Create file with ES environment variables
#
template "elasticsearch-env.sh" do
  path "#{node.elasticsearch[:conf_path]}/elasticsearch-env.sh"
  source "elasticsearch-env.sh.erb"
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  mode 0755
  notifies :restart, resources(:service => 'elasticsearch')
end

# Create ES config file
#
template "elasticsearch.yml" do
  path "#{node.elasticsearch[:conf_path]}/elasticsearch.yml"
  source "elasticsearch.yml.erb"
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  mode 0755
  notifies :restart, resources(:service => 'elasticsearch')
end

# Symlink current version to main directory
#
link "#{node.elasticsearch[:dir]}/elasticsearch" do
  owner node.elasticsearch[:user]
  group node.elasticsearch[:user]
  to "#{node.elasticsearch[:dir]}/#{elasticsearch}"
end

# Add Monit configuration file
#
monitrc("elasticsearch", :pidfile => "#{node.elasticsearch[:pid_path]}/#{node.elasticsearch[:node_name].to_s.gsub(/\W/, '_')}.pid") \
  if node.recipes.include?('monit')
|
#
# Cookbook Name:: gitlab
# Recipe:: install
#
gitlab = node['gitlab']
### Copy the example GitLab config
template File.join(gitlab['path'], 'config', 'gitlab.yml') do
source "gitlab.yml.erb"
user gitlab['user']
group gitlab['group']
variables({
:host => gitlab['host'],
:port => gitlab['port'],
:user => gitlab['user'],
:email_from => gitlab['email_from'],
:support_email => gitlab['support_email'],
:satellites_path => gitlab['satellites_path'],
:repos_path => gitlab['repos_path'],
:shell_path => gitlab['shell_path'],
:signup_enabled => gitlab['signup_enabled'],
:projects_limit => gitlab['projects_limit'],
:oauth_enabled => gitlab['oauth_enabled'],
:oauth_block_auto_created_users => gitlab['oauth_block_auto_created_users'],
:oauth_allow_single_sign_on => gitlab['oauth_allow_single_sign_on'],
:oauth_providers => gitlab['oauth_providers'],
:google_analytics_id => gitlab['extra']['google_analytics_id'],
:sign_in_text => gitlab['extra']['sign_in_text']
})
notifies :run, "bash[git config]", :immediately
end
### Make sure GitLab can write to the log/ and tmp/ directories
### Create directories for sockets/pids
### Create public/uploads directory otherwise backup will fail
%w{log tmp tmp/pids tmp/sockets public/uploads}.each do |path|
directory File.join(gitlab['path'], path) do
owner gitlab['user']
group gitlab['group']
mode 0755
not_if { File.exist?(File.join(gitlab['path'], path)) }
end
end
### Create directory for satellites
directory gitlab['satellites_path'] do
owner gitlab['user']
group gitlab['group']
not_if { File.exist?(gitlab['satellites_path']) }
end
### Unicorn config
template File.join(gitlab['path'], 'config', 'unicorn.rb') do
source "unicorn.rb.erb"
user gitlab['user']
group gitlab['group']
variables({
:unicorn_workers_number => gitlab['unicorn_workers_number'],
:unicorn_timeout => gitlab['unicorn_timeout']
})
end
### Enable Rack attack
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5 Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example rack attack config" do
block do
resource = Chef::Resource::File.new("rack_attack.rb", run_context)
resource.path File.join(gitlab['path'], 'config', 'initializers', 'rack_attack.rb')
resource.content IO.read(File.join(gitlab['path'], 'config', 'initializers', 'rack_attack.rb.example'))
resource.owner gitlab['user']
resource.group gitlab['group']
resource.mode 0644
resource.run_action :create
end
end
# SMTP email settings
if gitlab['smtp']['enabled']
smtp = gitlab['smtp']
template File.join(gitlab['path'], 'config', 'initializers', 'smtp_settings.rb') do
source "smtp_settings.rb.erb"
user gitlab['user']
group gitlab['group']
variables({
:address => smtp['address'],
:port => smtp['port'],
:username => smtp['username'],
:password => smtp['password'],
:domain => smtp['domain'],
:authentication => smtp['authentication'],
:enable_starttls_auto => smtp['enable_starttls_auto']
})
end
end
### Configure Git global settings for git user, useful when editing via web
bash "git config" do
code <<-EOS
git config --global user.name "GitLab"
git config --global user.email "gitlab@#{gitlab['host']}"
git config --global core.autocrlf input
EOS
user gitlab['user']
group gitlab['group']
environment('HOME' => gitlab['home'])
action :nothing
end
## Configure GitLab DB settings
template File.join(gitlab['path'], "config", "database.yml") do
source "database.yml.#{gitlab['database_adapter']}.erb"
user gitlab['user']
group gitlab['group']
variables({
:user => gitlab['database_user'],
:password => gitlab['database_password']
})
end
### db:setup
# Stamp files record that a rake task has already run for this environment;
# the un-suffixed *_old paths keep compatibility with stamps written before
# they became per-environment.
file_setup = File.join(gitlab['home'], ".gitlab_setup_#{gitlab['env']}")
file_setup_old = File.join(gitlab['home'], ".gitlab_setup")

execute "rake db:setup" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake db:setup RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exist? -- File.exists? is deprecated (removed in Ruby 3.2).
  not_if { File.exist?(file_setup) || File.exist?(file_setup_old) }
end

file file_setup do
  owner gitlab['user']
  group gitlab['group']
  action :create
end

### db:migrate
file_migrate = File.join(gitlab['home'], ".gitlab_migrate_#{gitlab['env']}")
file_migrate_old = File.join(gitlab['home'], ".gitlab_migrate")

execute "rake db:migrate" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake db:migrate RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  not_if { File.exist?(file_migrate) || File.exist?(file_migrate_old) }
end

### db:seed_fu
file_seed = File.join(gitlab['home'], ".gitlab_seed_#{gitlab['env']}")
file_seed_old = File.join(gitlab['home'], ".gitlab_seed")

execute "rake db:seed_fu" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake db:seed_fu RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  not_if { File.exist?(file_seed) || File.exist?(file_seed_old) }
end

file file_seed do
  owner gitlab['user']
  group gitlab['group']
  action :create
end
case gitlab['env']
when 'production'
## Setup Init Script
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5 Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example gitlab init config" do
block do
resource = Chef::Resource::File.new("gitlab_init", run_context)
resource.path "/etc/init.d/gitlab"
resource.content IO.read(File.join(gitlab['path'], "lib", "support", "init.d", "gitlab"))
resource.mode 0755
resource.run_action :create
if resource.updated?
self.notifies :run, resources(:execute => "set gitlab to start on boot"), :immediately
end
end
end
# Updates defaults so gitlab can boot on start. As per man pages of update-rc.d runs only if links do not exist
execute "set gitlab to start on boot" do
if platform_family?("debian")
command "update-rc.d gitlab defaults 21"
else
command "chkconfig --level 21 gitlab on"
end
action :nothing
end
## Setup logrotate
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5 Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example logrotate config" do
block do
resource = Chef::Resource::File.new("logrotate", run_context)
resource.path "/etc/logrotate.d/gitlab"
resource.content IO.read(File.join(gitlab['path'], "lib", "support", "logrotate", "gitlab"))
resource.mode 0644
resource.run_action :create
end
end
if gitlab['aws']['enabled']
template "aws.yml" do
owner gitlab['user']
group gitlab['group']
path "#{gitlab['path']}/config/aws.yml"
mode 0755
action :create_if_missing
variables({
:aws_access_key_id => gitlab['aws']['aws_access_key_id'],
:aws_secret_access_key => gitlab['aws']['aws_secret_access_key'],
:bucket => gitlab['aws']['bucket'],
:region => gitlab['aws']['region'],
:host => gitlab['aws']['host'],
:endpoint => gitlab['aws']['endpoint']
})
end
end
# Asset tasks run only on first install: guarded by the migrate stamp file.
# File.exist? replaces the deprecated File.exists? (removed in Ruby 3.2).
execute "rake assets:clean" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake assets:clean RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  not_if { File.exist?(file_migrate) }
end

execute "rake assets:precompile" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake assets:precompile RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  not_if { File.exist?(file_migrate) }
end

execute "rake cache:clear" do
  command <<-EOS
PATH="/usr/local/bin:$PATH"
bundle exec rake cache:clear RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  not_if { File.exist?(file_migrate) }
end
else
## For execute javascript test
include_recipe "phantomjs"
end
file file_migrate do
owner gitlab['user']
group gitlab['group']
action :create
end
Correct placement: move the `git config` resource so it is defined right after the Rack attack setup, before the database configuration.
#
# Cookbook Name:: gitlab
# Recipe:: install
#
gitlab = node['gitlab']
### Copy the example GitLab config
template File.join(gitlab['path'], 'config', 'gitlab.yml') do
source "gitlab.yml.erb"
user gitlab['user']
group gitlab['group']
variables({
:host => gitlab['host'],
:port => gitlab['port'],
:user => gitlab['user'],
:email_from => gitlab['email_from'],
:support_email => gitlab['support_email'],
:satellites_path => gitlab['satellites_path'],
:repos_path => gitlab['repos_path'],
:shell_path => gitlab['shell_path'],
:signup_enabled => gitlab['signup_enabled'],
:projects_limit => gitlab['projects_limit'],
:oauth_enabled => gitlab['oauth_enabled'],
:oauth_block_auto_created_users => gitlab['oauth_block_auto_created_users'],
:oauth_allow_single_sign_on => gitlab['oauth_allow_single_sign_on'],
:oauth_providers => gitlab['oauth_providers'],
:google_analytics_id => gitlab['extra']['google_analytics_id'],
:sign_in_text => gitlab['extra']['sign_in_text']
})
notifies :run, "bash[git config]", :immediately
end
### Make sure GitLab can write to the log/ and tmp/ directories
### Create directories for sockets/pids
### Create public/uploads directory otherwise backup will fail
%w{log tmp tmp/pids tmp/sockets public/uploads}.each do |path|
directory File.join(gitlab['path'], path) do
owner gitlab['user']
group gitlab['group']
mode 0755
not_if { File.exist?(File.join(gitlab['path'], path)) }
end
end
### Create directory for satellites
directory gitlab['satellites_path'] do
owner gitlab['user']
group gitlab['group']
not_if { File.exist?(gitlab['satellites_path']) }
end
### Unicorn config
template File.join(gitlab['path'], 'config', 'unicorn.rb') do
source "unicorn.rb.erb"
user gitlab['user']
group gitlab['group']
variables({
:unicorn_workers_number => gitlab['unicorn_workers_number'],
:unicorn_timeout => gitlab['unicorn_timeout']
})
end
### Enable Rack attack
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5. Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example rack attack config" do
block do
resource = Chef::Resource::File.new("rack_attack.rb", run_context)
resource.path File.join(gitlab['path'], 'config', 'initializers', 'rack_attack.rb')
resource.content IO.read(File.join(gitlab['path'], 'config', 'initializers', 'rack_attack.rb.example'))
resource.owner gitlab['user']
resource.group gitlab['group']
resource.mode 0644
resource.run_action :create
end
end
### Configure Git global settings for git user, useful when editing via web
bash "git config" do
code <<-EOS
git config --global user.name "GitLab"
git config --global user.email "gitlab@#{gitlab['host']}"
git config --global core.autocrlf input
EOS
user gitlab['user']
group gitlab['group']
environment('HOME' => gitlab['home'])
action :nothing
end
## Configure GitLab DB settings
template File.join(gitlab['path'], "config", "database.yml") do
source "database.yml.#{gitlab['database_adapter']}.erb"
user gitlab['user']
group gitlab['group']
variables({
:user => gitlab['database_user'],
:password => gitlab['database_password']
})
end
### db:setup — run once per environment; a marker file records completion
file_setup = File.join(gitlab['home'], ".gitlab_setup_#{gitlab['env']}")
file_setup_old = File.join(gitlab['home'], ".gitlab_setup") # legacy marker name
execute "rake db:setup" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake db:setup RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?,
  # matching the form already used elsewhere in this recipe.
  not_if { File.exist?(file_setup) || File.exist?(file_setup_old) }
end
# Drop the marker so subsequent converges skip db:setup.
file file_setup do
  owner gitlab['user']
  group gitlab['group']
  action :create
end
### db:migrate — guarded by per-env (and legacy) marker files
file_migrate = File.join(gitlab['home'], ".gitlab_migrate_#{gitlab['env']}")
file_migrate_old = File.join(gitlab['home'], ".gitlab_migrate")
execute "rake db:migrate" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake db:migrate RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?.
  not_if { File.exist?(file_migrate) || File.exist?(file_migrate_old) }
end
### db:seed_fu — seed the database once, recorded via a marker file
file_seed = File.join(gitlab['home'], ".gitlab_seed_#{gitlab['env']}")
file_seed_old = File.join(gitlab['home'], ".gitlab_seed")
execute "rake db:seed_fu" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake db:seed_fu RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?.
  not_if { File.exist?(file_seed) || File.exist?(file_seed_old) }
end
# Drop the marker so subsequent converges skip seeding.
file file_seed do
  owner gitlab['user']
  group gitlab['group']
  action :create
end
case gitlab['env']
when 'production'
## Setup Init Script
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5. Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example gitlab init config" do
block do
resource = Chef::Resource::File.new("gitlab_init", run_context)
resource.path "/etc/init.d/gitlab"
resource.content IO.read(File.join(gitlab['path'], "lib", "support", "init.d", "gitlab"))
resource.mode 0755
resource.run_action :create
if resource.updated?
self.notifies :run, resources(:execute => "set gitlab to start on boot"), :immediately
end
end
end
# Updates defaults so gitlab can boot on start. As per man pages of update-rc.d runs only if links do not exist
execute "set gitlab to start on boot" do
if platform_family?("debian")
command "update-rc.d gitlab defaults 21"
else
command "chkconfig --level 21 gitlab on"
end
action :nothing
end
## Setup logrotate
# Creating the file this way for the following reasons
# 1. Chef 11.4.0 must be used to keep support for AWS OpsWorks
# 2. Using file resource is not an option because it is ran at compilation time
# and at that point the file doesn't exist
# 3. Using cookbook_file resource is not an option because we do not want to include the file
# in the cookbook for maintenance reasons. Same for template resource.
# 4. Using remote_file resource is not an option because Chef 11.4.0 connects to remote URI
# see https://github.com/opscode/chef/blob/11.4.4/lib/chef/resource/remote_file.rb#L63
# 5. Using bash and execute resource is not an option because they would run at every chef run
# and supplying a restriction in the form of "not_if" would prevent an update of a file
# if there is any
# Ruby block is compiled at compilation time but only executed during execution time
# allowing us to create a resource.
ruby_block "Copy from example logrotate config" do
block do
resource = Chef::Resource::File.new("logrotate", run_context)
resource.path "/etc/logrotate.d/gitlab"
resource.content IO.read(File.join(gitlab['path'], "lib", "support", "logrotate", "gitlab"))
resource.mode 0644
resource.run_action :create
end
end
# SMTP email settings
if gitlab['smtp']['enabled']
smtp = gitlab['smtp']
template File.join(gitlab['path'], 'config', 'initializers', 'smtp_settings.rb') do
source "smtp_settings.rb.erb"
user gitlab['user']
group gitlab['group']
variables({
:address => smtp['address'],
:port => smtp['port'],
:username => smtp['username'],
:password => smtp['password'],
:domain => smtp['domain'],
:authentication => smtp['authentication'],
:enable_starttls_auto => smtp['enable_starttls_auto']
})
end
end
if gitlab['aws']['enabled']
template "aws.yml" do
owner gitlab['user']
group gitlab['group']
path "#{gitlab['path']}/config/aws.yml"
mode 0755
action :create_if_missing
variables({
:aws_access_key_id => gitlab['aws']['aws_access_key_id'],
:aws_secret_access_key => gitlab['aws']['aws_secret_access_key'],
:bucket => gitlab['aws']['bucket'],
:region => gitlab['aws']['region'],
:host => gitlab['aws']['host'],
:endpoint => gitlab['aws']['endpoint']
})
end
end
# Clean stale compiled assets; skipped once the migrate marker exists.
execute "rake assets:clean" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake assets:clean RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?.
  not_if { File.exist?(file_migrate) }
end
# Precompile production assets; skipped once the migrate marker exists.
execute "rake assets:precompile" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake assets:precompile RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?.
  not_if { File.exist?(file_migrate) }
end
# Clear the Rails cache; skipped once the migrate marker exists.
execute "rake cache:clear" do
  command <<-EOS
    PATH="/usr/local/bin:$PATH"
    bundle exec rake cache:clear RAILS_ENV=#{gitlab['env']}
  EOS
  cwd gitlab['path']
  user gitlab['user']
  group gitlab['group']
  # File.exists? is a deprecated alias (removed in Ruby 3.2); use File.exist?.
  not_if { File.exist?(file_migrate) }
end
else
## For execute javascript test
include_recipe "phantomjs"
end
# Record that migrations ran for this environment so the guarded
# execute resources above are skipped on subsequent converges.
file file_migrate do
  owner gitlab['user']
  group gitlab['group']
  action :create
end
|
#
# Author:: Seth Chisamore (<schisamo@opscode.com>)
# Cookbook Name:: php-fpm
# Recipe:: package
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'php-fpm::repository' unless node['php-fpm']['skip_repository_install']
if node['php-fpm']['package_name'].nil?
if platform_family?("rhel")
php_fpm_package_name = "php-fpm"
else
php_fpm_package_name = "php5-fpm"
end
else
php_fpm_package_name = node['php-fpm']['package_name']
end
package php_fpm_package_name do
action :upgrade
end
if node['php-fpm']['service_name'].nil?
php_fpm_service_name = php_fpm_package_name
else
php_fpm_service_name = node['php-fpm']['service_name']
end
service_provider = nil
if node['platform'] == 'ubuntu' and node['platform_version'].to_f >= 13.10
service_provider = ::Chef::Provider::Service::Upstart
end
service "php-fpm" do
provider service_provider if service_provider
service_name php_fpm_service_name
supports :start => true, :stop => true, :restart => true, :reload => true
action [ :enable, :start ]
end
Include default apt recipe when installing on debian platforms
#
# Author:: Seth Chisamore (<schisamo@opscode.com>)
# Cookbook Name:: php-fpm
# Recipe:: package
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
include_recipe 'php-fpm::repository' unless node['php-fpm']['skip_repository_install']
include_recipe 'apt::default' if node['platform_family'] == 'debian'
if node['php-fpm']['package_name'].nil?
if platform_family?("rhel")
php_fpm_package_name = "php-fpm"
else
php_fpm_package_name = "php5-fpm"
end
else
php_fpm_package_name = node['php-fpm']['package_name']
end
package php_fpm_package_name do
action :upgrade
end
if node['php-fpm']['service_name'].nil?
php_fpm_service_name = php_fpm_package_name
else
php_fpm_service_name = node['php-fpm']['service_name']
end
service_provider = nil
if node['platform'] == 'ubuntu' and node['platform_version'].to_f >= 13.10
service_provider = ::Chef::Provider::Service::Upstart
end
service "php-fpm" do
provider service_provider if service_provider
service_name php_fpm_service_name
supports :start => true, :stop => true, :restart => true, :reload => true
action [ :enable, :start ]
end
|
#
# Cookbook Name:: haproxy-ng
# Recipe:: install
#
# Copyright 2015 Nathan Williams
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
case node['haproxy']['install_method']
when 'package'
package 'haproxy'
when 'ppa'
apt_repository 'haproxy' do
uri node['haproxy']['ppa']['uri']
distribution node['lsb']['codename']
components ['main']
keyserver 'keyserver.ubuntu.com'
key node['haproxy']['ppa']['key']
end
package 'haproxy'
when 'source'
src = node['haproxy']['source']
src['dependencies'].each do |dep|
package dep
end
directory '/etc/haproxy'
user 'haproxy' do
home '/var/lib/haproxy'
shell '/usr/sbin/nologin'
system true
end
directory '/var/lib/haproxy' do
owner 'haproxy'
group 'haproxy'
end
ark 'haproxy' do
url src['url']
version src['url'].match(/\d\.\d/).to_s
checksum src['checksum']
make_opts src['make_args'].map { |k, v| "#{k}=#{v}" }
action :install_with_make
end
cookbook_file '/etc/init/haproxy.conf' do
source 'haproxy.conf'
mode '0644'
only_if { File.directory?('/etc/init') }
end
cookbook_file '/etc/systemd/system/haproxy.service' do
source 'haproxy.service'
only_if { File.directory?('/etc/systemd/system') }
not_if { File.directory?('/etc/init') }
end
else
Chef::Log.warn 'Unknown install_method for haproxy. Skipping install!'
end
fix version matching to avoid conflicting source dirs
#
# Cookbook Name:: haproxy-ng
# Recipe:: install
#
# Copyright 2015 Nathan Williams
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
case node['haproxy']['install_method']
when 'package'
package 'haproxy'
when 'ppa'
apt_repository 'haproxy' do
uri node['haproxy']['ppa']['uri']
distribution node['lsb']['codename']
components ['main']
keyserver 'keyserver.ubuntu.com'
key node['haproxy']['ppa']['key']
end
package 'haproxy'
when 'source'
src = node['haproxy']['source']
src['dependencies'].each do |dep|
package dep
end
directory '/etc/haproxy'
user 'haproxy' do
home '/var/lib/haproxy'
shell '/usr/sbin/nologin'
system true
end
directory '/var/lib/haproxy' do
owner 'haproxy'
group 'haproxy'
end
ark 'haproxy' do
url src['url']
version src['url'].match(/(\d+\.?){2}\d+/).to_s
checksum src['checksum']
make_opts src['make_args'].map { |k, v| "#{k}=#{v}" }
action :install_with_make
end
cookbook_file '/etc/init/haproxy.conf' do
source 'haproxy.conf'
mode '0644'
only_if { File.directory?('/etc/init') }
end
cookbook_file '/etc/systemd/system/haproxy.service' do
source 'haproxy.service'
only_if { File.directory?('/etc/systemd/system') }
not_if { File.directory?('/etc/init') }
end
else
Chef::Log.warn 'Unknown install_method for haproxy. Skipping install!'
end
|
#
# Cookbook Name:: cdap
# Recipe:: security_realm_file
#
# Copyright © 2013-2015 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Manage Authentication realmfile
if node['cdap']['cdap_site']['security.authentication.basic.realmfile']
  realmfile = node['cdap']['cdap_site']['security.authentication.basic.realmfile']
  realmdir = ::File.dirname(realmfile)
  # Ensure parent directory exists.
  # Fix: the guard previously tested the realmfile path itself with the
  # deprecated Dir.exists? (removed in Ruby 3.2); it must check the parent
  # directory, as the comment intends.
  unless ::Dir.exist?(realmdir)
    directory realmdir do
      mode '0700'
      owner 'cdap'
      group 'cdap'
      action :create
      recursive true
    end
  end
  # Create the realmfile from key/value pairs rendered as "key: value" lines.
  template realmfile do
    source 'generic-kv-colon.erb'
    mode '0600'
    owner 'cdap'
    group 'cdap'
    variables options: node['cdap']['security']['realmfile']
    action :create
  end
end
Update year
#
# Cookbook Name:: cdap
# Recipe:: security_realm_file
#
# Copyright © 2013-2016 Cask Data, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Manage Authentication realmfile
if node['cdap']['cdap_site']['security.authentication.basic.realmfile']
  realmfile = node['cdap']['cdap_site']['security.authentication.basic.realmfile']
  realmdir = ::File.dirname(realmfile)
  # Ensure parent directory exists.
  # Fix: the guard previously tested the realmfile path itself with the
  # deprecated Dir.exists? (removed in Ruby 3.2); it must check the parent
  # directory, as the comment intends.
  unless ::Dir.exist?(realmdir)
    directory realmdir do
      mode '0700'
      owner 'cdap'
      group 'cdap'
      action :create
      recursive true
    end
  end
  # Create the realmfile from key/value pairs rendered as "key: value" lines.
  template realmfile do
    source 'generic-kv-colon.erb'
    mode '0600'
    owner 'cdap'
    group 'cdap'
    variables options: node['cdap']['security']['realmfile']
    action :create
  end
end
|
file "/etc/prelink.conf.d/ruby.conf" do
content "-b /usr/bin/ruby\n"
owner "root"
group "root"
mode "0644"
end
ensuring /etc/prelink.conf.d exists
directory "/etc/prelink.conf.d"
file "/etc/prelink.conf.d/ruby.conf" do
content "-b /usr/bin/ruby\n"
owner "root"
group "root"
mode "0644"
end
|
node.set[:vagrant][:url] = "https://dl.bintray.com/mitchellh/vagrant/vagrant_1.7.2_x86_64.deb"
node.set[:vagrant][:checksum] = "9d7f1c587134011e2d5429eb21b6c0e95487f52e6d6d47c03ecc82cbeee73968"
include_recipe "vagrant"
install_vagrant_plugin "vagrant-cachier", "1.2.0"
install_vagrant_plugin "vagrant-berkshelf", "4.0.2"
install_vagrant_plugin "vagrant-omnibus", "1.4.1"
install_vagrant_plugin "vagrant-toplevel-cookbooks", "0.2.3"
install_vagrant_plugin "vagrant-lxc", "1.1.0"
# vagrant-lxc setup
%w{ lxc lxc-templates cgroup-lite redir bridge-utils }.each do |pkg|
package pkg
end
bash_profile "set-vagrant-default-provider" do
user devbox_user
content "export VAGRANT_DEFAULT_PROVIDER=lxc"
end
# Install the vagrant-lxc sudoers snippet once; the generated file is the guard.
bash "add vagrant-lxc sudoers permissions" do
  environment devbox_user_env
  code "vagrant lxc sudoers"
  # ::File.exists? is a deprecated alias (removed in Ruby 3.2); use ::File.exist?.
  not_if { ::File.exist? "/etc/sudoers.d/vagrant-lxc" }
end
template "#{devbox_userhome}/.vagrant.d/Vagrantfile" do
source "Vagrantfile.erb"
owner devbox_user
group devbox_group
mode "0644"
end
set KITCHEN_LOCAL_YAML to use the lxc specific settings after splitting the kitchenci configs (see tknerr/sample-toplevel-cookbook#9)
node.set[:vagrant][:url] = "https://dl.bintray.com/mitchellh/vagrant/vagrant_1.7.2_x86_64.deb"
node.set[:vagrant][:checksum] = "9d7f1c587134011e2d5429eb21b6c0e95487f52e6d6d47c03ecc82cbeee73968"
include_recipe "vagrant"
install_vagrant_plugin "vagrant-cachier", "1.2.0"
install_vagrant_plugin "vagrant-berkshelf", "4.0.2"
install_vagrant_plugin "vagrant-omnibus", "1.4.1"
install_vagrant_plugin "vagrant-toplevel-cookbooks", "0.2.3"
install_vagrant_plugin "vagrant-lxc", "1.1.0"
# vagrant-lxc setup
%w{ lxc lxc-templates cgroup-lite redir bridge-utils }.each do |pkg|
package pkg
end
bash_profile "set-vagrant-default-provider" do
user devbox_user
content "export VAGRANT_DEFAULT_PROVIDER=lxc"
end
bash_profile "set-kitchen-local-yaml" do
user devbox_user
content "export KITCHEN_LOCAL_YAML=.kitchen.lxc.yml"
end
# Install the vagrant-lxc sudoers snippet once; the generated file is the guard.
bash "add vagrant-lxc sudoers permissions" do
  environment devbox_user_env
  code "vagrant lxc sudoers"
  # ::File.exists? is a deprecated alias (removed in Ruby 3.2); use ::File.exist?.
  not_if { ::File.exist? "/etc/sudoers.d/vagrant-lxc" }
end
template "#{devbox_userhome}/.vagrant.d/Vagrantfile" do
source "Vagrantfile.erb"
owner devbox_user
group devbox_group
mode "0644"
end
|
# -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'redis/i18n/version'
Gem::Specification.new do |s|
s.name = 'redis-i18n'
s.version = Redis::I18n::VERSION
s.authors = ['Luca Guidi']
s.email = ['me@lucaguidi.com']
s.homepage = 'http://redis-store.org/redis-i18n'
s.summary = %q{Redis store for i18n}
s.description = %q{Redis backed store for i18n}
s.license = 'MIT'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
s.add_runtime_dependency 'redis-store', '~> 1'
s.add_runtime_dependency 'i18n', '~> 0.7.0'
s.add_development_dependency 'rake', '~> 10'
s.add_development_dependency 'bundler', '~> 2'
s.add_development_dependency 'mocha', '~> 0.14.0'
s.add_development_dependency 'minitest', '~> 5'
s.add_development_dependency 'redis-store-testing'
end
Update to Rake 12
# -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'redis/i18n/version'
Gem::Specification.new do |s|
s.name = 'redis-i18n'
s.version = Redis::I18n::VERSION
s.authors = ['Luca Guidi']
s.email = ['me@lucaguidi.com']
s.homepage = 'http://redis-store.org/redis-i18n'
s.summary = %q{Redis store for i18n}
s.description = %q{Redis backed store for i18n}
s.license = 'MIT'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
s.require_paths = ['lib']
s.add_runtime_dependency 'redis-store', '~> 1'
s.add_runtime_dependency 'i18n', '~> 0.7.0'
s.add_development_dependency 'rake', '>= 12.3.3'
s.add_development_dependency 'bundler', '~> 2'
s.add_development_dependency 'mocha', '~> 0.14.0'
s.add_development_dependency 'minitest', '~> 5'
s.add_development_dependency 'redis-store-testing'
end
|
Created the db
# encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150914234110) do
create_table "professors", force: :cascade do |t|
t.string "first"
t.string "last"
t.string "university"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "ratings", force: :cascade do |t|
t.string "course"
t.string "comment"
t.integer "professor_id"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "ratings", ["professor_id"], name: "index_ratings_on_professor_id"
end
|
require 'date'
require File.dirname(__FILE__) + '/../../spec_helper'
describe "Date constants" do
it "should define ITALY" do
Date::ITALY.should == 2299161 # 1582-10-15
end
it "should define ENGLAND" do
Date::ENGLAND.should == 2361222 # 1752-09-14
end
ruby_bug "#", "1.8.6.114" do
it "should define JULIAN" do
(Date::JULIAN <=> Date::Infinity.new).should == 0
end
end
ruby_bug "#", "1.8.6.114" do
it "should define GREGORIAN" do
(Date::GREGORIAN <=> -Date::Infinity.new).should == 0
end
end
it "should define MONTHNAMES" do
Date::MONTHNAMES.should == [nil] + %w(January February March April May June July
August September October November December)
end
it "should define DAYNAMES" do
Date::DAYNAMES.should == %w(Sunday Monday Tuesday Wednesday Thursday Friday Saturday)
end
it "should define ABBR_MONTHNAMES" do
Date::ABBR_DAYNAMES.should == %w(Sun Mon Tue Wed Thu Fri Sat)
end
end
Updating spec guards for Date::GREGORIAN.
require 'date'
require File.dirname(__FILE__) + '/../../spec_helper'
describe "Date constants" do
it "should define ITALY" do
Date::ITALY.should == 2299161 # 1582-10-15
end
it "should define ENGLAND" do
Date::ENGLAND.should == 2361222 # 1752-09-14
end
ruby_bug "#", "1.8.6.114" do
it "should define JULIAN" do
(Date::JULIAN <=> Date::Infinity.new).should == 0
end
end
# Fixed in 1.8.7
ruby_bug "#", "1.8.6" do
it "should define GREGORIAN" do
(Date::GREGORIAN <=> -Date::Infinity.new).should == 0
end
end
it "should define MONTHNAMES" do
Date::MONTHNAMES.should == [nil] + %w(January February March April May June July
August September October November December)
end
it "should define DAYNAMES" do
Date::DAYNAMES.should == %w(Sunday Monday Tuesday Wednesday Thursday Friday Saturday)
end
it "should define ABBR_MONTHNAMES" do
Date::ABBR_DAYNAMES.should == %w(Sun Mon Tue Wed Thu Fri Sat)
end
end
|
Solve problem
# The sum of the primes below 10 is 2 + 3 + 5 + 7 = 17.
# Find the sum of all the primes below two million.
require 'prime'
solution = Prime.each(2_000_000).reduce(:+)
p solution
# => 142913828922
|
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/rails_underscore_templates/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ["Christopher Boone"]
gem.email = ["chris@hypsometry.com"]
gem.description = %q{TODO: Write a gem description}
gem.summary = %q{TODO: Write a gem summary}
gem.homepage = ""
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = "rails_underscore_templates"
gem.require_paths = ["lib"]
gem.version = RailsUnderscoreTemplates::VERSION
end
Add basic gemspec details
# -*- encoding: utf-8 -*-
require File.expand_path('../lib/rails_underscore_templates/version', __FILE__)
Gem::Specification.new do |gem|
gem.authors = ['Christopher Boone']
gem.email = ['info@hypsometry.com']
gem.description = %q{Underscore template support for Rails asset pipeline}
gem.summary = %q{Underscore templates for Rails}
gem.homepage = 'http://github.com/cboone/rails_underscore_templates'
gem.files = `git ls-files`.split($\)
gem.executables = gem.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
gem.name = 'rails_underscore_templates'
gem.require_paths = ['lib']
gem.version = RailsUnderscoreTemplates::VERSION
gem.add_dependency 'actionpack', '>= 3.1'
end
|
#!/usr/bin/env ruby
require 'io/console'
module Argon
class Editor
def initialize(filename)
@filename = filename
data = read_file_data
@line_sep = data["\r\n"] || "\n"
lines = data.split(line_sep)
@buffer = Buffer.new(lines)
@cursor = Cursor.new
@snapshots = []
end
def self.open(filename)
new(filename).run
end
def run
IO.console.raw do
reset_screen
loop do
render
handle_input
end
end
end
private
attr_reader :buffer, :blank_buffer, :cursor, :filename, :snapshots, :line_sep
def render
clear_screen
print buffer
ANSI.move_cursor(cursor.row, cursor.col)
end
def handle_input
char = $stdin.getc
case char
when "\cs" then save
when "\cq" then quit
when "\r" then enter
when "\c_" then undo
when "\cp" then up
when "\cn" then down
when "\cb" then left
when "\cf" then right
when "\ca" then line_home
when "\ce" then line_end
when "\ch" then backspace
when "\cd" then delete
when "\cu" then delete_before
when "\ck" then delete_after
else
insert_char(char) if char =~ /[[:print:]]/
end
end
def quit
reset_screen
exit
end
def up
@cursor = cursor.up(buffer)
end
def down
@cursor = cursor.down(buffer)
end
def left
@cursor = cursor.left(buffer)
end
def right
@cursor = cursor.right(buffer)
end
def backspace
return if cursor.beginning_of_file?
store_snapshot
if cursor.col == 0
cursor_left = buffer.lines[cursor.row].size + 1
@buffer = buffer.join_lines(cursor.row - 1)
cursor_left.times { @cursor = cursor.left(buffer) }
else
@buffer = buffer.delete_char(cursor.row, cursor.col - 1)
@cursor = cursor.left(buffer)
end
end
def delete
return if cursor.end_of_file?(buffer)
store_snapshot
if cursor.end_of_line?(buffer)
@buffer = buffer.join_lines(cursor.row)
else
@buffer = buffer.delete_char(cursor.row, cursor.col)
end
end
def data
data = buffer.lines.join(line_sep).chomp(line_sep)
data << line_sep unless data.empty?
data
end
def save
open(filename, 'w') { |f| f << data }
end
def enter
store_snapshot
@buffer = buffer.break_line(cursor.row, cursor.col)
@cursor = cursor.enter(buffer)
end
def undo
return if snapshots.empty?
@buffer, @cursor = snapshots.pop
end
def insert_char(char)
store_snapshot
@buffer = buffer.insert_char(char, cursor.row, cursor.col)
@cursor = cursor.right(buffer)
end
def store_snapshot
snapshots << [buffer, cursor]
end
def line_home
@cursor = cursor.line_home
end
def line_end
@cursor = cursor.line_end(buffer)
end
def delete_before
store_snapshot
@buffer = buffer.delete_before(cursor.row, cursor.col)
line_home
end
def delete_after
store_snapshot
@buffer = buffer.delete_after(cursor.row, cursor.col)
end
def reset_screen
ANSI.move_cursor(0, 0)
ANSI.clear_screen
end
def clear_screen
ANSI.move_cursor(0, 0)
if blank_buffer
print blank_buffer # overwrite screen with spaces
ANSI.move_cursor(0, 0)
end
blank_lines = buffer.lines.map {|line| ' ' * line.size }
@blank_buffer = Buffer.new(blank_lines)
end
def read_file_data
if File.exist?(filename)
File.read(filename)
else
''
end
end
end
class Buffer
attr_reader :lines
def initialize(lines)
@lines = lines
end
def to_s
lines.map {|line| "#{line}\r\n" }.join
end
def lines_count
lines.size
end
def line_length(row)
lines[row].size
end
def delete_char(row, col)
with_copy { |b| b.lines[row].slice!(col) }
end
def insert_char(char, row, col)
with_copy do |b|
b.lines[row] ||= ''
b.lines[row].insert(col, char)
end
end
def break_line(row, col)
with_copy do |b|
b.lines[row..row] = [b.lines[row][0...col], b.lines[row][col..-1]]
end
end
def delete_before(row, col)
with_copy { |b| b.lines[row][0...col] = '' }
end
def delete_after(row, col)
with_copy { |b| b.lines[row][col..-1] = '' }
end
def join_lines(row)
with_copy { |b| b.lines[row..row + 1] = b.lines[row..row + 1].join }
end
private
def with_copy
Buffer.new(lines.map(&:dup)).tap { |b| yield b }
end
end
class Cursor
attr_reader :row, :col
def initialize(row = 0, col = 0)
@row = row
@col = col
end
def up(buffer)
Cursor.new(row - 1, col).clamp(buffer)
end
def down(buffer)
Cursor.new(row + 1, col).clamp(buffer)
end
def left(buffer)
return Cursor.new(row, col - 1) if col > 0
return self if row == 0
Cursor.new(row - 1, buffer.line_length(row - 1))
end
def right(buffer)
return Cursor.new(row, col + 1) unless end_of_line?(buffer)
return self if final_line?(buffer)
Cursor.new(row + 1, 0)
end
def clamp(buffer)
@row = row.clamp(0, buffer.lines_count - 1)
@col = col.clamp(0, buffer.line_length(row))
self
end
def enter(buffer)
down(buffer).line_home
end
def line_home
Cursor.new(row, 0)
end
def line_end(buffer)
Cursor.new(row, buffer.line_length(row))
end
def end_of_line?(buffer)
col == buffer.line_length(row)
end
def final_line?(buffer)
row == buffer.lines_count - 1
end
def end_of_file?(buffer)
final_line?(buffer) && end_of_line?(buffer)
end
def beginning_of_file?
row == 0 && col == 0
end
end
module ANSI
def self.clear_screen
print "\e[J"
end
def self.move_cursor(row, col)
print "\e[#{row + 1};#{col + 1}H"
end
end
end
# Entry point: open the file named by the first CLI argument.
# Fix: ARGV[0] returns nil when no argument is given (Array#[] never raises),
# so the rescue below could never fire and the editor crashed later with a
# TypeError instead. ARGV.fetch(0) raises IndexError as the rescue expects,
# turning a missing argument into the usage message.
begin
  Argon::Editor.open(ARGV.fetch(0))
rescue IndexError
  puts "Usage: #$0 file"
end
FEATURE: History
#!/usr/bin/env ruby
require 'io/console'
module Argon
class Editor
  # Terminal text editor for a single file: wires together the Buffer
  # (text), Cursor (caret position) and History (undo/redo snapshots),
  # and drives the raw-mode read-render-dispatch loop.
  def initialize(filename)
    @filename = filename
    data = read_file_data
    # Detect and preserve the file's newline convention so saving
    # round-trips CRLF files unchanged.
    @line_sep = data["\r\n"] || "\n"
    lines = data.split(line_sep)
    @buffer = Buffer.new(lines)
    @cursor = Cursor.new
    @history = History.new
  end

  # Construct an editor for +filename+ and immediately enter its loop.
  def self.open(filename)
    new(filename).run
  end

  # Main loop. Raw mode disables echo/line-buffering so single
  # keystrokes arrive immediately; IO.console restores the terminal
  # state when the block exits.
  def run
    IO.console.raw do
      reset_screen
      loop do
        render
        handle_input
      end
    end
  end

  private

  # NOTE(review): :snapshots was removed from this list — no @snapshots
  # ivar is ever assigned on Editor (History owns the snapshots).
  attr_reader :buffer, :blank_buffer, :cursor, :history, :filename,
              :line_sep

  # Repaint the buffer and park the terminal cursor at the caret.
  def render
    clear_screen
    print buffer
    ANSI.move_cursor(cursor.row, cursor.col)
  end

  # Dispatch one keystroke: control characters ("\cX") map to editor
  # commands; any other printable character is inserted at the caret.
  def handle_input
    char = $stdin.getc
    case char
    when "\cs" then save
    when "\cq" then quit
    when "\r" then enter
    when "\cp" then up
    when "\cn" then down
    when "\cb" then left
    when "\cf" then right
    when "\ca" then line_home
    when "\ce" then line_end
    when "\ch" then backspace
    when "\cd" then delete
    when "\cu" then delete_before
    when "\ck" then delete_after
    when "\c_" then history_undo
    when "\cr" then history_redo
    else
      insert_char(char) if char =~ /[[:print:]]/
    end
  end

  def quit
    reset_screen
    exit
  end

  def up
    @cursor = cursor.up(buffer)
  end

  def down
    @cursor = cursor.down(buffer)
  end

  def left
    @cursor = cursor.left(buffer)
  end

  def right
    @cursor = cursor.right(buffer)
  end

  # Delete the character before the caret; at column 0 this joins the
  # current line onto the previous one instead.
  def backspace
    return if cursor.beginning_of_file?
    store_snapshot
    if cursor.col == 0
      # +1 accounts for stepping over the removed line separator.
      cursor_left = buffer.lines[cursor.row].size + 1
      @buffer = buffer.join_lines(cursor.row - 1)
      cursor_left.times { @cursor = cursor.left(buffer) }
    else
      @buffer = buffer.delete_char(cursor.row, cursor.col - 1)
      @cursor = cursor.left(buffer)
    end
  end

  # Delete the character under the caret; at end of line this joins the
  # next line onto the current one.
  def delete
    return if cursor.end_of_file?(buffer)
    store_snapshot
    if cursor.end_of_line?(buffer)
      @buffer = buffer.join_lines(cursor.row)
    else
      @buffer = buffer.delete_char(cursor.row, cursor.col)
    end
  end

  # Serialize the buffer with the file's original separator, ensuring
  # exactly one trailing separator for non-empty content.
  def data
    data = buffer.lines.join(line_sep).chomp(line_sep)
    data << line_sep unless data.empty?
    data
  end

  # File.open (rather than Kernel#open) so a filename beginning with
  # "|" is never treated as a command to execute.
  def save
    File.open(filename, 'w') { |f| f << data }
  end

  def enter
    store_snapshot
    @buffer = buffer.break_line(cursor.row, cursor.col)
    @cursor = cursor.enter(buffer)
  end

  # Before the first undo of a run, capture the live state without
  # advancing the history pointer so redo can return to it.
  def history_undo
    return unless history.can_undo?
    store_snapshot(false) unless history.can_redo?
    @buffer, @cursor = history.undo
  end

  def history_redo
    return unless history.can_redo?
    @buffer, @cursor = history.redo
  end

  def insert_char(char)
    store_snapshot
    @buffer = buffer.insert_char(char, cursor.row, cursor.col)
    @cursor = cursor.right(buffer)
  end

  # Record the current buffer/cursor pair for undo. FIX: the parameter
  # list was written with square brackets (`store_snapshot[advance =
  # true]`), which is a Ruby syntax error; it must use parentheses.
  def store_snapshot(advance = true)
    history.save([buffer, cursor], advance)
  end

  def line_home
    @cursor = cursor.line_home
  end

  def line_end
    @cursor = cursor.line_end(buffer)
  end

  def delete_before
    store_snapshot
    @buffer = buffer.delete_before(cursor.row, cursor.col)
    line_home
  end

  def delete_after
    store_snapshot
    @buffer = buffer.delete_after(cursor.row, cursor.col)
  end

  def reset_screen
    ANSI.move_cursor(0, 0)
    ANSI.clear_screen
  end

  # Overwrite the previous frame with spaces before drawing the next one
  # (presumably to avoid the flash of a full ANSI clear — TODO confirm),
  # then remember a blank copy of the current frame for next time.
  def clear_screen
    ANSI.move_cursor(0, 0)
    if blank_buffer
      print blank_buffer # overwrite screen with spaces
      ANSI.move_cursor(0, 0)
    end
    blank_lines = buffer.lines.map { |line| ' ' * line.size }
    @blank_buffer = Buffer.new(blank_lines)
  end

  # Missing files edit as empty documents rather than raising.
  def read_file_data
    if File.exist?(filename)
      File.read(filename)
    else
      ''
    end
  end
end
class Buffer
  # Persistent-style line store: every editing operation duplicates the
  # lines into a brand-new Buffer, so the receiver is never mutated.
  attr_reader :lines

  def initialize(lines)
    @lines = lines
  end

  # Render for the terminal: each line gets an explicit CRLF because the
  # editor draws in raw mode.
  def to_s
    lines.reduce('') { |out, line| out << line << "\r\n" }
  end

  def lines_count
    lines.length
  end

  def line_length(row)
    lines[row].length
  end

  def delete_char(row, col)
    with_copy { |copy| copy.lines[row].slice!(col) }
  end

  def insert_char(char, row, col)
    with_copy do |copy|
      target = (copy.lines[row] ||= '')
      target.insert(col, char)
    end
  end

  # Split the line at +row+ into two lines at column +col+.
  def break_line(row, col)
    with_copy do |copy|
      line = copy.lines[row]
      copy.lines[row..row] = [line[0...col], line[col..-1]]
    end
  end

  # Drop everything before +col+ on line +row+ (Ctrl-U).
  def delete_before(row, col)
    with_copy { |copy| copy.lines[row].slice!(0, col) }
  end

  # Drop everything from +col+ to end of line +row+ (Ctrl-K).
  def delete_after(row, col)
    with_copy { |copy| copy.lines[row][col..-1] = '' }
  end

  # Merge line +row+ with the line below it.
  def join_lines(row)
    with_copy do |copy|
      span = row..row + 1
      copy.lines[span] = copy.lines[span].join
    end
  end

  private

  # Duplicate every line into a fresh Buffer, yield it for mutation,
  # and return it.
  def with_copy
    duplicate = Buffer.new(lines.map { |line| line.dup })
    yield duplicate
    duplicate
  end
end
class Cursor
  # Immutable-style caret (0-indexed row/col). Movement methods return
  # new Cursor objects; +buf+ must respond to #lines_count and
  # #line_length(row).
  attr_reader :row, :col

  def initialize(row = 0, col = 0)
    @row = row
    @col = col
  end

  # One row up, kept inside the buffer's bounds.
  def up(buf)
    Cursor.new(row - 1, col).clamp(buf)
  end

  # One row down, kept inside the buffer's bounds.
  def down(buf)
    Cursor.new(row + 1, col).clamp(buf)
  end

  # One column left; at column 0 wrap to the end of the previous line,
  # staying put on the very first line.
  def left(buf)
    if col > 0
      Cursor.new(row, col - 1)
    elsif row == 0
      self
    else
      Cursor.new(row - 1, buf.line_length(row - 1))
    end
  end

  # One column right; at end of line wrap to the start of the next
  # line, staying put on the final line.
  def right(buf)
    if !end_of_line?(buf)
      Cursor.new(row, col + 1)
    elsif final_line?(buf)
      self
    else
      Cursor.new(row + 1, 0)
    end
  end

  # Pull row/col inside the buffer's bounds. Mutates the receiver (the
  # only mutating method here) and returns self.
  def clamp(buf)
    @row = row.clamp(0, buf.lines_count - 1)
    @col = col.clamp(0, buf.line_length(row))
    self
  end

  # Caret position after pressing Enter: start of the next row.
  def enter(buf)
    down(buf).line_home
  end

  def line_home
    Cursor.new(row, 0)
  end

  def line_end(buf)
    Cursor.new(row, buf.line_length(row))
  end

  def end_of_line?(buf)
    buf.line_length(row) == col
  end

  def final_line?(buf)
    buf.lines_count - 1 == row
  end

  def end_of_file?(buf)
    final_line?(buf) && end_of_line?(buf)
  end

  def beginning_of_file?
    row.zero? && col.zero?
  end
end
class History
  # Linear undo/redo stack of editor snapshots. @current indexes the
  # snapshot the next undo will return; -1 means nothing to undo.
  def initialize
    @snapshots = []
    @current = -1
  end

  # Drop any redoable future states, then append +data+. With advance:
  # false the pointer stays put — used by the editor to capture the
  # live state right before the first undo without consuming a step.
  def save(data, advance = true)
    snapshots.slice!(current + 1..-1)
    snapshots.push(data)
    @current += 1 if advance
  end

  def can_undo?
    !undo_snapshot.nil?
  end

  def undo
    result = undo_snapshot
    @current -= 1
    result
  end

  def can_redo?
    !redo_snapshot.nil?
  end

  def redo
    result = redo_snapshot
    @current += 1
    result
  end

  private

  attr_reader :current, :snapshots

  def undo_snapshot
    current >= 0 ? snapshots[current] : nil
  end

  # +2 skips both the state undo just stepped past and the extra
  # unadvanced "live" snapshot stored before the first undo.
  def redo_snapshot
    snapshots[current + 2]
  end
end
module ANSI
  # Thin wrappers over the ANSI escape sequences used for drawing.

  # Clear from the cursor position to the end of the screen (CSI "J").
  def self.clear_screen
    print "\e[J"
  end

  # Move the terminal cursor; ANSI coordinates are 1-based, so the
  # editor's 0-based row/col are offset by one.
  def self.move_cursor(row, col)
    print format("\e[%d;%dH", row + 1, col + 1)
  end
end
end
# Entry point. ARGV.fetch(0) raises IndexError when no filename is
# given, which the rescue turns into a usage message. (ARGV[0] returned
# nil instead, so File.exist?(nil) raised TypeError inside the editor
# and the usage message was never printed.)
begin
  Argon::Editor.open(ARGV.fetch(0))
rescue IndexError
  puts "Usage: #$0 file"
end
|
# Homebrew formula for BEAST. Fixes relative to the previous revision:
# add the required `desc` stanza and move the `caveats` method above the
# `test` block, per Homebrew formula style.
class Beast < Formula
  desc "Bayesian Evolutionary Analysis Sampling Trees"
  homepage "http://beast.bio.ed.ac.uk/"
  # doi "10.1093/molbev/mss075"
  # tag "bioinformatics"
  url "http://tree.bio.ed.ac.uk/download.php?id=92&num=3"
  # The download URL carries no version, so it is stated explicitly.
  version "1.8.2"
  sha256 "233ca5e06d98c5e7f8fb6a68fe5dd5448bb36d7d801117f7f6f11ac9f6b6ecc9"
  bottle do
    cellar :any
    sha256 "e1357fad70b3a51ce734a705667f2e9d16bdddf480bf340559cdad0bbcaacb65" => :yosemite
    sha256 "c411831dc26441e4b5bd92dc1926fbd8171d5c8d26d17239f2ce1e9604f67f8b" => :mavericks
    sha256 "c3974c08c01dfa26db9407b070b4302a109043725fef586b4d82290603f2dfee" => :mountain_lion
  end
  # HEAD builds compile from the upstream git repository and need ant.
  head do
    url "https://github.com/beast-dev/beast-mcmc.git"
    depends_on :ant
  end
  def install
    system "ant", "linux" if build.head?
    # Move jars to libexec
    inreplace Dir["bin/*"] do |s|
      s["$BEAST/lib"] = "$BEAST/libexec"
    end
    mv "lib", "libexec"
    prefix.install Dir[build.head? ? "release/Linux/BEASTv*/*" : "*"]
  end
  def caveats; <<-EOS.undent
    Examples are installed in:
      #{opt_prefix}/examples/
    EOS
  end
  test do
    system "#{bin}/beast", "-help"
  end
end
beast: add desc and fix style
Per Homebrew formula style, the `caveats` method should be defined before the `test` block.
# Homebrew formula for BEAST (Bayesian Evolutionary Analysis Sampling
# Trees), installing both a bottled stable release and an optional
# HEAD build compiled from the upstream git repository.
class Beast < Formula
  desc "Bayesian Evolutionary Analysis Sampling Trees"
  homepage "http://beast.bio.ed.ac.uk/"
  # doi "10.1093/molbev/mss075"
  # tag "bioinformatics"
  url "http://tree.bio.ed.ac.uk/download.php?id=92&num=3"
  # The download URL carries no version number, so it is stated explicitly.
  version "1.8.2"
  sha256 "233ca5e06d98c5e7f8fb6a68fe5dd5448bb36d7d801117f7f6f11ac9f6b6ecc9"
  bottle do
    cellar :any
    sha256 "e1357fad70b3a51ce734a705667f2e9d16bdddf480bf340559cdad0bbcaacb65" => :yosemite
    sha256 "c411831dc26441e4b5bd92dc1926fbd8171d5c8d26d17239f2ce1e9604f67f8b" => :mavericks
    sha256 "c3974c08c01dfa26db9407b070b4302a109043725fef586b4d82290603f2dfee" => :mountain_lion
  end
  # HEAD builds compile from the upstream git repository and need ant.
  head do
    url "https://github.com/beast-dev/beast-mcmc.git"
    depends_on :ant
  end
  def install
    system "ant", "linux" if build.head?
    # Move jars to libexec
    inreplace Dir["bin/*"] do |s|
      s["$BEAST/lib"] = "$BEAST/libexec"
    end
    mv "lib", "libexec"
    # HEAD builds leave their artifacts under release/Linux/BEASTv*/;
    # stable downloads install from the unpacked root.
    prefix.install Dir[build.head? ? "release/Linux/BEASTv*/*" : "*"]
  end
  def caveats; <<-EOS.undent
    Examples are installed in:
      #{opt_prefix}/examples/
    EOS
  end
  test do
    system "#{bin}/beast", "-help"
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.