CombinedText stringlengths 4 3.42M |
|---|
module PickupRandomizer
# Maps each randomizable OoE villager's internal name to the event flag
# associated with that villager (used when placing villagers as pickups).
VILLAGER_NAME_TO_EVENT_FLAG = {
  :villagerjacob => 0x2A,
  :villagerabram => 0x2D,
  :villageraeon => 0x3C,
  :villagereugen => 0x38,
  :villagermonica => 0x4F,
  :villagerlaura => 0x32,
  :villagermarcel => 0x40,
  :villagerserge => 0x47,
  :villageranna => 0x4B,
  :villagerdaniela => 0x57,
  :villageririna => 0x53,
}
RANDOMIZABLE_VILLAGER_NAMES = VILLAGER_NAME_TO_EVENT_FLAG.keys

# Maps each PoR portrait's internal name to the entity data used when placing it:
#   subtype - the special object subtype for that portrait's entity type.
#   var_a   - the area index the portrait leads to (see the lookups built below).
#   var_b   - additional entity parameter; exact meaning not evident from this
#             file — presumably the destination room/position. TODO: confirm.
PORTRAIT_NAME_TO_DATA = {
  :portraitcityofhaze => {subtype: 0x1A, var_a: 1, var_b: 0x1A},
  :portraitsandygrave => {subtype: 0x1A, var_a: 3, var_b: 0},
  :portraitnationoffools => {subtype: 0x1A, var_a: 5, var_b: 0x21},
  :portraitforestofdoom => {subtype: 0x1A, var_a: 7, var_b: 0},
  :portraitdarkacademy => {subtype: 0x76, var_a: 8, var_b: 0x46},
  :portraitburntparadise => {subtype: 0x76, var_a: 6, var_b: 0x20},
  :portraitforgottencity => {subtype: 0x76, var_a: 4, var_b: 0},
  :portrait13thstreet => {subtype: 0x76, var_a: 2, var_b: 7},
  :portraitnestofevil => {subtype: 0x86, var_a: 9, var_b: 0},
}
PORTRAIT_NAMES = PORTRAIT_NAME_TO_DATA.keys

# Two-way lookups between a portrait name and the area index it leads to
# (derived from var_a above).
AREA_INDEX_TO_PORTRAIT_NAME = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [data[:var_a], name]
end.to_h
PORTRAIT_NAME_TO_AREA_INDEX = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [name, data[:var_a]]
end.to_h

# Where each portrait's entity lives in the vanilla game, as an entity location
# string in the form "AA-SS-RR_EE" (area-sector-room_entity, all hex).
PORTRAIT_NAME_TO_DEFAULT_ENTITY_LOCATION = {
  :portraitcityofhaze => "00-01-00_00",
  :portraitsandygrave => "00-04-12_00",
  :portraitnationoffools => "00-06-01_00",
  :portraitforestofdoom => "00-08-01_02",
  :portraitdarkacademy => "00-0B-00_04",
  :portraitburntparadise => "00-0B-00_03",
  :portraitforgottencity => "00-0B-00_01",
  :portrait13thstreet => "00-0B-00_02",
  :portraitnestofevil => "00-00-05_00",
}
# Door location strings ("AA-SS-RR_DDD", hex) for the doors that exit an area
# onto the OoE world map. These are the exits that can be re-pointed when
# world map exits are randomized.
WORLD_MAP_EXITS = [
  #"00-02-1B_000", # Exit from the castle. Don't randomize this.
  "04-00-03_000",
  "05-00-00_000",
  "06-00-0A_000",
  "06-01-00_000",
  "07-00-0E_000",
  "08-02-07_000",
  "09-00-07_000",
  "0A-00-0A_000",
  #"0A-00-13_000", # Alternate exit from Tymeo. Not randomized separately from the other one.
  "0B-00-10_000",
  "0D-00-09_000",
  "0F-00-00_000",
]

# Maps a key identifying each world map entrance to the door location string
# the player arrives at. Keys are area indexes; negative keys are special
# entrances that don't correspond directly to an area index.
WORLD_MAP_ENTRANCES = {
  #3 => "03-00-00_000", # Training Hall. Not randomized because we don't randomize the castle exit.
  6 => "06-00-00_000",
  8 => "08-00-00_000",
  9 => "09-00-00_000", # Lighthouse. My logic has a special case here due to the spikes but it can still be randomized.
  0xA => "0A-00-00_000",
  0xB => "0B-00-00_000",
  0xD => "0D-00-00_000",
  0xE => "0E-00-0C_000",
  0xF => "0F-00-08_000",
  0x10 => "10-01-06_000",
  0x11 => "11-00-00_000",
  -1 => "06-01-09_000", # Lower Kalidus entrance.
  #-2 => "0C-00-00_000", # Large Cavern. Not randomized because we don't randomize the castle exit.
}

WORLD_MAP_ENTRANCES_THAT_LEAD_TO_A_WORLD_MAP_EXIT = [
  # Which entrances to prioritize placing first to avoid running out of accessible exits.
  6,
  8,
  9,
  0xA,
  0xB,
  0xD,
  0xF,
]
# Top-level entry point for pickup randomization.
# Seeds the logic checker with the items the player starts with in each game,
# applies a handful of game-specific ROM fixes, then delegates to
# place_progression_pickups to place everything needed to beat the game.
# Yields a float in 0..1 as progression pickups are placed (progress reporting).
# Raises if the finished seed is not beatable — that indicates a logic bug.
def randomize_pickups_completably(&block)
  spoiler_log.puts "Randomizing pickups:"
  
  case GAME
  when "dos"
    # Items the player effectively starts with in Dawn of Sorrow.
    checker.add_item(0x43) # knife
    checker.add_item(0x91) # casual clothes
    checker.add_item(0x3D) # seal 1
    if options[:unlock_boss_doors]
      # With boss doors unlocked the later seals are never required, so tell
      # the logic the player already has them.
      checker.add_item(0x3E) # seal 2
      checker.add_item(0x3F) # seal 3
      checker.add_item(0x40) # seal 4
      checker.add_item(0x41) # seal 5
    end
  when "por"
    checker.add_item(0x61) # starting vampire killer
    checker.add_item(0x6C) # encyclopedia
    checker.add_item(0xAA) # casual clothes
    checker.add_item(0x1AD) # call cube
    
    if options[:dont_randomize_change_cube]
      checker.add_item(0x1AC) # change cube
      unless room_rando? # In room rando this item is placed in the player's starting room instead.
        change_entity_location_to_pickup_global_id("00-00-01_01", 0x1AC)
      end
    else
      # If the player doesn't start with change cube, give them skill cube instead so they can still use Charlotte's spells.
      checker.add_item(0x1AE) # skill cube
      unless room_rando? # In room rando this item is placed in the player's starting room instead.
        change_entity_location_to_pickup_global_id("00-00-01_01", 0x1AE)
      end
    end
    
    # In the corridor where Behemoth chases you, change the code of the platform to not permanently disappear.
    # This is so the player can't get stuck if they miss an important item up there.
    # NOTE(review): 0xEA000003 looks like an ARM unconditional branch opcode
    # being patched over the original instruction — confirm against a disassembly.
    game.fs.load_overlay(79)
    game.fs.write(0x022EC638, [0xEA000003].pack("V"))
    
    # Room in Sandy Grave that has two overlapping Charm Necklaces.
    # We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
    # Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
    item_a = game.areas[3].sectors[0].rooms[0x13].entities[0]
    item_b = game.areas[3].sectors[0].rooms[0x13].entities[1]
    item_a.x_pos = 0x120
    item_b.x_pos = 0x140
    pickup_flag = get_unused_pickup_flag()
    item_b.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    item_a.write_to_rom()
    item_b.write_to_rom()
  when "ooe"
    checker.add_item(0xE6) # casual clothes
    checker.add_item(0x6F) # lizard tail
    checker.add_item(0x72) # glyph union
    checker.add_item(0x1E) # torpor. the player will get enough of these as it is
    
    # Give the player the glyph sleeve in Ecclesia like in hard mode.
    # To do this just get rid of the entity hider that hides it on normal mode.
    entity_hider = game.areas[2].sectors[0].rooms[4].entities[6]
    entity_hider.type = 0
    entity_hider.write_to_rom()
    # But we also need to give the chest a unique flag, because it shares the flag with the one from Minera in normal mode.
    sleeve_chest = game.areas[2].sectors[0].rooms[4].entities[7]
    pickup_flag = get_unused_pickup_flag()
    sleeve_chest.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    sleeve_chest.write_to_rom()
    # We also make sure the chest in Minera appears even on hard mode.
    entity_hider = game.areas[8].sectors[2].rooms[7].entities[1]
    entity_hider.type = 0
    entity_hider.write_to_rom()
    # The logic can now assume the sleeve is always obtainable.
    checker.add_item(0x73) # glyph sleeve
    
    # Room in the Final Approach that has two overlapping chests both containing diamonds.
    # We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
    # Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
    chest_a = game.areas[0].sectors[0xA].rooms[0xB].entities[1]
    chest_b = game.areas[0].sectors[0xA].rooms[0xB].entities[2]
    chest_a.x_pos = 0xE0
    chest_b.x_pos = 0x130
    pickup_flag = get_unused_pickup_flag()
    chest_b.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    chest_a.write_to_rom()
    chest_b.write_to_rom()
  end
  
  # Place all progression pickups, converting the per-pickup progress callback
  # into a 0..1 fraction for this method's caller.
  total_progression_pickups = checker.all_progression_pickups.length
  place_progression_pickups() do |progression_pickups_placed|
    percent_done = progression_pickups_placed.to_f / total_progression_pickups
    yield percent_done
  end
  
  # Sanity check: after placement, the checker must report the game beatable.
  if !checker.game_beatable?
    #if options[:randomize_rooms_map_friendly]
    #  # When debugging logic errors in map rando, output a list of what room strings were accessible at the end.
    #  File.open("./logs/accessed rooms debug #{GAME} #{seed}.txt", "w") do |f|
    #    for room_str in @rooms_by_progression_order_accessed
    #      f.puts(room_str)
    #    end
    #  end
    #  
    #  # And also output an image of the map with accessible rooms highlighted in red.
    #  unique_rooms_accessed = @rooms_by_progression_order_accessed.flatten.uniq
    #  map = game.get_map(0, 0)
    #  for tile in map.tiles
    #    if tile.sector_index.nil? || tile.room_index.nil?
    #      next
    #    end
    #    room_str_for_tile = "%02X-%02X-%02X" % [0, tile.sector_index, tile.room_index]
    #    if unique_rooms_accessed.include?(room_str_for_tile)
    #      tile.is_save = true
    #      tile.is_warp = false
    #      tile.is_entrance = false
    #    else
    #      tile.is_save = false
    #      tile.is_warp = false
    #      tile.is_entrance = false
    #    end
    #  end
    #  hardcoded_transition_rooms = (GAME == "dos" ? @transition_rooms : [])
    #  filename = "./logs/map debug #{GAME} #{seed}.png"
    #  renderer.render_map(map, scale=3, hardcoded_transition_rooms=hardcoded_transition_rooms).save(filename)
    #end
    
    # Build a readable list of everything the checker thinks the player has,
    # for the error message. Symbols (villagers/portraits) are kept as-is.
    item_names = checker.current_items.map do |global_id|
      if global_id.is_a?(Symbol)
        global_id
      else
        checker.defs.invert[global_id]
      end
    end.compact
    raise "Bug: Game is not beatable on this seed!\nThis error shouldn't happen.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
  end
  
  if GAME == "por" && options[:randomize_portraits]
    # Remove the extra portraits at the end of 13th Street, Forgotten City, Burnt Paradise, and Dark Academy.
    # (The one return portrait back to where you entered this portrait from is not removed, and is updated elsewhere in the code.)
    [
      "02-02-16_01",
      "02-02-16_03",
      "02-02-16_04",
      "04-01-07_02",
      "04-01-07_03",
      "04-01-07_04",
      "06-00-06_01",
      "06-00-06_02",
      "06-00-06_04",
      "08-00-08_01",
      "08-00-08_02",
      "08-00-08_03",
    ].each do |entity_str|
      portrait = game.entity_by_str(entity_str)
      portrait.type = 0 # Setting the entity type to 0 deletes it.
      portrait.write_to_rom()
    end
  end
end
# Places every progression-critical pickup (items, souls/glyphs, villagers,
# portraits), one at a time. After each placement the accessibility checker is
# re-consulted so each pickup is guaranteed reachable using only the pickups
# placed before it. Yields the running count of placed progression pickups
# after each placement (used by the caller for progress reporting).
# Raises if no valid location can be found for a pickup, or if world map
# exits/entrances are left unplaced at the end.
def place_progression_pickups(&block)
  previous_accessible_locations = []
  @locations_randomized_to_have_useful_pickups = []
  @rooms_that_already_have_an_event = []
  progression_pickups_placed = 0
  total_progression_pickups = checker.all_progression_pickups.length
  on_leftovers = false
  @rooms_by_progression_order_accessed = []
  world_map_exits_randomized = []
  world_map_entrances_used = []
  
  # Record which rooms already contain an event object (special object
  # subtypes 0x5F..0x88) so events aren't added to them again later.
  game.each_room do |room|
    room.entities.each do |entity|
      if entity.is_special_object? && (0x5F..0x88).include?(entity.subtype)
        room_str = "%02X-%02X-%02X" % [room.area_index, room.sector_index, room.room_index]
        @rooms_that_already_have_an_event << room_str
        break
      end
    end
  end
  
  if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
    starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
    if starting_portrait_name
      # The starting room randomizer started the player in a portrait.
      # This is problematic because the portrait randomizer will traditionally never place a portrait back to Dracula's castle, making it inaccessible.
      # So we need to place the starting portrait at a random location in Dracula's Castle and register it with the logic.
      
      # First pick a random valid location, excluding rooms the room randomizer
      # left unused, and restricted to area 0 (Dracula's Castle).
      possible_portrait_locations = checker.all_locations.keys
      possible_portrait_locations = filter_locations_valid_for_pickup(possible_portrait_locations, starting_portrait_name)
      unused_room_strs = @unused_rooms.map{|room| room.room_str}
      possible_portrait_locations.reject! do |location|
        room_str = location[0,8]
        unused_room_strs.include?(room_str)
      end
      possible_portrait_locations.select! do |location|
        area_index = location[0,2].to_i(16)
        area_index == 0
      end
      starting_portrait_location_in_castle = possible_portrait_locations.sample(random: rng)
      
      # Then place the portrait.
      change_entity_location_to_pickup_global_id(starting_portrait_location_in_castle, starting_portrait_name)
      @locations_randomized_to_have_useful_pickups << starting_portrait_location_in_castle
    end
  end
  
  verbose = false
  
  # First place progression pickups needed to beat the game.
  spoiler_log.puts "Placing main route progression pickups:"
  on_first_item = true
  while true
    case GAME
    when "por"
      if !checker.current_items.include?(0x1B2) && checker.wind_accessible? && checker.vincent_accessible?
        checker.add_item(0x1B2) # give lizard tail if the player has reached wind
      end
    end
    
    # Recompute what the player can currently reach.
    if room_rando?
      possible_locations, accessible_doors = checker.get_accessible_locations_and_doors()
      accessible_rooms = accessible_doors.map{|door_str| door_str[0,8]}
      @rooms_by_progression_order_accessed << accessible_rooms
    else
      possible_locations = checker.get_accessible_locations()
    end
    possible_locations -= @locations_randomized_to_have_useful_pickups
    puts "Total possible locations: #{possible_locations.size}" if verbose
    
    pickups_by_locations = checker.pickups_by_current_num_locations_they_access()
    if starting_portrait_name
      # Don't place the starting portrait anywhere, it's already in Dracula's Castle.
      pickups_by_locations.delete(starting_portrait_name)
    end
    if GAME == "por" && options[:randomize_portraits] && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
      # If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
      if progression_pickups_placed < 5
        pickups_by_locations_filtered = pickups_by_locations.reject do |pickup, usefulness|
          [:portraitdarkacademy, :portraitburntparadise, :portraitforgottencity, :portrait13thstreet].include?(pickup)
        end
        if pickups_by_locations_filtered.any?
          pickups_by_locations = pickups_by_locations_filtered
        end
      end
    end
    pickups_by_usefulness = pickups_by_locations.select{|pickup, num_locations| num_locations > 0}
    currently_useless_pickups = pickups_by_locations.select{|pickup, num_locations| num_locations == 0}
    puts "Num useless pickups: #{currently_useless_pickups.size}" if verbose
    placing_currently_useless_pickup = false
    if pickups_by_usefulness.any?
      max_usefulness = pickups_by_usefulness.values.max
      weights = pickups_by_usefulness.map do |pickup, usefulness|
        # Weight less useful pickups as being more likely to be chosen.
        weight = max_usefulness - usefulness + 1
        weight = Math.sqrt(weight)
        if checker.preferences[pickup]
          weight *= checker.preferences[pickup]
        end
        weight
      end
      ps = weights.map{|w| w.to_f / weights.reduce(:+)}
      useful_pickups = pickups_by_usefulness.keys
      weighted_useful_pickups = useful_pickups.zip(ps).to_h
      # Weighted random selection: taking max of rand**(1/weight) picks each key
      # with probability proportional to its weight.
      pickup_global_id = weighted_useful_pickups.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
      
      weighted_useful_pickups_names = weighted_useful_pickups.map do |global_id, weight|
        "%.2f %s" % [weight, checker.defs.invert[global_id]]
      end
      #puts "Weighted less useful pickups: [" + weighted_useful_pickups_names.join(", ") + "]"
    elsif pickups_by_locations.any? && checker.game_beatable?
      # The player can access all locations.
      # So we just randomly place one progression pickup.
      if !on_leftovers
        spoiler_log.puts "Placing leftover progression pickups:"
        on_leftovers = true
      end
      pickup_global_id = pickups_by_locations.keys.sample(random: rng)
    elsif pickups_by_locations.any?
      # No locations can access new areas, but the game isn't beatable yet.
      # This means any new areas will need at least two new items to access.
      # So just place a random pickup for now.
      valid_pickups = pickups_by_locations.keys
      if GAME == "ooe" && options[:randomize_villagers]
        valid_villagers = valid_pickups & RANDOMIZABLE_VILLAGER_NAMES
        if checker.albus_fight_accessible?
          if valid_villagers.any?
            # Once Albus is accessible, prioritize placing villagers over other pickups.
            valid_pickups = valid_villagers
          end
        else
          # Don't start placing villagers until Albus is accessible.
          valid_pickups -= RANDOMIZABLE_VILLAGER_NAMES
        end
        if valid_pickups.empty?
          # But if the only things left to place are villagers, we have no choice but to place them before Albus is accessible.
          valid_pickups = pickups_by_locations.keys
        end
      elsif GAME == "dos" && room_rando? && accessible_rooms.include?("00-06-00")
        # Player has access to the Subterranean Hell room with the huge spikes.
        # To get through this room you need either rahab and bone ark or rahab, puppet master, and skeleton ape.
        # The logic can have trouble placing the items necessary to get through this room, since skeleton ape and bone ark are useless everywhere else, and rahab is only useful in a handful of rooms - so if the player doesn't have access to any places that make rahab useful by itself, the randomizer might just try to place every other item, filling up all available item locations, and never place rahab.
        # So we add a special case here to 100% guaranteed place rahab (assuming the player has access to under 15 item locations). From there the randomizer can figure out that it should place bone ark or puppet master and skeleton ape.
        if valid_pickups.include?(0x145) && possible_locations.length < 15
          valid_pickups = [0x145] # Rahab
        end
      end
      pickup_global_id = valid_pickups.sample(random: rng)
      placing_currently_useless_pickup = true
      puts "Placing currently useless pickup." if verbose
    else
      # All progression pickups placed.
      break
    end
    
    pickup_name = checker.defs.invert[pickup_global_id].to_s
    puts "Trying to place #{pickup_name}" if verbose
    
    if !options[:randomize_boss_souls]
      # If randomize boss souls option is off, don't allow putting random things in these locations.
      accessible_unused_boss_locations = possible_locations & checker.enemy_locations
      accessible_unused_boss_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        
        # Also, give the player what this boss drops so the checker takes this into account.
        pickup_global_id = get_entity_skill_drop_by_entity_location(location)
        checker.add_item(pickup_global_id)
      end
      # Restart the loop so the newly-granted boss drops are reflected in accessibility.
      next if accessible_unused_boss_locations.length > 0
    end
    
    if !options[:randomize_villagers] && GAME == "ooe"
      # If randomize villagers option is off, don't allow putting random things in these locations.
      accessible_unused_villager_locations = possible_locations & checker.villager_locations
      accessible_unused_villager_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        
        # Also, give the player this villager so the checker takes this into account.
        villager_name = get_villager_name_by_entity_location(location)
        checker.add_item(villager_name)
      end
      next if accessible_unused_villager_locations.length > 0
    end
    
    if !options[:randomize_portraits] && GAME == "por"
      # If randomize portraits option is off, don't allow putting random things in these locations.
      accessible_unused_portrait_locations = possible_locations & checker.portrait_locations
      accessible_unused_portrait_locations -= @portrait_locations_to_remove # Don't count removed portraits in short mode as portrait locations.
      accessible_unused_portrait_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location
        
        # Also, give the player this portrait so the checker takes this into account.
        portrait_name = get_portrait_name_by_entity_location(location)
        checker.add_item(portrait_name)
      end
      next if accessible_unused_portrait_locations.length > 0
    end
    
    if GAME == "ooe" && options[:randomize_world_map_exits] && !options[:open_world_map] && room_rando?
      # Randomize world map exits.
      unused_accessible_exits = (accessible_doors & WORLD_MAP_EXITS) - world_map_exits_randomized
      if unused_accessible_exits.any?
        while unused_accessible_exits.any?
          world_map_exit = unused_accessible_exits.sample(random: rng)
          unused_accessible_exits.delete(world_map_exit)
          
          unused_entrances = WORLD_MAP_ENTRANCES.keys - world_map_entrances_used
          possible_entrances = unused_entrances
          if unused_accessible_exits.empty?
            # We're on the last accessible exit.
            # We need to prioritize placing entrances that lead to more exits.
            # Otherwise we would exhaust all the remaining exits and the player would have no way to progress.
            # (Unless this is the very last exit overall - in that case it's fine that we exhaust the last one.)
            possible_entrances_that_lead_to_a_new_exit = unused_entrances & WORLD_MAP_ENTRANCES_THAT_LEAD_TO_A_WORLD_MAP_EXIT
            if possible_entrances_that_lead_to_a_new_exit.any?
              possible_entrances = possible_entrances_that_lead_to_a_new_exit
            end
          end
          if possible_entrances.empty?
            raise "Ran out of world map entrances to make world map exits unlock!"
          end
          entrance = possible_entrances.sample(random: rng)
          set_world_map_exit_destination_area(world_map_exit, entrance)
          world_map_exits_randomized << world_map_exit
          world_map_entrances_used << entrance
        end
        
        next # Redo this progression placement loop with the world map entrances now set.
      end
    end
    
    # Locations that became reachable only after the most recent placement.
    new_possible_locations = possible_locations - previous_accessible_locations.flatten
    filtered_new_possible_locations = filter_locations_valid_for_pickup(new_possible_locations, pickup_global_id)
    puts "Filtered new possible locations: #{filtered_new_possible_locations.size}" if verbose
    puts "  " + filtered_new_possible_locations.join(", ") if verbose
    
    # Previously-accessible regions that still have valid free spots for this pickup.
    valid_previous_accessible_regions = previous_accessible_locations.map do |previous_accessible_region|
      possible_locations = previous_accessible_region.dup
      possible_locations -= @locations_randomized_to_have_useful_pickups
      possible_locations = filter_locations_valid_for_pickup(possible_locations, pickup_global_id)
      possible_locations = nil if possible_locations.empty?
      possible_locations
    end.compact
    
    possible_locations_to_choose_from = filtered_new_possible_locations.dup
    if placing_currently_useless_pickup
      # Place items that don't immediately open up new areas anywhere in the game, with no weighting towards later areas.
      valid_accessible_locations = previous_accessible_locations.map do |previous_accessible_region|
        possible_locations = previous_accessible_region.dup
        possible_locations -= @locations_randomized_to_have_useful_pickups
        possible_locations = filter_locations_valid_for_pickup(possible_locations, pickup_global_id)
        possible_locations = nil if possible_locations.empty?
        possible_locations
      end.compact.flatten
      valid_accessible_locations += filtered_new_possible_locations
      possible_locations_to_choose_from = valid_accessible_locations
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.any?
      # No new locations, so select an old location.
      if on_leftovers
        # Just placing a leftover progression pickup.
        # Weighted to be more likely to select locations you got access to later rather than earlier.
        i = 1
        weights = valid_previous_accessible_regions.map do |region|
          # Weight later accessible regions as more likely than earlier accessible regions (exponential)
          weight = i**2
          i += 1
          weight
        end
        ps = weights.map{|w| w.to_f / weights.reduce(:+)}
        weighted_accessible_regions = valid_previous_accessible_regions.zip(ps).to_h
        previous_accessible_region = weighted_accessible_regions.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
        possible_locations_to_choose_from = previous_accessible_region
      else
        # Placing a main route progression pickup, just not one that immediately opens up new areas.
        # Always place in the most recent accessible region.
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "No new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.empty?
      # No new locations, but there's no old locations either.
      if on_first_item
        # If we're placing the very first item yet there's no accessible spots, then the room/map randomizer must have resulted in a bad start.
        # So we place the first progression item in the starting room.
        entity = @starting_room.add_new_entity()
        entity.x_pos = @starting_x_pos
        entity.y_pos = @starting_y_pos
        # NOTE(review): @coll is assigned but the bare name coll is read below —
        # this only works if an accessor/method named coll exists elsewhere. Verify.
        @coll = RoomCollision.new(@starting_room, game.fs)
        floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
        entity.y_pos = floor_y - 0x18 # Offset the item up off the floor.
        location = "#{@starting_room.room_str}_%02X" % (@starting_room.entities.length-1)
        possible_locations_to_choose_from = [location]
      else
        possible_locations_to_choose_from = []
      end
    elsif filtered_new_possible_locations.size <= 5 && valid_previous_accessible_regions.last && valid_previous_accessible_regions.last.size >= 15
      # There aren't many new locations unlocked by the last item we placed.
      # But there are a lot of other locations unlocked by the one we placed before that.
      # So we give it a chance to put it in one of those last spots, instead of the new spots.
      # The chance is proportional to how few new locations there are. 1 = 70%, 2 = 60%, 3 = 50%, 4 = 40%, 5 = 30%.
      # Bugfix: the multiplier was previously 10 (not 0.10), which made the
      # chance 10.3-40.3 for sizes 1-4, so this branch always triggered.
      chance = 0.30 + (5-filtered_new_possible_locations.size)*0.10
      if rng.rand() <= chance
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "Not many new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    end
    
    previous_accessible_locations << new_possible_locations
    
    if possible_locations_to_choose_from.empty?
      item_names = checker.current_items.map do |global_id|
        checker.defs.invert[global_id]
      end.compact
      raise "Bug: Failed to find any spots to place pickup.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
    end
    
    #puts "Possible locations: #{possible_locations_to_choose_from.join(", ")}" if verbose
    location = possible_locations_to_choose_from.sample(random: rng)
    @locations_randomized_to_have_useful_pickups << location
    
    if room_rando?
      checker.set_current_location_by_entity(location)
    end
    
    # Build the spoiler log description of what was placed.
    if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
      # Villager
      pickup_str = "villager #{pickup_global_id}"
    elsif PORTRAIT_NAMES.include?(pickup_global_id)
      # Portrait
      pickup_str = "portrait #{pickup_global_id.to_s[8..-1]}" # Add a space between portrait and the area name
    else
      pickup_name = checker.defs.invert[pickup_global_id].to_s
      pickup_str = "pickup %04X (#{pickup_name})" % pickup_global_id
    end
    
    location =~ /^(\h\h)-(\h\h)-(\h\h)_(\h+)$/
    area_index, sector_index, room_index, entity_index = $1.to_i(16), $2.to_i(16), $3.to_i(16), $4.to_i(16)
    if SECTOR_INDEX_TO_SECTOR_NAME[area_index]
      area_name = SECTOR_INDEX_TO_SECTOR_NAME[area_index][sector_index]
    else
      area_name = AREA_INDEX_TO_AREA_NAME[area_index]
    end
    is_enemy_str = checker.enemy_locations.include?(location) ? " (boss)" : ""
    is_event_str = checker.event_locations.include?(location) ? " (event)" : ""
    is_easter_egg_str = checker.easter_egg_locations.include?(location) ? " (easter egg)" : ""
    is_hidden_str = checker.hidden_locations.include?(location) ? " (hidden)" : ""
    is_mirror_str = checker.mirror_locations.include?(location) ? " (mirror)" : ""
    spoiler_str = "  Placing #{pickup_str} at #{location}#{is_enemy_str}#{is_event_str}#{is_easter_egg_str}#{is_hidden_str}#{is_mirror_str} (#{area_name})"
    spoiler_log.puts spoiler_str
    puts spoiler_str if verbose
    
    change_entity_location_to_pickup_global_id(location, pickup_global_id)
    checker.add_item(pickup_global_id)
    on_first_item = false
    
    if room_rando? && GAME == "ooe"
      # These villagers are granted automatically once their rooms are reachable.
      if accessible_doors.include?("01-01-00_000") && !checker.current_items.include?(:villagernikolai)
        checker.add_item(:villagernikolai)
      end
      if accessible_doors.include?("11-00-08_000") && !checker.current_items.include?(:villagergeorge)
        checker.add_item(:villagergeorge)
      end
    end
    
    progression_pickups_placed += 1
    yield(progression_pickups_placed)
  end
  
  if room_rando? && false # Debug dump of accessible doors; intentionally disabled.
    File.open("accessible_doors.txt", "w") do |f|
      accessible_doors.each do |accessible_door|
        f.puts accessible_door
      end
    end
  end
  
  if GAME == "ooe" && options[:randomize_world_map_exits]
    # Verify every world map exit got paired with an entrance.
    unused_exits = WORLD_MAP_EXITS - world_map_exits_randomized
    unused_entrances = WORLD_MAP_ENTRANCES.keys - world_map_entrances_used
    puts "Unused world map exits: #{unused_exits.join(", ")}"
    puts "Unused world map entrances: #{unused_entrances.join(", ")}"
    if unused_exits.any? && unused_entrances.any?
      raise "Error: There are unplaced world map exits and entrances:\nExits: #{unused_exits.join(", ")}\nEntrances: #{unused_entrances.join(", ")}"
    elsif unused_exits.any?
      raise "Error: There are unplaced world map exits: #{unused_exits.join(", ")}"
    elsif unused_entrances.any?
      raise "Error: There are unplaced world map entrances: #{unused_entrances.join(", ")}"
    end
  end
  
  spoiler_log.puts "All progression pickups placed successfully."
end
# Fills every remaining accessible location (after progression placement) with
# non-progression pickups: money, items, max ups, and skills, weighted by the
# difficulty settings. Deletes items from locations the player can never reach.
# Note: the exact sequence of rng calls here determines the output for a given
# seed, so the iteration order is significant.
def place_non_progression_pickups
  remaining_locations = checker.get_accessible_locations() - @locations_randomized_to_have_useful_pickups
  remaining_locations.shuffle!(random: rng)
  
  # In room rando, some items may be unreachable.
  # We don't want the player to see these items in a different subroom and think the randomizer is bugged, so we delete them.
  inaccessible_remaining_locations = checker.all_locations.keys - @locations_randomized_to_have_useful_pickups - remaining_locations
  remove_inaccessible_items(inaccessible_remaining_locations)
  
  if GAME == "ooe"
    # Do event glyphs first. This is so they don't reuse a glyph already used by a glyph statue.
    # If the player got the one from the glyph statue first then the one in the event/puzzle wouldn't appear, breaking the event/puzzle.
    ooe_event_glyph_locations = remaining_locations.select{|location| checker.event_locations.include?(location)}
    ooe_event_glyph_locations.each do |location|
      pickup_global_id = get_unplaced_non_progression_skill()
      change_entity_location_to_pickup_global_id(location, pickup_global_id)
    end
    remaining_locations -= ooe_event_glyph_locations
  end
  
  # Guarantee exactly one Chaos Ring is placed per seed (DoS/PoR).
  chaos_ring_placed = false
  remaining_locations.each_with_index do |location, i|
    if checker.enemy_locations.include?(location)
      # Boss
      pickup_global_id = get_unplaced_non_progression_skill()
    elsif ["dos", "por"].include?(GAME) && (checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location))
      # Event item
      pickup_global_id = get_unplaced_non_progression_item()
    elsif GAME == "ooe" && location == "08-02-06_01"
      # Tin man's strength ring blue chest. Can't be a glyph.
      pickup_global_id = get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate()
    elsif GAME == "dos" && checker.mirror_locations.include?(location)
      # Soul candles shouldn't be placed in mirrors, as they will appear even outside the mirror.
      pickup_global_id = get_unplaced_non_progression_item()
    elsif GAME == "dos" && !chaos_ring_placed
      pickup_global_id = 0xCD # Chaos Ring
      chaos_ring_placed = true
    elsif GAME == "por" && !chaos_ring_placed
      pickup_global_id = 0x12C # Chaos Ring
      chaos_ring_placed = true
    else
      # Pickup
      
      # Select the type of pickup weighed by difficulty options.
      weights = {
        money: @difficulty_settings[:money_placement_weight],
        item: @difficulty_settings[:item_placement_weight],
      }
      if GAME == "por" || GAME == "ooe"
        weights[:max_up] = @difficulty_settings[:max_up_placement_weight]
      end
      case GAME
      when "dos"
        weights[:skill] = @difficulty_settings[:soul_candle_placement_weight]
      when "por"
        weights[:skill] = @difficulty_settings[:por_skill_placement_weight]
      when "ooe"
        weights[:skill] = @difficulty_settings[:glyph_placement_weight]
      end
      # Normalize weights, then do a weighted random draw
      # (max of rand**(1/weight) selects proportionally to weight).
      weighted_pickup_types = {}
      weights_sum = weights.values.reduce(:+)
      weights.each do |type, weight|
        weighted_pickup_types[type] = weight.to_f / weights_sum
      end
      random_pickup_type = weighted_pickup_types.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
      
      case random_pickup_type
      when :money
        pickup_global_id = :money
      when :max_up
        pickup_global_id = @max_up_items.sample(random: rng)
      when :skill
        pickup_global_id = get_unplaced_non_progression_skill()
      when :item
        if checker.hidden_locations.include?(location)
          # Don't let relics be inside breakable walls in OoE.
          # This is because they need to be inside a chest, and chests can't be hidden.
          pickup_global_id = get_unplaced_non_progression_item_except_ooe_relics()
        else
          pickup_global_id = get_unplaced_non_progression_item()
        end
      end
    end
    
    @used_non_progression_pickups << pickup_global_id
    change_entity_location_to_pickup_global_id(location, pickup_global_id)
  end
end
# Builds the master list of pickup global IDs that are NOT required for
# progression, excluding pickups that must never be randomized and max-up
# items (placed via their own weighting). Magical Tickets are additionally
# excluded in room rando or PoR portrait rando.
# May only be called once; raises if the list already exists.
def initialize_all_non_progression_pickups
  unless @all_non_progression_pickups.nil?
    raise "all_non_progression_pickups was initialized too early."
  end
  
  pickups = PICKUP_GLOBAL_ID_RANGE.to_a - checker.all_progression_pickups
  pickups -= NONRANDOMIZABLE_PICKUP_GLOBAL_IDS
  pickups -= @max_up_items
  if room_rando? || (GAME == "por" && options[:randomize_portraits])
    pickups -= [MAGICAL_TICKET_GLOBAL_ID]
  end
  
  @all_non_progression_pickups = pickups
end
def filter_locations_valid_for_pickup(locations, pickup_global_id)
  # Returns a filtered copy of +locations+ containing only the spots where
  # this particular pickup is allowed to be placed, per game-specific rules.
  # (Progression-location filtering is only relevant because this function is
  # only called for progression items.)
  candidates = locations.dup
  is_item = ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
  # If the pickup is an item instead of a skill, don't let bosses drop it.
  candidates -= checker.enemy_locations if is_item
  # Don't let progression items be in certain problematic locations.
  candidates -= checker.no_progression_locations
  if GAME == "dos"
    if SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
      # Don't let events give you souls in DoS.
      candidates -= checker.event_locations
      candidates -= checker.easter_egg_locations
      # Soul candles inside mirrors don't get hidden and are accessible without Paranoia.
      candidates -= checker.mirror_locations
      # Some soul candle spots can be broken without actually reaching them.
      candidates -= checker.no_soul_locations
    end
    if (0x3D..0x41).include?(pickup_global_id)
      # Magic seals can't be given by easter egg locations.
      candidates -= checker.easter_egg_locations
    end
  end
  if GAME == "ooe"
    if is_item
      # Don't let events give you items in OoE.
      candidates -= checker.event_locations
    else
      # Glyphs/villagers can't be in the special blue chest spawned by the
      # searchlights when you kill a Tin Man.
      candidates -= ["08-02-06_01"]
    end
    if !pickup_global_id.is_a?(Integer) || !game.fs.check_integer_can_be_an_arm_shifted_immediate?(pickup_global_id)
      # That blue chest's pickup ID is a hardcoded ARM shifted immediate, so it
      # can only hold IDs that encode as one.
      candidates -= ["08-02-06_01"]
    end
    if (0x6F..0x74).include?(pickup_global_id)
      # Relics need to be inside a chest, and chests can't be hidden, so no
      # breakable-wall locations.
      candidates -= checker.hidden_locations
    end
  end
  if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villagers can't be hidden, an event glyph, or a boss drop.
    candidates -= checker.hidden_locations
    candidates -= checker.event_locations
    candidates -= checker.enemy_locations
    # Locations too close to the top of the room shouldn't be villagers, as the
    # Torpor glyph would spawn above the screen and not be absorbable.
    candidates -= ["00-05-07_01", "00-05-07_02", "00-05-08_02", "00-05-08_03", "00-05-0C_01", "00-06-09_00", "0D-00-04_00", "0D-00-0C_00"]
    # Two villagers in one room would have conflicting, broken events.
    candidates.reject! do |loc|
      @rooms_that_already_have_an_event.include?(loc[0,8])
    end
  end
  if PORTRAIT_NAMES.include?(pickup_global_id)
    # Rooms where returning via the return portrait would put the player out of
    # bounds (plus a few sequence-break/softlock rooms, noted inline).
    bad_portrait_rooms = [
      "03-00-05", "03-00-06", "03-00-07", "03-00-08", "03-00-09", "03-00-0A",
      "04-00-05", "04-00-06", "04-00-07", "04-00-08", "04-00-09", "04-00-0A",
      "05-01-01", "05-01-14", "06-01-14",
      "00-05-02", # Great stairway room; not out of bounds, but sequence breaks the button lock and needing height.
      "00-05-04", # Great stairway room; not out of bounds, but sequence breaks needing height.
      "05-02-0C", # Legion's room; a portrait here can't set the pickup flag Legion checks, so Legion would never activate.
    ]
    candidates.reject! do |loc|
      bad_portrait_rooms.include?(loc[0,8])
    end
  end
  if GAME == "ooe" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
    # Don't put progression glyphs where the player could easily get them early.
    candidates -= checker.no_glyph_locations
  end
  candidates
end
def get_unplaced_non_progression_pickup(valid_ids: PICKUP_GLOBAL_ID_RANGE.to_a)
  # Picks a random not-yet-placed non-progression pickup whose global ID is in
  # +valid_ids+. When the unplaced pool runs dry, it is refilled from the full
  # non-progression list (allowing duplicates), minus items the player already
  # has and glyphs already placed as event glyphs.
  #
  # Fixes vs. original: the refill block's parameter no longer shadows the
  # outer `pickup_global_id` local, and an explicit raise replaces what would
  # have been infinite recursion if the refilled pool still had no valid IDs.
  valid_possible_items = @unplaced_non_progression_pickups.select do |id|
    valid_ids.include?(id)
  end
  pickup_global_id = valid_possible_items.sample(random: rng)
  if pickup_global_id.nil?
    # Ran out of unplaced pickups, so place a duplicate instead.
    @unplaced_non_progression_pickups += all_non_progression_pickups().select do |id|
      valid_ids.include?(id)
    end
    @unplaced_non_progression_pickups -= checker.current_items
    # If a glyph has already been placed as an event glyph, do not place it again somewhere.
    # If the player gets one from a glyph statue first, then the one in the event/puzzle won't appear.
    @unplaced_non_progression_pickups -= @glyphs_placed_as_event_glyphs
    if @unplaced_non_progression_pickups.none?{|id| valid_ids.include?(id)}
      # Even after refilling there is nothing valid left; recursing would loop forever.
      raise "No valid non-progression pickups available for the given valid IDs"
    end
    return get_unplaced_non_progression_pickup(valid_ids: valid_ids)
  end
  @unplaced_non_progression_pickups.delete(pickup_global_id)
  @used_non_progression_pickups << pickup_global_id
  return pickup_global_id
end
def get_unplaced_non_progression_item
  # Non-progression pickup restricted to the item ID range.
  get_unplaced_non_progression_pickup(valid_ids: ITEM_GLOBAL_ID_RANGE.to_a)
end
def get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate
  # Only items whose ID+1 fits an ARM shifted-immediate encoding, for places
  # where the item ID is hardcoded in ARM code.
  ids = ITEM_GLOBAL_ID_RANGE.to_a.select do |item_id|
    game.fs.check_integer_can_be_an_arm_shifted_immediate?(item_id+1)
  end
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_unplaced_non_progression_skill
  # Non-progression pickup restricted to the skill ID range.
  get_unplaced_non_progression_pickup(valid_ids: SKILL_GLOBAL_ID_RANGE.to_a)
end
def get_unplaced_non_progression_item_except_ooe_relics
  # Like get_unplaced_non_progression_item, but in OoE also excludes the relic
  # ID range (0x6F..0x74) — relics need chests, and chests can't be hidden.
  ids = ITEM_GLOBAL_ID_RANGE.to_a
  ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_unplaced_non_progression_projectile_glyph
  # Restricted to the three contiguous runs of projectile glyph IDs.
  ids = [*0x16..0x18, *0x1C..0x32, *0x34..0x36]
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_unplaced_non_progression_pickup_for_enemy_drop
  # Any pickup, except items whose hardcoded effects are too strong as drops.
  ids = PICKUP_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_unplaced_non_progression_item_for_enemy_drop
  # Items only, minus items whose hardcoded effects are too strong as drops.
  ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_unplaced_non_progression_item_except_ooe_relics_for_enemy_drop
  # Enemy-drop items minus OP hardcoded-effect items; in OoE also excludes
  # relics (0x6F..0x74), which must go in chests.
  ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: ids)
end
def get_entity_by_location_str(location)
  # Parses a location string of the form "AA-SS-RR_EE" (area-sector-room and
  # entity index, all hex) and returns the matching entity object.
  #
  # Fixes vs. original: uses MatchData instead of the $1..$4 globals, and
  # raises a clear error on a malformed string (the original crashed with a
  # confusing ArgumentError from `nil.to_i(16)`).
  match = location.match(/^(\h\h)-(\h\h)-(\h\h)_(\h+)$/)
  raise "Invalid location string: #{location}" if match.nil?
  area_index, sector_index, room_index, entity_index = match.captures.map{|cap| cap.to_i(16)}
  room = game.areas[area_index].sectors[sector_index].rooms[room_index]
  room.entities[entity_index]
end
def change_entity_location_to_pickup_global_id(location, pickup_global_id)
  # Rewrites the entity at +location+ so that it grants +pickup_global_id+.
  # pickup_global_id may be an Integer global ID, :money, a villager name
  # symbol (OoE), or a portrait name symbol (PoR); each form is handled by a
  # branch below. Handles hardcoded events, the OoE searchlight chest,
  # villagers, portraits, boss drops, and ordinary chests/ground/hidden
  # pickups. Writes results directly to the ROM.
  entity = get_entity_by_location_str(location)
  if checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location)
    # Event with a hardcoded item/glyph.
    change_hardcoded_event_pickup(entity, pickup_global_id)
    return
  end
  if GAME == "ooe" && location == "08-02-06_01" # Strength Ring blue chest spawned by the searchlights after you kill the Tin Man
    if entity.var_a != 2
      raise "Searchlights are not of type 2 (Tin Man spawn)"
    end
    # The chest's item ID is hardcoded in code as an ARM shifted immediate.
    game.fs.replace_arm_shifted_immediate_integer(0x022A194C, pickup_global_id+1)
  elsif RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villager
    if GAME != "ooe"
      raise "Tried to place villager in #{GAME}"
    end
    # Record the room so no second event gets placed here (events conflict).
    room_str = location[0,8]
    @rooms_that_already_have_an_event << room_str
    entity.type = 2
    entity.subtype = 0x89
    entity.var_a = VILLAGER_NAME_TO_EVENT_FLAG[pickup_global_id]
    entity.var_b = 0
    entity.write_to_rom()
    if pickup_global_id == :villageranna
      # Anna must have Tom in her room, or her event will crash the game.
      room = entity.room
      cat = Entity.new(room, room.fs)
      cat.x_pos = entity.x_pos
      cat.y_pos = entity.y_pos
      cat.type = 2
      cat.subtype = 0x3F
      cat.var_a = 3
      cat.var_b = 1
      room.entities << cat
      room.write_entities_to_rom()
      # Remove the Tom in Anna's original room since he's not needed there.
      original_cat = game.areas[7].sectors[0].rooms[6].entities[2]
      original_cat.type = 0
      original_cat.write_to_rom()
    end
  elsif PORTRAIT_NAMES.include?(pickup_global_id)
    # Portrait
    if GAME != "por"
      raise "Tried to place portrait in #{GAME}"
    end
    portrait_data = PORTRAIT_NAME_TO_DATA[pickup_global_id]
    entity.type = 2
    entity.subtype = portrait_data[:subtype]
    entity.var_a = portrait_data[:var_a]
    entity.var_b = portrait_data[:var_b]
    # Move the portrait to a short distance above the closest floor so it looks good and is enterable.
    coll = RoomCollision.new(entity.room, game.fs)
    floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
    entity_original_y_pos = entity.y_pos
    entity.y_pos = floor_y - 0x50 # Portraits should float 5 tiles off the ground.
    entity.write_to_rom()
    curr_area_index = entity.room.area_index
    curr_sector_index = entity.room.sector_index
    curr_room_index = entity.room.room_index
    # Find the return portrait.
    # var B packs the destination: bits 6-9 = sector index, bits 0-5 = room index.
    dest_area_index = entity.var_a
    dest_sector_index = (entity.var_b & 0x3C0) >> 6
    dest_room_index = entity.var_b & 0x3F
    dest_room = game.areas[dest_area_index].sectors[dest_sector_index].rooms[dest_room_index]
    dest_portrait = dest_room.entities.find{|entity| entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)}
    return_portraits = [dest_portrait]
    # Update the list of x/y positions the player returns at in the por_distinct_return_portrait_positions patch.
    return_x = entity.x_pos
    return_y = floor_y
    game.fs.write(0x02309010+dest_area_index*4, [return_x, return_y].pack("vv"))
    # If there's a small breakable wall containing this portrait we remove it.
    # Not only does the breakable wall not hide the portrait, but when the player returns they would be put out of bounds by it.
    breakable_wall_x_range = (entity.x_pos-8..entity.x_pos+8)
    breakable_wall_y_range = (entity_original_y_pos-8..entity_original_y_pos+8)
    breakable_wall_entity = entity.room.entities.find do |e|
      e.is_special_object? && e.subtype == 0x3B && breakable_wall_x_range.include?(e.x_pos) && breakable_wall_y_range.include?(e.y_pos)
    end
    if breakable_wall_entity
      breakable_wall_entity.type = 0
      breakable_wall_entity.write_to_rom()
    end
    # Also update the bonus return portrait at the end of some areas.
    case dest_area_index
    when 2 # 13th Street
      return_portraits << game.entity_by_str("02-02-16_02")
    when 4 # Forgotten City
      return_portraits << game.entity_by_str("04-01-07_01")
    when 6 # Burnt Paradise
      return_portraits << game.entity_by_str("06-00-06_03")
    when 8 # Dark Academy
      return_portraits << game.entity_by_str("08-00-08_04")
    end
    return_portraits.each do |return_portrait|
      return_portrait.var_a = curr_area_index
      return_portrait.var_b = ((curr_sector_index & 0xF) << 6) | (curr_room_index & 0x3F)
      return_portrait.subtype = case curr_area_index
      when 1, 3, 5, 7 # City of Haze, Sandy Grave, Nation of Fools, or Forest of Doom.
        0x1A
      when 2, 4, 6, 8 # 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
        0x76
      when 0, 9 # Dracula's Castle or Nest of Evil.
        if [2, 4, 6, 8].include?(dest_area_index)
          # Use the alt portrait frame when returning to Dracula's Castle from 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
          0x87
        else
          0x86
        end
      else
        # NOTE(review): if this branch is ever hit, the case expression
        # evaluates to nil (puts returns nil) so subtype would be set to nil.
        # Presumably unreachable for valid area indexes — confirm.
        puts "Unknown area to portrait into: %02X" % curr_area_index
      end
      # Set highest bit of var B to indicate that this is a return portrait to the por_distinct_return_portrait_positions patch.
      return_portrait.var_b = 0x8000 | return_portrait.var_b
      return_portrait.write_to_rom()
      if room_rando?
        # Tell the room rando logic about this return portrait.
        checker.add_return_portrait(return_portrait.room.room_str, location)
      end
    end
    if dest_area_index == 7 # Forest of Doom
      # Remove the event from the original Forest of Doom portrait room since the portrait is no longer there.
      forest_event = game.entity_by_str("00-08-01_03")
      forest_event.type = 0
      forest_event.write_to_rom()
    end
  elsif entity.type == 1
    # Boss
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if !PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      raise "Can't make boss drop required item"
    end
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      enemy_dna = game.enemy_dnas[0x70]
    else
      enemy_dna = game.enemy_dnas[entity.subtype]
    end
    case GAME
    when "dos"
      enemy_dna["Soul"] = item_index
    when "ooe"
      enemy_dna["Glyph"] = pickup_global_id + 1
    else
      raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
    end
    enemy_dna.write_to_rom()
  elsif GAME == "dos" || GAME == "por"
    if GAME == "por" && location == "05-02-0C_01"
      # Cog's location. We always make this location use pickup flag 0x10 since Legion is hardcoded to check that flag, not whether you own the cog.
      pickup_flag = 0x10
      is_cog = true
    else
      pickup_flag = get_unused_pickup_flag_for_entity(entity)
      is_cog = false
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup? || is_cog || rng.rand <= 0.80
        # 80% chance to be a money bag
        # Hidden pickups have to be a bag since chests can't be hidden in a wall.
        # The cog location has to be a bag since chests can't have a pickup flag so they wouldn't be able to activate legion.
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = 1
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      else
        # 20% chance to be a money chest
        entity.type = 2
        entity.subtype = 1
        if GAME == "dos"
          entity.var_a = 0x10
        else
          entity.var_a = [0xE, 0xF, 0x12].sample(random: rng)
        end
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      end
      entity.write_to_rom()
      return
    end
    # Make sure Chaos/Magus Ring isn't easily available.
    if GAME == "dos" && pickup_global_id == 0xCD # Chaos Ring
      entity.type = 2
      entity.subtype = 0x4C # All-souls-owned item
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = pickup_global_id + 1
      entity.write_to_rom()
      return
    elsif GAME == "por" && pickup_global_id == 0x12C # Magus Ring
      entity.type = 6 # All-quests-complete item
      entity.subtype = 7
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = 6
      entity.write_to_rom()
      return
    end
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      case GAME
      when "dos"
        # Soul candle
        entity.type = 2
        entity.subtype = 1
        entity.var_a = 0
        entity.var_b = item_index
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      when "por"
        # Skill
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = item_type
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = item_index
      end
    else
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = item_type
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = item_index
    end
    entity.write_to_rom()
  elsif GAME == "ooe"
    pickup_flag = get_unused_pickup_flag_for_entity(entity)
    # Glyph entities sit 0x20 higher than item entities; temporarily shift down
    # so the position math below is uniform, then shift back at the end.
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos += 0x20
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = 1
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      entity.write_to_rom()
      return
    end
    if (0x6F..0x74).include?(pickup_global_id)
      # Relic. Must go in a chest, if you leave it lying on the ground it won't autoequip.
      entity.type = 2
      entity.subtype = 0x16
      entity.var_a = pickup_global_id + 1
      entity.var_b = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.write_to_rom()
      return
    end
    if pickup_global_id >= 0x6F
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 0xFF
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        case rng.rand
        when 0.00..0.70
          # 70% chance for a red chest
          entity.type = 2
          entity.subtype = 0x16
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        when 0.70..0.95
          # 25% chance for an item on the ground
          entity.type = 4
          entity.subtype = 0xFF
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # 5% chance for a hidden blue chest
          entity.type = 2
          entity.subtype = 0x17
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        end
      end
    else
      # Glyph
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 2
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        puzzle_glyph_ids = [0x1D, 0x1F, 0x20, 0x22, 0x24, 0x26, 0x27, 0x2A, 0x2B, 0x2F, 0x30, 0x31, 0x32, 0x46, 0x4E]
        if puzzle_glyph_ids.include?(pickup_global_id)
          # Free glyph
          entity.type = 4
          entity.subtype = 2
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # Glyph statue
          entity.type = 2
          entity.subtype = 2
          entity.var_a = 0
          entity.var_b = pickup_global_id + 1
          # We didn't use the pickup flag, so put it back
          @unused_pickup_flags << pickup_flag
        end
      end
    end
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos -= 0x20
    end
    entity.write_to_rom()
  end
end
def remove_inaccessible_items(inaccessible_remaining_locations)
  # Blanks out the pickup entities at locations the logic determined can never
  # be reached, so no pickups are wasted there.
  inaccessible_remaining_locations.each do |location|
    entity = get_entity_by_location_str(location)
    # Don't delete inaccessible events/bosses, just in case.
    next if checker.event_locations.include?(location) || entity.type == 1
    entity.type = 0
    entity.write_to_rom()
  end
end
def get_unused_pickup_flag_for_entity(entity)
  # Determines which pickup flag this entity currently uses (depending on its
  # kind), and returns it if it is still free. Otherwise hands out a fresh
  # flag from the unused pool.
  flag =
    if entity.is_item_chest?
      entity.var_b
    elsif entity.is_pickup?
      entity.var_a
    elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4D # Easter egg item
      entity.var_b
    elsif GAME == "dos" && entity.is_special_object? && entity.subtype == 0x4C # All-souls-obtained item
      entity.var_a
    end
  if GAME == "ooe" && (0..0x51).include?(flag)
    # In OoE, these pickup flags are used by glyph statues automatically and we can't control those.
    # Therefore we need to reassign pickups that were free glyphs in the original game a new pickup flag, so it doesn't conflict with where those glyphs (Rapidus Fio and Volaticus) got moved to when randomized.
    flag = nil
  end
  if flag.nil? || @used_pickup_flags.include?(flag)
    flag = @unused_pickup_flags.pop()
    raise "No pickup flag for this item, this error shouldn't happen" if flag.nil?
  end
  flag
end
def get_unused_pickup_flag()
  # Pops a flag off the free pool; raises if the pool is exhausted.
  flag = @unused_pickup_flags.pop()
  raise "No pickup flag for this item, this error shouldn't happen" if flag.nil?
  flag
end
def use_pickup_flag(pickup_flag)
  # Marks the flag as consumed, then drops every consumed flag from the free
  # pool (full subtraction, not just this one flag).
  @used_pickup_flags.push(pickup_flag)
  @unused_pickup_flags = @unused_pickup_flags - @used_pickup_flags
end
def get_entity_skill_drop_by_entity_location(location)
  # Reads back which skill (soul in DoS, glyph in OoE) the enemy at this
  # location drops, as a global skill ID.
  entity = get_entity_by_location_str(location)
  raise "Not an enemy: #{location}" unless entity.type == 1
  enemy_dna =
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      game.enemy_dnas[0x70]
    else
      game.enemy_dnas[entity.subtype]
    end
  skill_local_id =
    case GAME
    when "dos"
      enemy_dna["Soul"]
    when "ooe"
      enemy_dna["Glyph"] - 1
    else
      raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
    end
  skill_local_id + SKILL_GLOBAL_ID_RANGE.begin
end
def get_villager_name_by_entity_location(location)
  # Maps an OoE villager event entity back to its villager name symbol
  # (reverse lookup of VILLAGER_NAME_TO_EVENT_FLAG by the flag in var A).
  entity = get_entity_by_location_str(location)
  unless GAME == "ooe" && entity.type == 2 && entity.subtype == 0x89
    raise "Not a villager: #{location}"
  end
  VILLAGER_NAME_TO_EVENT_FLAG.key(entity.var_a)
end
def get_portrait_name_by_entity_location(location)
  # Maps a PoR portrait entity back to its portrait name symbol, keyed by the
  # destination area index stored in var A.
  entity = get_entity_by_location_str(location)
  unless GAME == "por" && entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
    raise "Not a portrait: #{location}"
  end
  AREA_INDEX_TO_PORTRAIT_NAME[entity.var_a]
end
def change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Dispatches to the per-game handler for events whose reward is hardcoded
  # in the event's code rather than stored on the entity.
  handler =
    case GAME
    when "dos" then :dos_change_hardcoded_event_pickup
    when "por" then :por_change_hardcoded_event_pickup
    when "ooe" then :ooe_change_hardcoded_event_pickup
    end
  send(handler, event_entity, pickup_global_id) if handler
end
def dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Patches the DoS events whose rewards are hardcoded in ARM code instead of
  # stored on the entity: Mina's Talisman (subtype 0x65) and easter egg items
  # (subtype 0x4D). Writes the new item/seal bytes directly into the ROM.
  event_entity.room.sector.load_necessary_overlay()
  if event_entity.subtype == 0x65 # Mina's Talisman
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if (0x3D..0x41).include?(pickup_global_id)
      # Magic seal. These need to call a different function to be properly given.
      seal_index = pickup_global_id - 0x3D
      # Seal given when watching the event
      game.fs.write(0x021CB9F4, [seal_index].pack("C"))
      game.fs.write(0x021CB9FC, [0xEB006ECF].pack("V")) # Call func 021E7540
      # Seal given when skipping the event
      game.fs.write(0x021CBC14, [seal_index].pack("C"))
      game.fs.write(0x021CBC1C, [0xEB006E47].pack("V")) # Call func 021E7540
    else
      # Regular item.
      # Item given when watching the event
      game.fs.write(0x021CB9F4, [item_type].pack("C"))
      game.fs.write(0x021CB9F8, [item_index].pack("C"))
      # Item given when skipping the event
      game.fs.write(0x021CBC14, [item_type].pack("C"))
      game.fs.write(0x021CBC18, [item_index].pack("C"))
    end
    # Item name shown in the corner of the screen when watching the event.
    game.fs.write(0x021CBA08, [item_type].pack("C"))
    game.fs.write(0x021CBA0C, [item_index].pack("C"))
    # Also display the item's name in the corner when skipping the event.
    # We add a few new lines of code in free space for this.
    code = [0xE3A00000, 0xE3A010F0, 0xEBFDB6FD, 0xE1A00005, 0xEA042E64]
    game.fs.write(0x020C027C, code.pack("V*"))
    # NOTE(review): this next write lands at the same address as the code block
    # just written and clobbers its first byte — confirm the intended address
    # (it looks like it should patch an immediate within that code instead).
    game.fs.write(0x020C027C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x021CBC20, [0xEAFBD195].pack("V"))
  elsif event_entity.subtype == 0x4D # Easter egg item
    # Change what item is actually placed into your inventory when you get the easter egg.
    easter_egg_index = event_entity.var_a
    game.fs.write(0x0222BE34 + easter_egg_index*0xC, [pickup_global_id+1].pack("v"))
    # Update the pickup flag.
    pickup_flag = get_unused_pickup_flag_for_entity(event_entity)
    event_entity.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    # Make the easter egg special object use the same palette list as actual item icons, since that gives access to all 3 icon palettes, while the actual object's palette only has the first.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code
    item = game.items[pickup_global_id]
    icon_palette_pointer = 0x022C4684
    game.fs.write(0x021AF5CC, [icon_palette_pointer].pack("V"))
    # High byte of the item's Icon field selects which icon palette it uses.
    icon_palette_index = (item["Icon"] & 0xFF00) >> 8
    sprite = sprite_info.sprite
    sprite.frames[easter_egg_index].parts.first.palette_index = icon_palette_index
    sprite.write_to_rom()
    # Now update the actual item visual on the object's GFX page so it visually shows the correct item.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code # We extract sprite info again to get the updated palette pointer after we changed it.
    gfx = sprite_info.gfx_pages.first
    palettes = renderer.generate_palettes(sprite_info.palette_pointer, 16)
    chunky_image = renderer.render_gfx_page(gfx.file, palettes[icon_palette_index], gfx.canvas_width)
    new_icon = renderer.render_icon_by_item(item)
    # Icons are 16px wide; each easter egg's icon sits at its own 16px column.
    x_offset = 16*easter_egg_index
    y_offset = 0
    chunky_image.replace!(new_icon, x_offset, y_offset)
    renderer.save_gfx_page(chunky_image, gfx, sprite_info.palette_pointer, 16, icon_palette_index)
  end
end
def por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # PoR handler: only loads the sector overlay and makes no other changes.
  # NOTE(review): pickup_global_id is unused here — presumably PoR has no
  # events with hardcoded pickups that need patching; confirm.
  event_entity.room.sector.load_necessary_overlay()
end
def ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  # Patches OoE's hardcoded event glyphs. Depending on the event, either
  # converts it to a plain free glyph (when the event can't be kept working),
  # or writes the new glyph ID and pickup flag directly into the event's code.
  event_entity.room.sector.load_necessary_overlay()
  # Record it so the same glyph won't also be placed somewhere as a statue.
  @glyphs_placed_as_event_glyphs << pickup_global_id
  if event_entity.subtype == 0x8A # Magnes
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it automatically equips Magnes even if the glyph it gives is not Magnes.
    # Changing what it equips would just make the event not work right, so we may as well remove it.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x2B0
    event_entity.write_to_rom()
  elsif event_entity.subtype == 0x69 # Dominus Hatred
    game.fs.write(0x02230A7C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25D8, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x6F # Dominus Anger
    game.fs.write(0x02230A84, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25DC, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x81 # Cerberus
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it has special programming to always spawn them in order even if you get to the locations out of order.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x60
    event_entity.write_to_rom()
    other_cerberus_events = event_entity.room.entities.select{|e| e.is_special_object? && [0x82, 0x83].include?(e.subtype)}
    other_cerberus_events.each do |event|
      # Delete these others, we don't want the events.
      event.type = 0
      event.write_to_rom()
    end
  else
    # Per-event code addresses, in order:
    # [glyph ID write, pickup-flag read, pickup-flag write, optional second pickup-flag read]
    glyph_id_location, pickup_flag_read_location, pickup_flag_write_location, second_pickup_flag_read_location = case event_entity.subtype
    when 0x2F # Luminatio
      [0x022C4894, 0x022C483C, 0x022C4888]
    when 0x3B # Pneuma
      [0x022C28E8, 0x022C2880, 0x022C28DC, 0x022C279C]
    when 0x44 # Lapiste
      [0x022C2CB0, 0x022C2C24, 0x022C2CA0]
    when 0x54 # Vol Umbra
      [0x022C2FBC, 0x022C2F70, 0x022C2FB4]
    when 0x4C # Vol Fulgur
      [0x022C2490, 0x022C2404, 0x022C2480]
    when 0x52 # Vol Ignis
      [0x0221F1A0, 0x0221F148, 0x0221F194]
    when 0x47 # Vol Grando
      [0x022C230C, 0x022C2584, 0x022C22FC]
    when 0x40 # Cubus
      [0x022C31DC]
    when 0x53 # Morbus
      [0x022C2354, 0x022C2318, 0x022C2344]
    when 0x76 # Dominus Agony
      [0x022C25BC]
    else
      return
    end
    # What glyph is actually spawned.
    game.fs.write(glyph_id_location, [pickup_global_id+1].pack("C"))
    if pickup_flag_write_location
      # The pickup flag set when you absorb the glyph.
      # NOTE(review): the flag is derived as glyph global ID + 2 — confirm this
      # matches the game's glyph-to-pickup-flag convention.
      pickup_flag = pickup_global_id+2
      game.fs.write(pickup_flag_write_location, [pickup_flag].pack("C"))
    end
    if pickup_flag_read_location
      # The pickup flag read to decide whether you've completed this puzzle yet or not.
      # This is determined by two lines of code:
      # The first loads the word in the bitfield containing the correct bit (0x20 bits in each word):
      pickup_flag_word_offset = 0x40 + 4*(pickup_flag/0x20)
      game.fs.write(pickup_flag_read_location, [pickup_flag_word_offset].pack("C"))
      game.fs.write(second_pickup_flag_read_location, [pickup_flag_word_offset].pack("C")) if second_pickup_flag_read_location
      # The second does a tst on the exact bit within that word:
      pickup_flag_bit_index = pickup_flag % 0x20
      game.fs.replace_hardcoded_bit_constant(pickup_flag_read_location+4, pickup_flag_bit_index)
      game.fs.replace_hardcoded_bit_constant(second_pickup_flag_read_location+4, pickup_flag_bit_index) if second_pickup_flag_read_location
    end
  end
end
def set_world_map_exit_destination_area(world_map_exit_door_str, entrance_type)
  # Points a world map exit at a new destination area and records the link
  # with the logic checker. entrance_type >= 0 is stored in var A; a negative
  # value means its absolute value goes in var B instead.
  room_str = world_map_exit_door_str[0,8]
  area_exit = game.entity_by_str("#{room_str}_00")
  if entrance_type >= 0
    area_exit.var_a = entrance_type
    area_exit.var_b = 0
  else
    area_exit.var_a = 0
    area_exit.var_b = -entrance_type
  end
  area_exit.write_to_rom()
  entrance_door_str = WORLD_MAP_ENTRANCES[entrance_type]
  puts "Setting world map unlock: #{world_map_exit_door_str} -> #{entrance_door_str}"
  checker.set_world_map_exit_destination_area(world_map_exit_door_str, entrance_door_str)
  # Keep the two Tymeo exits synced to unlock the same area, like vanilla.
  # TODO: consider randomizing these separately in the future.
  if world_map_exit_door_str == "0A-00-0A_000"
    set_world_map_exit_destination_area("0A-00-13_000", entrance_type)
  end
end
end
# Map rando debug code: Output all area maps
module PickupRandomizer
# Maps each randomizable OoE villager (by pickup name symbol) to the event
# flag value written into var A of the villager event entity when placed
# (see change_entity_location_to_pickup_global_id).
VILLAGER_NAME_TO_EVENT_FLAG = {
  :villagerjacob => 0x2A,
  :villagerabram => 0x2D,
  :villageraeon => 0x3C,
  :villagereugen => 0x38,
  :villagermonica => 0x4F,
  :villagerlaura => 0x32,
  :villagermarcel => 0x40,
  :villagerserge => 0x47,
  :villageranna => 0x4B,
  :villagerdaniela => 0x57,
  :villageririna => 0x53,
}
# All villager name symbols the randomizer may place as pickups.
RANDOMIZABLE_VILLAGER_NAMES = VILLAGER_NAME_TO_EVENT_FLAG.keys
# PoR portrait placement data: the special-object subtype of the portrait
# frame, var_a = destination area index, var_b = packed destination
# sector/room (decoded as bits 6-9 / 0-5 in change_entity_location_to_pickup_global_id).
PORTRAIT_NAME_TO_DATA = {
  :portraitcityofhaze => {subtype: 0x1A, var_a: 1, var_b: 0x1A},
  :portraitsandygrave => {subtype: 0x1A, var_a: 3, var_b: 0},
  :portraitnationoffools => {subtype: 0x1A, var_a: 5, var_b: 0x21},
  :portraitforestofdoom => {subtype: 0x1A, var_a: 7, var_b: 0},
  :portraitdarkacademy => {subtype: 0x76, var_a: 8, var_b: 0x46},
  :portraitburntparadise => {subtype: 0x76, var_a: 6, var_b: 0x20},
  :portraitforgottencity => {subtype: 0x76, var_a: 4, var_b: 0},
  :portrait13thstreet => {subtype: 0x76, var_a: 2, var_b: 7},
  :portraitnestofevil => {subtype: 0x86, var_a: 9, var_b: 0},
}
PORTRAIT_NAMES = PORTRAIT_NAME_TO_DATA.keys
# Bidirectional lookups between a portrait's destination area index and its name.
AREA_INDEX_TO_PORTRAIT_NAME = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [data[:var_a], name]
end.to_h
PORTRAIT_NAME_TO_AREA_INDEX = PORTRAIT_NAME_TO_DATA.map do |name, data|
  [name, data[:var_a]]
end.to_h
# Entity location strings ("AA-SS-RR_EE", hex) for each portrait.
# NOTE(review): presumably these are the vanilla locations of the portrait
# entities, used as defaults when portraits aren't randomized — confirm
# against the callers (outside this chunk).
PORTRAIT_NAME_TO_DEFAULT_ENTITY_LOCATION = {
  :portraitcityofhaze => "00-01-00_00",
  :portraitsandygrave => "00-04-12_00",
  :portraitnationoffools => "00-06-01_00",
  :portraitforestofdoom => "00-08-01_02",
  :portraitdarkacademy => "00-0B-00_04",
  :portraitburntparadise => "00-0B-00_03",
  :portraitforgottencity => "00-0B-00_01",
  :portrait13thstreet => "00-0B-00_02",
  :portraitnestofevil => "00-00-05_00",
}
# Door entity strings for the randomizable area exits leading to the world map.
WORLD_MAP_EXITS = [
  #"00-02-1B_000", # Exit from the castle. Don't randomize this.
  "04-00-03_000",
  "05-00-00_000",
  "06-00-0A_000",
  "06-01-00_000",
  "07-00-0E_000",
  "08-02-07_000",
  "09-00-07_000",
  "0A-00-0A_000",
  #"0A-00-13_000", # Alternate exit from Tymeo. Not randomized separately from the other one.
  "0B-00-10_000",
  "0D-00-09_000",
  "0F-00-00_000",
]
# Maps entrance_type values to entrance door strings. Positive keys are stored
# in the area exit's var A; negative keys mean the absolute value is stored in
# var B instead (see set_world_map_exit_destination_area).
WORLD_MAP_ENTRANCES = {
  #3 => "03-00-00_000", # Training Hall. Not randomized because we don't randomize the castle exit.
  6 => "06-00-00_000",
  8 => "08-00-00_000",
  9 => "09-00-00_000", # Lighthouse. My logic has a special case here due to the spikes but it can still be randomized.
  0xA => "0A-00-00_000",
  0xB => "0B-00-00_000",
  0xD => "0D-00-00_000",
  0xE => "0E-00-0C_000",
  0xF => "0F-00-08_000",
  0x10 => "10-01-06_000",
  0x11 => "11-00-00_000",
  -1 => "06-01-09_000", # Lower Kalidus entrance.
  #-2 => "0C-00-00_000", # Large Cavern. Not randomized because we don't randomize the castle exit.
}
WORLD_MAP_ENTRANCES_THAT_LEAD_TO_A_WORLD_MAP_EXIT = [
  # Which entrances to prioritize placing first to avoid running out of accessible exits.
  6,
  8,
  9,
  0xA,
  0xB,
  0xD,
  0xF,
]
# Top-level pickup randomization driver.
# Seeds the completability checker with each game's guaranteed starting items,
# applies a few game-specific ROM/entity patches that the item logic depends on,
# then places all progression pickups and verifies the seed is beatable.
# Yields a progress fraction (0.0..1.0) to the block as pickups are placed.
# Raises if the finished seed is somehow not beatable (indicates a logic bug).
def randomize_pickups_completably(&block)
spoiler_log.puts "Randomizing pickups:"
case GAME
when "dos"
# Items every DoS player starts with, so the logic can count on them.
checker.add_item(0x43) # knife
checker.add_item(0x91) # casual clothes
checker.add_item(0x3D) # seal 1
if options[:unlock_boss_doors]
# With boss doors unlocked, the remaining seals are effectively free too.
checker.add_item(0x3E) # seal 2
checker.add_item(0x3F) # seal 3
checker.add_item(0x40) # seal 4
checker.add_item(0x41) # seal 5
end
when "por"
# Items every PoR player starts with.
checker.add_item(0x61) # starting vampire killer
checker.add_item(0x6C) # encyclopedia
checker.add_item(0xAA) # casual clothes
checker.add_item(0x1AD) # call cube
if options[:dont_randomize_change_cube]
checker.add_item(0x1AC) # change cube
unless room_rando? # In room rando this item is placed in the player's starting room instead.
change_entity_location_to_pickup_global_id("00-00-01_01", 0x1AC)
end
else
# If the player doesn't start with change cube, give them skill cube instead so they can still use Charlotte's spells.
checker.add_item(0x1AE) # skill cube
unless room_rando? # In room rando this item is placed in the player's starting room instead.
change_entity_location_to_pickup_global_id("00-00-01_01", 0x1AE)
end
end

# In the corridor where Behemoth chases you, change the code of the platform to not permanently disappear.
# This is so the player can't get stuck if they miss an important item up there.
# (The write replaces an instruction with an unconditional ARM branch, 0xEA......)
game.fs.load_overlay(79)
game.fs.write(0x022EC638, [0xEA000003].pack("V"))

# Room in Sandy Grave that has two overlapping Charm Necklaces.
# We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
# Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
item_a = game.areas[3].sectors[0].rooms[0x13].entities[0]
item_b = game.areas[3].sectors[0].rooms[0x13].entities[1]
item_a.x_pos = 0x120
item_b.x_pos = 0x140
pickup_flag = get_unused_pickup_flag()
item_b.var_a = pickup_flag
use_pickup_flag(pickup_flag)
item_a.write_to_rom()
item_b.write_to_rom()
when "ooe"
# Items every OoE player starts with (plus torpor, which is plentiful anyway).
checker.add_item(0xE6) # casual clothes
checker.add_item(0x6F) # lizard tail
checker.add_item(0x72) # glyph union
checker.add_item(0x1E) # torpor. the player will get enough of these as it is

# Give the player the glyph sleeve in Ecclesia like in hard mode.
# To do this just get rid of the entity hider that hides it on normal mode.
entity_hider = game.areas[2].sectors[0].rooms[4].entities[6]
entity_hider.type = 0
entity_hider.write_to_rom()
# But we also need to give the chest a unique flag, because it shares the flag with the one from Minera in normal mode.
sleeve_chest = game.areas[2].sectors[0].rooms[4].entities[7]
pickup_flag = get_unused_pickup_flag()
sleeve_chest.var_b = pickup_flag
use_pickup_flag(pickup_flag)
sleeve_chest.write_to_rom()
# We also make sure the chest in Minera appears even on hard mode.
entity_hider = game.areas[8].sectors[2].rooms[7].entities[1]
entity_hider.type = 0
entity_hider.write_to_rom()
checker.add_item(0x73) # glyph sleeve

# Room in the Final Approach that has two overlapping chests both containing diamonds.
# We don't want these to overlap as the player could easily think it's just one item and not see the one beneath it.
# Move one a bit to the left and the other a bit to the right. Also give one a different pickup flag.
chest_a = game.areas[0].sectors[0xA].rooms[0xB].entities[1]
chest_b = game.areas[0].sectors[0xA].rooms[0xB].entities[2]
chest_a.x_pos = 0xE0
chest_b.x_pos = 0x130
pickup_flag = get_unused_pickup_flag()
chest_b.var_b = pickup_flag
use_pickup_flag(pickup_flag)
chest_a.write_to_rom()
chest_b.write_to_rom()
end

# Place every progression pickup, forwarding progress to our caller as a fraction.
total_progression_pickups = checker.all_progression_pickups.length
place_progression_pickups() do |progression_pickups_placed|
percent_done = progression_pickups_placed.to_f / total_progression_pickups
yield percent_done
end

# Sanity check: after all placements, the checker must report the game beatable.
if !checker.game_beatable?
#if options[:randomize_rooms_map_friendly]
#  # When debugging logic errors in map rando, output a list of what room strings were accessible at the end.
#  File.open("./logs/accessed rooms debug #{GAME} #{seed}.txt", "w") do |f|
#    for room_str in @rooms_by_progression_order_accessed
#      f.puts(room_str)
#    end
#  end
#
#  # And also output an image of the map with accessible rooms highlighted in red.
#  unique_rooms_accessed = @rooms_by_progression_order_accessed.flatten.uniq
#  game.areas.each_index do |area_index|
#    map = game.get_map(area_index, 0)
#    for tile in map.tiles
#      if tile.sector_index.nil? || tile.room_index.nil?
#        next
#      end
#      room_str_for_tile = "%02X-%02X-%02X" % [area_index, tile.sector_index, tile.room_index]
#      if unique_rooms_accessed.include?(room_str_for_tile)
#        tile.is_save = true
#        tile.is_warp = false
#        tile.is_entrance = false
#      else
#        tile.is_save = false
#        tile.is_warp = false
#        tile.is_entrance = false
#      end
#    end
#    hardcoded_transition_rooms = (GAME == "dos" ? @transition_rooms : [])
#    filename = "./logs/map debug #{GAME} area %02X #{seed}.png" % area_index
#    renderer.render_map(map, scale=3, hardcoded_transition_rooms=hardcoded_transition_rooms).save(filename)
#  end
#end

# Build a readable item list for the error message (symbols are pseudo-items
# like villagers/portraits; integers are looked up in the item definitions).
item_names = checker.current_items.map do |global_id|
if global_id.is_a?(Symbol)
global_id
else
checker.defs.invert[global_id]
end
end.compact
raise "Bug: Game is not beatable on this seed!\nThis error shouldn't happen.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
end

if GAME == "por" && options[:randomize_portraits]
# Remove the extra portraits at the end of 13th Street, Forgotten City, Burnt Paradise, and Dark Academy.
# (The one return portrait back to where you entered this portrait from is not removed, and is updated elsewhere in the code.)
[
"02-02-16_01",
"02-02-16_03",
"02-02-16_04",
"04-01-07_02",
"04-01-07_03",
"04-01-07_04",
"06-00-06_01",
"06-00-06_02",
"06-00-06_04",
"08-00-08_01",
"08-00-08_02",
"08-00-08_03",
].each do |entity_str|
portrait = game.entity_by_str(entity_str)
portrait.type = 0
portrait.write_to_rom()
end
end
end
# Places all progression-critical pickups (items, skills, villagers, portraits)
# one at a time, re-running the completability checker between placements so the
# seed stays beatable at every step.
# Yields the running count of progression pickups placed after each placement.
# Raises if it ever runs out of valid spots or (OoE) world map entrances.
#
# Bugfix in this revision: the "not many new locations" reroll chance used a
# multiplier of 10 instead of 0.10, so for 1-4 new locations the computed chance
# exceeded 1.0 and the reroll ALWAYS happened, contradicting the documented
# 70%/60%/50%/40%/30% odds. Also, the two region-filtering map blocks no longer
# shadow (and clobber) the outer possible_locations local.
def place_progression_pickups(&block)
  previous_accessible_locations = []
  @locations_randomized_to_have_useful_pickups = []
  @rooms_that_already_have_an_event = []
  progression_pickups_placed = 0
  total_progression_pickups = checker.all_progression_pickups.length
  on_leftovers = false
  @rooms_by_progression_order_accessed = []
  world_map_exits_randomized = []
  world_map_entrances_used = []

  # Record rooms that already contain an event object (subtypes 0x5F..0x88) so
  # we never place a villager in one - two events in a room conflict.
  game.each_room do |room|
    room.entities.each do |entity|
      if entity.is_special_object? && (0x5F..0x88).include?(entity.subtype)
        room_str = "%02X-%02X-%02X" % [room.area_index, room.sector_index, room.room_index]
        @rooms_that_already_have_an_event << room_str
        break
      end
    end
  end

  if GAME == "por" && options[:randomize_starting_room] && options[:randomize_portraits]
    starting_portrait_name = AREA_INDEX_TO_PORTRAIT_NAME[@starting_room.area_index]
    if starting_portrait_name
      # The starting room randomizer started the player in a portrait.
      # This is problematic because the portrait randomizer will traditionally never place a portrait back to Dracula's castle, making it inaccessible.
      # So we need to place the starting portrait at a random location in Dracula's Castle and register it with the logic.

      # First pick a random valid location.
      possible_portrait_locations = checker.all_locations.keys
      possible_portrait_locations = filter_locations_valid_for_pickup(possible_portrait_locations, starting_portrait_name)
      unused_room_strs = @unused_rooms.map{|room| room.room_str}
      possible_portrait_locations.reject! do |location|
        room_str = location[0,8]
        unused_room_strs.include?(room_str)
      end
      possible_portrait_locations.select! do |location|
        area_index = location[0,2].to_i(16)
        area_index == 0
      end
      starting_portrait_location_in_castle = possible_portrait_locations.sample(random: rng)

      # Then place the portrait.
      change_entity_location_to_pickup_global_id(starting_portrait_location_in_castle, starting_portrait_name)
      @locations_randomized_to_have_useful_pickups << starting_portrait_location_in_castle
    end
  end

  verbose = false

  # First place progression pickups needed to beat the game.
  spoiler_log.puts "Placing main route progression pickups:"
  on_first_item = true
  while true
    case GAME
    when "por"
      if !checker.current_items.include?(0x1B2) && checker.wind_accessible? && checker.vincent_accessible?
        checker.add_item(0x1B2) # give lizard tail if the player has reached wind
      end
    end

    # Figure out every location (and, for room rando, door) currently reachable.
    if room_rando?
      possible_locations, accessible_doors = checker.get_accessible_locations_and_doors()
      accessible_rooms = accessible_doors.map{|door_str| door_str[0,8]}
      @rooms_by_progression_order_accessed << accessible_rooms
    else
      possible_locations = checker.get_accessible_locations()
    end
    possible_locations -= @locations_randomized_to_have_useful_pickups
    puts "Total possible locations: #{possible_locations.size}" if verbose

    pickups_by_locations = checker.pickups_by_current_num_locations_they_access()
    if starting_portrait_name
      # Don't place the starting portrait anywhere, it's already in Dracula's Castle.
      pickups_by_locations.delete(starting_portrait_name)
    end

    if GAME == "por" && options[:randomize_portraits] && (!room_rando? || !options[:rebalance_enemies_in_room_rando])
      # If portraits are randomized but we can't rebalance enemies, try to avoid placing late game portraits in the early game.
      if progression_pickups_placed < 5
        pickups_by_locations_filtered = pickups_by_locations.reject do |pickup, usefulness|
          [:portraitdarkacademy, :portraitburntparadise, :portraitforgottencity, :portrait13thstreet].include?(pickup)
        end
        if pickups_by_locations_filtered.any?
          pickups_by_locations = pickups_by_locations_filtered
        end
      end
    end

    pickups_by_usefulness = pickups_by_locations.select{|pickup, num_locations| num_locations > 0}
    currently_useless_pickups = pickups_by_locations.select{|pickup, num_locations| num_locations == 0}
    puts "Num useless pickups: #{currently_useless_pickups.size}" if verbose
    placing_currently_useless_pickup = false
    if pickups_by_usefulness.any?
      max_usefulness = pickups_by_usefulness.values.max
      weights = pickups_by_usefulness.map do |pickup, usefulness|
        # Weight less useful pickups as being more likely to be chosen.
        weight = max_usefulness - usefulness + 1
        weight = Math.sqrt(weight)
        if checker.preferences[pickup]
          weight *= checker.preferences[pickup]
        end
        weight
      end
      ps = weights.map{|w| w.to_f / weights.reduce(:+)}
      useful_pickups = pickups_by_usefulness.keys
      weighted_useful_pickups = useful_pickups.zip(ps).to_h
      # Weighted random selection without replacement (exponential-keys trick).
      pickup_global_id = weighted_useful_pickups.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first

      weighted_useful_pickups_names = weighted_useful_pickups.map do |global_id, weight|
        "%.2f %s" % [weight, checker.defs.invert[global_id]]
      end
      #puts "Weighted less useful pickups: [" + weighted_useful_pickups_names.join(", ") + "]"
    elsif pickups_by_locations.any? && checker.game_beatable?
      # The player can access all locations.
      # So we just randomly place one progression pickup.
      if !on_leftovers
        spoiler_log.puts "Placing leftover progression pickups:"
        on_leftovers = true
      end
      pickup_global_id = pickups_by_locations.keys.sample(random: rng)
    elsif pickups_by_locations.any?
      # No locations can access new areas, but the game isn't beatable yet.
      # This means any new areas will need at least two new items to access.
      # So just place a random pickup for now.
      valid_pickups = pickups_by_locations.keys
      if GAME == "ooe" && options[:randomize_villagers]
        valid_villagers = valid_pickups & RANDOMIZABLE_VILLAGER_NAMES
        if checker.albus_fight_accessible?
          if valid_villagers.any?
            # Once Albus is accessible, prioritize placing villagers over other pickups.
            valid_pickups = valid_villagers
          end
        else
          # Don't start placing villagers until Albus is accessible.
          valid_pickups -= RANDOMIZABLE_VILLAGER_NAMES
        end
        if valid_pickups.empty?
          # But if the only things left to place are villagers, we have no choice but to place them before Albus is accessible.
          valid_pickups = pickups_by_locations.keys
        end
      elsif GAME == "dos" && room_rando? && accessible_rooms.include?("00-06-00")
        # Player has access to the Subterranean Hell room with the huge spikes.
        # To get through this room you need either rahab and bone ark or rahab, puppet master, and skeleton ape.
        # The logic can have trouble placing the items necessary to get through this room, since skeleton ape and bone ark are useless everywhere else, and rahab is only useful in a handful of rooms - so if the player doesn't have access to any places that make rahab useful by itself, the randomizer might just try to place every other item, filling up all available item locations, and never place rahab.
        # So we add a special case here to 100% guaranteed place rahab (assuming the player has access to under 15 item locations). From there the randomizer can figure out that it should place bone ark or puppet master and skeleton ape.
        if valid_pickups.include?(0x145) && possible_locations.length < 15
          valid_pickups = [0x145] # Rahab
        end
      end
      pickup_global_id = valid_pickups.sample(random: rng)
      placing_currently_useless_pickup = true
      puts "Placing currently useless pickup." if verbose
    else
      # All progression pickups placed.
      break
    end

    pickup_name = checker.defs.invert[pickup_global_id].to_s
    puts "Trying to place #{pickup_name}" if verbose

    if !options[:randomize_boss_souls]
      # If randomize boss souls option is off, don't allow putting random things in these locations.
      accessible_unused_boss_locations = possible_locations & checker.enemy_locations
      accessible_unused_boss_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location

        # Also, give the player what this boss drops so the checker takes this into account.
        pickup_global_id = get_entity_skill_drop_by_entity_location(location)
        checker.add_item(pickup_global_id)
      end
      next if accessible_unused_boss_locations.length > 0
    end

    if !options[:randomize_villagers] && GAME == "ooe"
      # If randomize villagers option is off, don't allow putting random things in these locations.
      accessible_unused_villager_locations = possible_locations & checker.villager_locations
      accessible_unused_villager_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location

        # Also, give the player this villager so the checker takes this into account.
        villager_name = get_villager_name_by_entity_location(location)
        checker.add_item(villager_name)
      end
      next if accessible_unused_villager_locations.length > 0
    end

    if !options[:randomize_portraits] && GAME == "por"
      # If randomize portraits option is off, don't allow putting random things in these locations.
      accessible_unused_portrait_locations = possible_locations & checker.portrait_locations
      accessible_unused_portrait_locations -= @portrait_locations_to_remove # Don't count removed portraits in short mode as portrait locations.
      accessible_unused_portrait_locations.each do |location|
        possible_locations.delete(location)
        @locations_randomized_to_have_useful_pickups << location

        # Also, give the player this portrait so the checker takes this into account.
        portrait_name = get_portrait_name_by_entity_location(location)
        checker.add_item(portrait_name)
      end
      next if accessible_unused_portrait_locations.length > 0
    end

    if GAME == "ooe" && options[:randomize_world_map_exits] && !options[:open_world_map] && room_rando?
      # Randomize world map exits.
      unused_accessible_exits = (accessible_doors & WORLD_MAP_EXITS) - world_map_exits_randomized
      if unused_accessible_exits.any?
        while unused_accessible_exits.any?
          world_map_exit = unused_accessible_exits.sample(random: rng)
          unused_accessible_exits.delete(world_map_exit)

          unused_entrances = WORLD_MAP_ENTRANCES.keys - world_map_entrances_used
          possible_entrances = unused_entrances
          if unused_accessible_exits.empty?
            # We're on the last accessible exit.
            # We need to prioritize placing entrances that lead to more exits.
            # Otherwise we would exhaust all the remaining exits and the player would have no way to progress.
            # (Unless this is the very last exit overall - in that case it's fine that we exhaust the last one.)
            possible_entrances_that_lead_to_a_new_exit = unused_entrances & WORLD_MAP_ENTRANCES_THAT_LEAD_TO_A_WORLD_MAP_EXIT
            if possible_entrances_that_lead_to_a_new_exit.any?
              possible_entrances = possible_entrances_that_lead_to_a_new_exit
            end
          end
          if possible_entrances.empty?
            raise "Ran out of world map entrances to make world map exits unlock!"
          end
          entrance = possible_entrances.sample(random: rng)

          set_world_map_exit_destination_area(world_map_exit, entrance)
          world_map_exits_randomized << world_map_exit
          world_map_entrances_used << entrance
        end

        next # Redo this progression placement loop with the world map entrances now set.
      end
    end

    new_possible_locations = possible_locations - previous_accessible_locations.flatten
    filtered_new_possible_locations = filter_locations_valid_for_pickup(new_possible_locations, pickup_global_id)
    puts "Filtered new possible locations: #{filtered_new_possible_locations.size}" if verbose
    puts " " + filtered_new_possible_locations.join(", ") if verbose

    # For each previously-seen accessible region, keep only the spots still free
    # and valid for this pickup; drop regions with nothing left.
    # (Block locals renamed so they no longer shadow/clobber possible_locations.)
    valid_previous_accessible_regions = previous_accessible_locations.map do |previous_accessible_region|
      region_locations = previous_accessible_region.dup
      region_locations -= @locations_randomized_to_have_useful_pickups
      region_locations = filter_locations_valid_for_pickup(region_locations, pickup_global_id)
      region_locations = nil if region_locations.empty?
      region_locations
    end.compact

    possible_locations_to_choose_from = filtered_new_possible_locations.dup

    if placing_currently_useless_pickup
      # Place items that don't immediately open up new areas anywhere in the game, with no weighting towards later areas.
      valid_accessible_locations = previous_accessible_locations.map do |previous_accessible_region|
        region_locations = previous_accessible_region.dup
        region_locations -= @locations_randomized_to_have_useful_pickups
        region_locations = filter_locations_valid_for_pickup(region_locations, pickup_global_id)
        region_locations = nil if region_locations.empty?
        region_locations
      end.compact.flatten
      valid_accessible_locations += filtered_new_possible_locations
      possible_locations_to_choose_from = valid_accessible_locations
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.any?
      # No new locations, so select an old location.
      if on_leftovers
        # Just placing a leftover progression pickup.
        # Weighted to be more likely to select locations you got access to later rather than earlier.
        i = 1
        weights = valid_previous_accessible_regions.map do |region|
          # Weight later accessible regions as more likely than earlier accessible regions (exponential)
          weight = i**2
          i += 1
          weight
        end
        ps = weights.map{|w| w.to_f / weights.reduce(:+)}
        weighted_accessible_regions = valid_previous_accessible_regions.zip(ps).to_h
        previous_accessible_region = weighted_accessible_regions.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first

        possible_locations_to_choose_from = previous_accessible_region
      else
        # Placing a main route progression pickup, just not one that immediately opens up new areas.
        # Always place in the most recent accessible region.
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "No new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    elsif filtered_new_possible_locations.empty? && valid_previous_accessible_regions.empty?
      # No new locations, but there's no old locations either.
      if on_first_item
        # If we're placing the very first item yet there's no accessible spots, then the room/map randomizer must have resulted in a bad start.
        # So we place the first progression item in the starting room.
        entity = @starting_room.add_new_entity()
        entity.x_pos = @starting_x_pos
        entity.y_pos = @starting_y_pos
        @coll = RoomCollision.new(@starting_room, game.fs)
        floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
        entity.y_pos = floor_y - 0x18
        location = "#{@starting_room.room_str}_%02X" % (@starting_room.entities.length-1)
        possible_locations_to_choose_from = [location]
      else
        possible_locations_to_choose_from = []
      end
    elsif filtered_new_possible_locations.size <= 5 && valid_previous_accessible_regions.last && valid_previous_accessible_regions.last.size >= 15
      # There aren't many new locations unlocked by the last item we placed.
      # But there are a lot of other locations unlocked by the one we placed before that.
      # So we give it a chance to put it in one of those last spots, instead of the new spots.
      # The chance is proportional to how few new locations there are. 1 = 70%, 2 = 60%, 3 = 50%, 4 = 40%, 5 = 30%.
      # Bugfix: the multiplier was 10 instead of 0.10, which made the chance
      # exceed 1.0 (always reroll) whenever there were fewer than 5 new locations.
      chance = 0.30 + (5-filtered_new_possible_locations.size)*0.10
      if rng.rand() <= chance
        possible_locations_to_choose_from = valid_previous_accessible_regions.last
        puts "Not many new locations, using previous accessible location, total available: #{valid_previous_accessible_regions.last.size}" if verbose
      end
    end

    previous_accessible_locations << new_possible_locations

    if possible_locations_to_choose_from.empty?
      item_names = checker.current_items.map do |global_id|
        checker.defs.invert[global_id]
      end.compact
      raise "Bug: Failed to find any spots to place pickup.\nSeed: #{@seed}\n\nItems:\n#{item_names.join(", ")}"
    end

    #puts "Possible locations: #{possible_locations_to_choose_from.join(", ")}" if verbose
    location = possible_locations_to_choose_from.sample(random: rng)
    @locations_randomized_to_have_useful_pickups << location

    if room_rando?
      checker.set_current_location_by_entity(location)
    end

    # Build a human-readable description of the pickup for the spoiler log.
    if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
      # Villager
      pickup_str = "villager #{pickup_global_id}"
    elsif PORTRAIT_NAMES.include?(pickup_global_id)
      # Portrait
      pickup_str = "portrait #{pickup_global_id.to_s[8..-1]}" # Add a space between portrait and the area name
    else
      pickup_name = checker.defs.invert[pickup_global_id].to_s
      pickup_str = "pickup %04X (#{pickup_name})" % pickup_global_id
    end
    location =~ /^(\h\h)-(\h\h)-(\h\h)_(\h+)$/
    area_index, sector_index, room_index, entity_index = $1.to_i(16), $2.to_i(16), $3.to_i(16), $4.to_i(16)
    if SECTOR_INDEX_TO_SECTOR_NAME[area_index]
      area_name = SECTOR_INDEX_TO_SECTOR_NAME[area_index][sector_index]
    else
      area_name = AREA_INDEX_TO_AREA_NAME[area_index]
    end
    is_enemy_str = checker.enemy_locations.include?(location) ? " (boss)" : ""
    is_event_str = checker.event_locations.include?(location) ? " (event)" : ""
    is_easter_egg_str = checker.easter_egg_locations.include?(location) ? " (easter egg)" : ""
    is_hidden_str = checker.hidden_locations.include?(location) ? " (hidden)" : ""
    is_mirror_str = checker.mirror_locations.include?(location) ? " (mirror)" : ""
    spoiler_str = " Placing #{pickup_str} at #{location}#{is_enemy_str}#{is_event_str}#{is_easter_egg_str}#{is_hidden_str}#{is_mirror_str} (#{area_name})"
    spoiler_log.puts spoiler_str
    puts spoiler_str if verbose

    change_entity_location_to_pickup_global_id(location, pickup_global_id)
    checker.add_item(pickup_global_id)
    on_first_item = false

    if room_rando? && GAME == "ooe"
      # Nikolai and George are granted automatically once their doors are reachable.
      if accessible_doors.include?("01-01-00_000") && !checker.current_items.include?(:villagernikolai)
        checker.add_item(:villagernikolai)
      end
      if accessible_doors.include?("11-00-08_000") && !checker.current_items.include?(:villagergeorge)
        checker.add_item(:villagergeorge)
      end
    end

    progression_pickups_placed += 1
    yield(progression_pickups_placed)
  end

  if room_rando? && false # Disabled debug dump of the final accessible door list.
    File.open("accessible_doors.txt", "w") do |f|
      accessible_doors.each do |accessible_door|
        f.puts accessible_door
      end
    end
  end

  if GAME == "ooe" && options[:randomize_world_map_exits]
    # Every world map exit and entrance must have been consumed by now.
    unused_exits = WORLD_MAP_EXITS - world_map_exits_randomized
    unused_entrances = WORLD_MAP_ENTRANCES.keys - world_map_entrances_used
    puts "Unused world map exits: #{unused_exits.join(", ")}"
    puts "Unused world map entrances: #{unused_entrances.join(", ")}"
    if unused_exits.any? && unused_entrances.any?
      raise "Error: There are unplaced world map exits and entrances:\nExits: #{unused_exits.join(", ")}\nEntrances: #{unused_entrances.join(", ")}"
    elsif unused_exits.any?
      raise "Error: There are unplaced world map exits: #{unused_exits.join(", ")}"
    elsif unused_entrances.any?
      raise "Error: There are unplaced world map entrances: #{unused_entrances.join(", ")}"
    end
  end

  spoiler_log.puts "All progression pickups placed successfully."
end
# Fills every remaining reachable location with a non-progression pickup
# (money, items, skills, max ups), after deleting items from unreachable
# locations. Must run after place_progression_pickups has populated
# @locations_randomized_to_have_useful_pickups.
def place_non_progression_pickups
remaining_locations = checker.get_accessible_locations() - @locations_randomized_to_have_useful_pickups
remaining_locations.shuffle!(random: rng)
# In room rando, some items may be unreachable.
# We don't want the player to see these items in a different subroom and think the randomizer is bugged, so we delete them.
inaccessible_remaining_locations = checker.all_locations.keys - @locations_randomized_to_have_useful_pickups - remaining_locations
remove_inaccessible_items(inaccessible_remaining_locations)
if GAME == "ooe"
# Do event glyphs first. This is so they don't reuse a glyph already used by a glyph statue.
# If the player got the one from the glyph statue first then the one in the event/puzzle wouldn't appear, breaking the event/puzzle.
ooe_event_glyph_locations = remaining_locations.select{|location| checker.event_locations.include?(location)}
ooe_event_glyph_locations.each do |location|
pickup_global_id = get_unplaced_non_progression_skill()
change_entity_location_to_pickup_global_id(location, pickup_global_id)
end
remaining_locations -= ooe_event_glyph_locations
end
# DoS/PoR: the first eligible location gets the Chaos Ring, placed exactly once.
chaos_ring_placed = false
remaining_locations.each_with_index do |location, i|
if checker.enemy_locations.include?(location)
# Boss
pickup_global_id = get_unplaced_non_progression_skill()
elsif ["dos", "por"].include?(GAME) && (checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location))
# Event item
pickup_global_id = get_unplaced_non_progression_item()
elsif GAME == "ooe" && location == "08-02-06_01"
# Tin man's strength ring blue chest. Can't be a glyph.
pickup_global_id = get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate()
elsif GAME == "dos" && checker.mirror_locations.include?(location)
# Soul candles shouldn't be placed in mirrors, as they will appear even outside the mirror.
pickup_global_id = get_unplaced_non_progression_item()
elsif GAME == "dos" && !chaos_ring_placed
pickup_global_id = 0xCD # chaos ring (DoS)
chaos_ring_placed = true
elsif GAME == "por" && !chaos_ring_placed
pickup_global_id = 0x12C # chaos ring (PoR)
chaos_ring_placed = true
else
# Pickup
# Select the type of pickup weighed by difficulty options.
weights = {
money: @difficulty_settings[:money_placement_weight],
item: @difficulty_settings[:item_placement_weight],
}
if GAME == "por" || GAME == "ooe"
weights[:max_up] = @difficulty_settings[:max_up_placement_weight]
end
case GAME
when "dos"
weights[:skill] = @difficulty_settings[:soul_candle_placement_weight]
when "por"
weights[:skill] = @difficulty_settings[:por_skill_placement_weight]
when "ooe"
weights[:skill] = @difficulty_settings[:glyph_placement_weight]
end
# Normalize the weights, then do a weighted random pick (exponential-keys trick).
weighted_pickup_types = {}
weights_sum = weights.values.reduce(:+)
weights.each do |type, weight|
weighted_pickup_types[type] = weight.to_f / weights_sum
end
random_pickup_type = weighted_pickup_types.max_by{|_, weight| rng.rand ** (1.0 / weight)}.first
case random_pickup_type
when :money
pickup_global_id = :money
when :max_up
pickup_global_id = @max_up_items.sample(random: rng)
when :skill
pickup_global_id = get_unplaced_non_progression_skill()
when :item
if checker.hidden_locations.include?(location)
# Don't let relics be inside breakable walls in OoE.
# This is because they need to be inside a chest, and chests can't be hidden.
pickup_global_id = get_unplaced_non_progression_item_except_ooe_relics()
else
pickup_global_id = get_unplaced_non_progression_item()
end
end
end
@used_non_progression_pickups << pickup_global_id
change_entity_location_to_pickup_global_id(location, pickup_global_id)
end
end
# Computes and caches the full list of non-progression pickup IDs:
# all pickups minus progression pickups, non-randomizable IDs, max-up items,
# and (when rooms or PoR portraits are randomized) the magical ticket.
# May only be called once; raises if the cache is already populated.
def initialize_all_non_progression_pickups
  unless @all_non_progression_pickups.nil?
    raise "all_non_progression_pickups was initialized too early."
  end

  pickups = PICKUP_GLOBAL_ID_RANGE.to_a
  pickups -= checker.all_progression_pickups
  pickups -= NONRANDOMIZABLE_PICKUP_GLOBAL_IDS
  pickups -= @max_up_items
  if room_rando? || (GAME == "por" && options[:randomize_portraits])
    # Magical tickets could let the player warp out of places the logic
    # doesn't expect, so exclude them in these modes.
    pickups -= [MAGICAL_TICKET_GLOBAL_ID]
  end

  @all_non_progression_pickups = pickups
end
# Returns a copy of locations with every spot removed that cannot legally hold
# the given progression pickup (item ID, skill ID, villager symbol, or portrait
# symbol). Only called for progression pickups. Does not mutate the argument.
def filter_locations_valid_for_pickup(locations, pickup_global_id)
locations = locations.dup
if ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# If the pickup is an item instead of a skill, don't let bosses drop it.
locations -= checker.enemy_locations
end
# Don't let progression items be in certain problematic locations. (This function is only called for progression items.)
locations -= checker.no_progression_locations
if GAME == "dos" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't let events give you souls in DoS.
locations -= checker.event_locations
locations -= checker.easter_egg_locations
# Don't let soul candles be inside mirrors. They don't get hidden, and are accessible without Paranoia.
locations -= checker.mirror_locations
# Don't let soul candles be inside specific locations that can be broken without reaching them.
locations -= checker.no_soul_locations
end
if GAME == "dos" && (0x3D..0x41).include?(pickup_global_id)
# Magic seals can't be given by easter egg locations.
locations -= checker.easter_egg_locations
end
if GAME == "ooe" && ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't let events give you items in OoE.
locations -= checker.event_locations
end
if GAME == "ooe" && !ITEM_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Glyphs/villagers can't be in the special blue chest spawned by the searchlights when you kill a Tin Man.
locations -= ["08-02-06_01"]
end
if GAME == "ooe" && (!pickup_global_id.is_a?(Integer) || !game.fs.check_integer_can_be_an_arm_shifted_immediate?(pickup_global_id))
# The pickup ID is a hardcoded arm shifted immediate for the special blue chest spawned by the searchlights when you kill a Tin Man.
locations -= ["08-02-06_01"]
end
if GAME == "ooe" && (0x6F..0x74).include?(pickup_global_id)
# Don't let relics be inside breakable walls in OoE.
# This is because they need to be inside a chest, and chests can't be hidden.
locations -= checker.hidden_locations
end
if RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
# Villagers can't be hidden, an event glyph, or a boss drop.
locations -= checker.hidden_locations
locations -= checker.event_locations
locations -= checker.enemy_locations
# Locations too close to the top of the room shouldn't be villagers, as the Torpor glyph would spawn above the screen and not be absorbable.
locations_too_high_to_be_a_villager = ["00-05-07_01", "00-05-07_02", "00-05-08_02", "00-05-08_03", "00-05-0C_01", "00-06-09_00", "0D-00-04_00", "0D-00-0C_00"]
locations -= locations_too_high_to_be_a_villager
# Two villagers shouldn't be placed in the same room, or their events will conflict and not work correctly.
locations.select! do |location|
room_str = location[0,8]
!@rooms_that_already_have_an_event.include?(room_str)
end
end
if PORTRAIT_NAMES.include?(pickup_global_id)
# Don't put portraits in certain rooms that, when you return to this room via the return portrait, would put you out of bounds.
bad_portrait_rooms = [
"03-00-05",
"03-00-06",
"03-00-07",
"03-00-08",
"03-00-09",
"03-00-0A",
"04-00-05",
"04-00-06",
"04-00-07",
"04-00-08",
"04-00-09",
"04-00-0A",
"05-01-01",
"05-01-14",
"06-01-14",
"00-05-02", # This great stairway room doesn't put you out of bounds, but does let you sequence break the button lock and needing height.
"00-05-04", # This great stairway room doesn't put you out of bounds, but does let you sequence break needing height.
"05-02-0C", # Legion's room. If a portrait gets placed here the player won't be able to activate Legion because using a portrait doesn't set the pickup flag Legion checks.
]
locations.select! do |location|
room_str = location[0,8]
!bad_portrait_rooms.include?(room_str)
end
end
if GAME == "ooe" && SKILL_GLOBAL_ID_RANGE.include?(pickup_global_id)
# Don't put progression glyph in certain locations where the player could easily get them early.
locations -= checker.no_glyph_locations
end
locations
end
# Picks a random pickup from the pool of unplaced non-progression pickups,
# restricted to valid_ids.
# If the pool has no valid entries left, the pool is refilled with duplicates
# (minus items the player already has and glyphs already placed as event
# glyphs) and the method recurses to pick from the refilled pool.
# The chosen pickup is moved from the unplaced list to the used list.
# Returns the chosen pickup global ID.
def get_unplaced_non_progression_pickup(valid_ids: PICKUP_GLOBAL_ID_RANGE.to_a)
  valid_possible_items = @unplaced_non_progression_pickups.select do |unplaced_id|
    valid_ids.include?(unplaced_id)
  end
  pickup_global_id = valid_possible_items.sample(random: rng)
  if pickup_global_id.nil?
    # Ran out of unplaced pickups, so place a duplicate instead.
    # (Block param renamed so it no longer shadows pickup_global_id above.)
    @unplaced_non_progression_pickups += all_non_progression_pickups().select do |dupe_id|
      valid_ids.include?(dupe_id)
    end
    @unplaced_non_progression_pickups -= checker.current_items
    # If a glyph has already been placed as an event glyph, do not place it again somewhere.
    # If the player gets one from a glyph statue first, then the one in the event/puzzle won't appear.
    @unplaced_non_progression_pickups -= @glyphs_placed_as_event_glyphs
    return get_unplaced_non_progression_pickup(valid_ids: valid_ids)
  end
  @unplaced_non_progression_pickups.delete(pickup_global_id)
  @used_non_progression_pickups << pickup_global_id
  return pickup_global_id
end
# Picks an unplaced non-progression pickup, restricted to items only.
def get_unplaced_non_progression_item
  item_ids = ITEM_GLOBAL_ID_RANGE.to_a
  get_unplaced_non_progression_pickup(valid_ids: item_ids)
end
# Picks an unplaced non-progression item whose ID (+1) can be encoded as an
# ARM shifted-immediate operand, so it can be written directly into an
# instruction.
def get_unplaced_non_progression_item_that_can_be_an_arm_shifted_immediate
  encodable_ids = ITEM_GLOBAL_ID_RANGE.select do |item_id|
    game.fs.check_integer_can_be_an_arm_shifted_immediate?(item_id+1)
  end
  get_unplaced_non_progression_pickup(valid_ids: encodable_ids)
end
# Picks an unplaced non-progression pickup, restricted to skills only.
def get_unplaced_non_progression_skill
  skill_ids = SKILL_GLOBAL_ID_RANGE.to_a
  get_unplaced_non_progression_pickup(valid_ids: skill_ids)
end
# Picks an unplaced non-progression item, excluding OoE's relic IDs
# (0x6F-0x74) when playing OoE.
def get_unplaced_non_progression_item_except_ooe_relics
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a
  valid_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Picks an unplaced glyph from the hardcoded set of projectile glyph IDs.
def get_unplaced_non_progression_projectile_glyph
  projectile_glyph_ids = [0x16..0x18, 0x1C..0x32, 0x34..0x36].flat_map(&:to_a)
  get_unplaced_non_progression_pickup(valid_ids: projectile_glyph_ids)
end
# Picks an unplaced non-progression pickup safe to use as an enemy drop
# (excludes items with overpowered hardcoded effects).
def get_unplaced_non_progression_pickup_for_enemy_drop
  droppable_ids = PICKUP_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: droppable_ids)
end
# Picks an unplaced non-progression item safe to use as an enemy drop
# (excludes items with overpowered hardcoded effects).
def get_unplaced_non_progression_item_for_enemy_drop
  droppable_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  get_unplaced_non_progression_pickup(valid_ids: droppable_ids)
end
# Picks an unplaced non-progression item for an enemy drop, excluding both
# OP-hardcoded-effect items and (in OoE) the relic IDs 0x6F-0x74.
def get_unplaced_non_progression_item_except_ooe_relics_for_enemy_drop
  valid_ids = ITEM_GLOBAL_ID_RANGE.to_a - ITEMS_WITH_OP_HARDCODED_EFFECT
  valid_ids -= (0x6F..0x74).to_a if GAME == "ooe"
  get_unplaced_non_progression_pickup(valid_ids: valid_ids)
end
# Looks up an entity from a location string of the form "AA-SS-RR_EE"
# (area-sector-room_entity, all hex).
# Raises with a clear message when the string is malformed (previously a
# malformed string died with an obscure ArgumentError from nil.to_i(16)).
def get_entity_by_location_str(location)
  match = location.match(/^(\h\h)-(\h\h)-(\h\h)_(\h+)$/)
  if match.nil?
    raise "Invalid entity location string: #{location}"
  end
  # Use MatchData captures instead of the cryptic $1..$4 globals.
  area_index, sector_index, room_index, entity_index = match.captures.map{|cap| cap.to_i(16)}
  room = game.areas[area_index].sectors[sector_index].rooms[room_index]
  entity = room.entities[entity_index]
  return entity
end
# Rewrites the entity at the given location string so it grants
# pickup_global_id instead of its original pickup.
# pickup_global_id may be an item/skill global ID, :money, a villager name
# symbol (OoE), or a portrait name symbol (PoR). Depending on the game and
# location, the entity is turned into a chest, ground/hidden pickup, glyph
# statue, villager event, portrait, or the boss's drop is patched.
def change_entity_location_to_pickup_global_id(location, pickup_global_id)
  entity = get_entity_by_location_str(location)
  if checker.event_locations.include?(location) || checker.easter_egg_locations.include?(location)
    # Event with a hardcoded item/glyph.
    change_hardcoded_event_pickup(entity, pickup_global_id)
    return
  end
  if GAME == "ooe" && location == "08-02-06_01" # Strength Ring blue chest spawned by the searchlights after you kill the Tin Man
    if entity.var_a != 2
      raise "Searchlights are not of type 2 (Tin Man spawn)"
    end
    # The chest's contents are hardcoded in an instruction, so patch it directly.
    game.fs.replace_arm_shifted_immediate_integer(0x022A194C, pickup_global_id+1)
  elsif RANDOMIZABLE_VILLAGER_NAMES.include?(pickup_global_id)
    # Villager
    if GAME != "ooe"
      raise "Tried to place villager in #{GAME}"
    end
    # Record the room so no second villager event is placed in it later.
    room_str = location[0,8]
    @rooms_that_already_have_an_event << room_str
    entity.type = 2
    entity.subtype = 0x89
    entity.var_a = VILLAGER_NAME_TO_EVENT_FLAG[pickup_global_id]
    entity.var_b = 0
    entity.write_to_rom()
    if pickup_global_id == :villageranna
      # Anna must have Tom in her room, or her event will crash the game.
      room = entity.room
      cat = Entity.new(room, room.fs)
      cat.x_pos = entity.x_pos
      cat.y_pos = entity.y_pos
      cat.type = 2
      cat.subtype = 0x3F
      cat.var_a = 3
      cat.var_b = 1
      room.entities << cat
      room.write_entities_to_rom()
      # Remove the Tom in Anna's original room since he's not needed there.
      original_cat = game.areas[7].sectors[0].rooms[6].entities[2]
      original_cat.type = 0
      original_cat.write_to_rom()
    end
  elsif PORTRAIT_NAMES.include?(pickup_global_id)
    # Portrait
    if GAME != "por"
      raise "Tried to place portrait in #{GAME}"
    end
    portrait_data = PORTRAIT_NAME_TO_DATA[pickup_global_id]
    entity.type = 2
    entity.subtype = portrait_data[:subtype]
    entity.var_a = portrait_data[:var_a]
    entity.var_b = portrait_data[:var_b]
    # Move the portrait to a short distance above the closest floor so it looks good and is enterable.
    coll = RoomCollision.new(entity.room, game.fs)
    floor_y = coll.get_floor_y(entity, allow_jumpthrough: true)
    entity_original_y_pos = entity.y_pos
    entity.y_pos = floor_y - 0x50 # Portraits should float 5 tiles off the ground.
    entity.write_to_rom()
    curr_area_index = entity.room.area_index
    curr_sector_index = entity.room.sector_index
    curr_room_index = entity.room.room_index
    # Find the return portrait.
    # Destination sector/room are packed into var B (bits 6-9 and 0-5).
    dest_area_index = entity.var_a
    dest_sector_index = (entity.var_b & 0x3C0) >> 6
    dest_room_index = entity.var_b & 0x3F
    dest_room = game.areas[dest_area_index].sectors[dest_sector_index].rooms[dest_room_index]
    dest_portrait = dest_room.entities.find{|entity| entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)}
    return_portraits = [dest_portrait]
    # Update the list of x/y positions the player returns at in the por_distinct_return_portrait_positions patch.
    return_x = entity.x_pos
    return_y = floor_y
    game.fs.write(0x02309010+dest_area_index*4, [return_x, return_y].pack("vv"))
    # If there's a small breakable wall containing this portrait we remove it.
    # Not only does the breakable wall not hide the portrait, but when the player returns they would be put out of bounds by it.
    breakable_wall_x_range = (entity.x_pos-8..entity.x_pos+8)
    breakable_wall_y_range = (entity_original_y_pos-8..entity_original_y_pos+8)
    breakable_wall_entity = entity.room.entities.find do |e|
      e.is_special_object? && e.subtype == 0x3B && breakable_wall_x_range.include?(e.x_pos) && breakable_wall_y_range.include?(e.y_pos)
    end
    if breakable_wall_entity
      breakable_wall_entity.type = 0
      breakable_wall_entity.write_to_rom()
    end
    # Also update the bonus return portrait at the end of some areas.
    case dest_area_index
    when 2 # 13th Street
      return_portraits << game.entity_by_str("02-02-16_02")
    when 4 # Forgotten City
      return_portraits << game.entity_by_str("04-01-07_01")
    when 6 # Burnt Paradise
      return_portraits << game.entity_by_str("06-00-06_03")
    when 8 # Dark Academy
      return_portraits << game.entity_by_str("08-00-08_04")
    end
    # Point every return portrait back at the room this portrait now lives in.
    return_portraits.each do |return_portrait|
      return_portrait.var_a = curr_area_index
      return_portrait.var_b = ((curr_sector_index & 0xF) << 6) | (curr_room_index & 0x3F)
      # Pick the portrait frame graphic matching the area being returned to.
      return_portrait.subtype = case curr_area_index
      when 1, 3, 5, 7 # City of Haze, Sandy Grave, Nation of Fools, or Forest of Doom.
        0x1A
      when 2, 4, 6, 8 # 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
        0x76
      when 0, 9 # Dracula's Castle or Nest of Evil.
        if [2, 4, 6, 8].include?(dest_area_index)
          # Use the alt portrait frame when returning to Dracula's Castle from 13th Street, Forgotten City, Burnt Paradise, or Dark Academy.
          0x87
        else
          0x86
        end
      else
        puts "Unknown area to portrait into: %02X" % curr_area_index
      end
      # Set highest bit of var B to indicate that this is a return portrait to the por_distinct_return_portrait_positions patch.
      return_portrait.var_b = 0x8000 | return_portrait.var_b
      return_portrait.write_to_rom()
      if room_rando?
        # Tell the room rando logic about this return portrait.
        checker.add_return_portrait(return_portrait.room.room_str, location)
      end
    end
    if dest_area_index == 7 # Forest of Doom
      # Remove the event from the original Forest of Doom portrait room since the portrait is no longer there.
      forest_event = game.entity_by_str("00-08-01_03")
      forest_event.type = 0
      forest_event.write_to_rom()
    end
  elsif entity.type == 1
    # Boss
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if !PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      raise "Can't make boss drop required item"
    end
    if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
      # Aguni. He's not placed in the room so we hardcode him.
      enemy_dna = game.enemy_dnas[0x70]
    else
      enemy_dna = game.enemy_dnas[entity.subtype]
    end
    case GAME
    when "dos"
      enemy_dna["Soul"] = item_index
    when "ooe"
      enemy_dna["Glyph"] = pickup_global_id + 1
    else
      raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
    end
    enemy_dna.write_to_rom()
  elsif GAME == "dos" || GAME == "por"
    if GAME == "por" && location == "05-02-0C_01"
      # Cog's location. We always make this location use pickup flag 0x10 since Legion is hardcoded to check that flag, not whether you own the cog.
      pickup_flag = 0x10
      is_cog = true
    else
      pickup_flag = get_unused_pickup_flag_for_entity(entity)
      is_cog = false
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup? || is_cog || rng.rand <= 0.80
        # 80% chance to be a money bag
        # Hidden pickups have to be a bag since chests can't be hidden in a wall.
        # The cog location has to be a bag since chests can't have a pickup flag so they wouldn't be able to activate legion.
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = 1
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      else
        # 20% chance to be a money chest
        entity.type = 2
        entity.subtype = 1
        if GAME == "dos"
          entity.var_a = 0x10
        else
          entity.var_a = [0xE, 0xF, 0x12].sample(random: rng)
        end
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      end
      entity.write_to_rom()
      return
    end
    # Make sure Chaos/Magus Ring isn't easily available.
    if GAME == "dos" && pickup_global_id == 0xCD # Chaos Ring
      entity.type = 2
      entity.subtype = 0x4C # All-souls-owned item
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = pickup_global_id + 1
      entity.write_to_rom()
      return
    elsif GAME == "por" && pickup_global_id == 0x12C # Magus Ring
      entity.type = 6 # All-quests-complete item
      entity.subtype = 7
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = 6
      entity.write_to_rom()
      return
    end
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if PICKUP_SUBTYPES_FOR_SKILLS.include?(item_type)
      case GAME
      when "dos"
        # Soul candle
        entity.type = 2
        entity.subtype = 1
        entity.var_a = 0
        entity.var_b = item_index
        # We didn't use the pickup flag, so put it back
        @unused_pickup_flags << pickup_flag
      when "por"
        # Skill
        if entity.is_hidden_pickup?
          entity.type = 7
        else
          entity.type = 4
        end
        entity.subtype = item_type
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = item_index
      end
    else
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = item_type
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = item_index
    end
    entity.write_to_rom()
  elsif GAME == "ooe"
    pickup_flag = get_unused_pickup_flag_for_entity(entity)
    # Glyph entities sit 0x20 higher than item entities; temporarily shift down
    # so the placement math below is uniform (shifted back up at the end).
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos += 0x20
    end
    if pickup_global_id == :money
      if entity.is_hidden_pickup?
        entity.type = 7
      else
        entity.type = 4
      end
      entity.subtype = 1
      entity.var_a = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.var_b = rng.rand(4..6) # 500G, 1000G, 2000G
      entity.write_to_rom()
      return
    end
    if (0x6F..0x74).include?(pickup_global_id)
      # Relic. Must go in a chest, if you leave it lying on the ground it won't autoequip.
      entity.type = 2
      entity.subtype = 0x16
      entity.var_a = pickup_global_id + 1
      entity.var_b = pickup_flag
      use_pickup_flag(pickup_flag)
      entity.write_to_rom()
      return
    end
    if pickup_global_id >= 0x6F
      # Item
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 0xFF
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        case rng.rand
        when 0.00..0.70
          # 70% chance for a red chest
          entity.type = 2
          entity.subtype = 0x16
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        when 0.70..0.95
          # 15% chance for an item on the ground
          entity.type = 4
          entity.subtype = 0xFF
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # 5% chance for a hidden blue chest
          entity.type = 2
          entity.subtype = 0x17
          entity.var_a = pickup_global_id + 1
          entity.var_b = pickup_flag
          use_pickup_flag(pickup_flag)
        end
      end
    else
      # Glyph
      if entity.is_hidden_pickup?
        entity.type = 7
        entity.subtype = 2
        entity.var_a = pickup_flag
        use_pickup_flag(pickup_flag)
        entity.var_b = pickup_global_id + 1
      else
        puzzle_glyph_ids = [0x1D, 0x1F, 0x20, 0x22, 0x24, 0x26, 0x27, 0x2A, 0x2B, 0x2F, 0x30, 0x31, 0x32, 0x46, 0x4E]
        if puzzle_glyph_ids.include?(pickup_global_id)
          # Free glyph
          entity.type = 4
          entity.subtype = 2
          entity.var_a = pickup_flag
          use_pickup_flag(pickup_flag)
          entity.var_b = pickup_global_id + 1
        else
          # Glyph statue
          entity.type = 2
          entity.subtype = 2
          entity.var_a = 0
          entity.var_b = pickup_global_id + 1
          # We didn't use the pickup flag, so put it back
          @unused_pickup_flags << pickup_flag
        end
      end
    end
    if entity.is_glyph? && !entity.is_hidden_pickup?
      entity.y_pos -= 0x20
    end
    entity.write_to_rom()
  end
end
# Deletes entities at locations the player can never reach so their pickups
# are removed from the map. Events and bosses are deliberately kept.
def remove_inaccessible_items(inaccessible_remaining_locations)
  inaccessible_remaining_locations.each do |location|
    entity = get_entity_by_location_str(location)
    # Don't delete inaccessible events/bosses, just in case.
    next if checker.event_locations.include?(location) || entity.type == 1
    entity.type = 0
    entity.write_to_rom()
  end
end
# Returns a pickup flag for the given entity: reuses the flag the entity
# already carries when possible, otherwise takes one from the unused pool.
def get_unused_pickup_flag_for_entity(entity)
  pickup_flag = nil
  if entity.is_item_chest?
    pickup_flag = entity.var_b
  elsif entity.is_pickup?
    pickup_flag = entity.var_a
  elsif GAME == "dos" && entity.is_special_object?
    case entity.subtype
    when 0x4D # Easter egg item
      pickup_flag = entity.var_b
    when 0x4C # All-souls-obtained item
      pickup_flag = entity.var_a
    end
  end
  if GAME == "ooe" && pickup_flag && (0..0x51).cover?(pickup_flag)
    # In OoE, these pickup flags are used by glyph statues automatically and we can't control those.
    # Therefore we need to reassign pickups that were free glyphs in the original game a new pickup flag, so it doesn't conflict with where those glyphs (Rapidus Fio and Volaticus) got moved to when randomized.
    pickup_flag = nil
  end
  if pickup_flag.nil? || @used_pickup_flags.include?(pickup_flag)
    pickup_flag = @unused_pickup_flags.pop()
    raise "No pickup flag for this item, this error shouldn't happen" if pickup_flag.nil?
  end
  pickup_flag
end
# Takes a pickup flag from the unused pool; raises when the pool is empty.
def get_unused_pickup_flag
  flag = @unused_pickup_flags.pop()
  raise "No pickup flag for this item, this error shouldn't happen" if flag.nil?
  flag
end
# Records a pickup flag as used and purges every used flag from the unused pool.
def use_pickup_flag(pickup_flag)
  @used_pickup_flags.push(pickup_flag)
  @unused_pickup_flags -= @used_pickup_flags
end
# Reads the skill (DoS soul / OoE glyph) dropped by the enemy at the given
# location and returns it as a global skill ID.
def get_entity_skill_drop_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  raise "Not an enemy: #{location}" unless entity.type == 1
  enemy_dna = if GAME == "dos" && entity.room.sector_index == 9 && entity.room.room_index == 1
    # Aguni. He's not placed in the room so we hardcode him.
    game.enemy_dnas[0x70]
  else
    game.enemy_dnas[entity.subtype]
  end
  skill_local_id = case GAME
  when "dos"
    enemy_dna["Soul"]
  when "ooe"
    enemy_dna["Glyph"] - 1
  else
    raise "Boss soul randomizer is bugged for #{LONG_GAME_NAME}."
  end
  skill_local_id + SKILL_GLOBAL_ID_RANGE.begin
end
# Returns the villager name symbol for the villager entity at the given
# location; raises if the entity there is not an OoE villager.
def get_villager_name_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  is_villager = GAME == "ooe" && entity.type == 2 && entity.subtype == 0x89
  raise "Not a villager: #{location}" unless is_villager
  VILLAGER_NAME_TO_EVENT_FLAG.invert[entity.var_a]
end
# Returns the portrait name symbol for the portrait entity at the given
# location; raises if the entity there is not a PoR portrait.
def get_portrait_name_by_entity_location(location)
  entity = get_entity_by_location_str(location)
  is_portrait = GAME == "por" && entity.is_special_object? && [0x1A, 0x76, 0x86, 0x87].include?(entity.subtype)
  raise "Not a portrait: #{location}" unless is_portrait
  AREA_INDEX_TO_PORTRAIT_NAME[entity.var_a]
end
# Dispatches to the per-game handler that patches events giving a
# hardcoded pickup. Unknown games are silently ignored.
def change_hardcoded_event_pickup(event_entity, pickup_global_id)
  case GAME
  when "dos" then dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  when "por" then por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  when "ooe" then ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  end
end
# DoS: patches events that give a hardcoded pickup so they give
# pickup_global_id instead. Handles Mina's Talisman (subtype 0x65) and the
# easter egg item (subtype 0x4D); other subtypes are left untouched.
# All addresses below are hardcoded DoS ROM/overlay addresses.
def dos_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
  if event_entity.subtype == 0x65 # Mina's Talisman
    item_type, item_index = game.get_item_type_and_index_by_global_id(pickup_global_id)
    if (0x3D..0x41).include?(pickup_global_id)
      # Magic seal. These need to call a different function to be properly given.
      seal_index = pickup_global_id - 0x3D
      # Seal given when watching the event
      game.fs.write(0x021CB9F4, [seal_index].pack("C"))
      game.fs.write(0x021CB9FC, [0xEB006ECF].pack("V")) # Call func 021E7540
      # Seal given when skipping the event
      game.fs.write(0x021CBC14, [seal_index].pack("C"))
      game.fs.write(0x021CBC1C, [0xEB006E47].pack("V")) # Call func 021E7540
    else
      # Regular item.
      # Item given when watching the event
      game.fs.write(0x021CB9F4, [item_type].pack("C"))
      game.fs.write(0x021CB9F8, [item_index].pack("C"))
      # Item given when skipping the event
      game.fs.write(0x021CBC14, [item_type].pack("C"))
      game.fs.write(0x021CBC18, [item_index].pack("C"))
    end
    # Item name shown in the corner of the screen when watching the event.
    game.fs.write(0x021CBA08, [item_type].pack("C"))
    game.fs.write(0x021CBA0C, [item_index].pack("C"))
    # Also display the item's name in the corner when skipping the event.
    # We add a few new lines of code in free space for this.
    code = [0xE3A00000, 0xE3A010F0, 0xEBFDB6FD, 0xE1A00005, 0xEA042E64]
    game.fs.write(0x020C027C, code.pack("V*"))
    # NOTE(review): this overwrites the first byte of the code block written
    # just above at the same address — confirm this is intentional.
    game.fs.write(0x020C027C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x021CBC20, [0xEAFBD195].pack("V"))
  elsif event_entity.subtype == 0x4D # Easter egg item
    # Change what item is actually placed into your inventory when you get the easter egg.
    easter_egg_index = event_entity.var_a
    game.fs.write(0x0222BE34 + easter_egg_index*0xC, [pickup_global_id+1].pack("v"))
    # Update the pickup flag.
    pickup_flag = get_unused_pickup_flag_for_entity(event_entity)
    event_entity.var_b = pickup_flag
    use_pickup_flag(pickup_flag)
    # Make the easter egg special object use the same palette list as actual item icons, since that gives access to all 3 icon palettes, while the actual object's palette only has the first.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code
    item = game.items[pickup_global_id]
    icon_palette_pointer = 0x022C4684
    game.fs.write(0x021AF5CC, [icon_palette_pointer].pack("V"))
    icon_palette_index = (item["Icon"] & 0xFF00) >> 8
    sprite = sprite_info.sprite
    sprite.frames[easter_egg_index].parts.first.palette_index = icon_palette_index
    sprite.write_to_rom()
    # Now update the actual item visual on the object's GFX page so it visually shows the correct item.
    sprite_info = SpecialObjectType.new(0x4D, game.fs).extract_gfx_and_palette_and_sprite_from_create_code # We extract sprite info again to get the updated palette pointer after we changed it.
    gfx = sprite_info.gfx_pages.first
    palettes = renderer.generate_palettes(sprite_info.palette_pointer, 16)
    chunky_image = renderer.render_gfx_page(gfx.file, palettes[icon_palette_index], gfx.canvas_width)
    new_icon = renderer.render_icon_by_item(item)
    # Each easter egg's icon occupies a 16px-wide slot on the GFX page.
    x_offset = 16*easter_egg_index
    y_offset = 0
    chunky_image.replace!(new_icon, x_offset, y_offset)
    renderer.save_gfx_page(chunky_image, gfx, sprite_info.palette_pointer, 16, icon_palette_index)
  end
end
# PoR counterpart of the hardcoded-event-pickup handlers. Currently a stub:
# it only loads the sector's overlay and patches nothing (kept for symmetry
# with the DoS/OoE handlers).
def por_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
end
# OoE: patches events that give a hardcoded glyph so they give
# pickup_global_id instead. Some events are converted into plain free glyphs
# (Magnes, Cerberus); the Dominus events get direct byte patches; the rest
# are handled generically via per-subtype hardcoded ROM addresses.
def ooe_change_hardcoded_event_pickup(event_entity, pickup_global_id)
  event_entity.room.sector.load_necessary_overlay()
  # Remember this glyph so it won't also be placed somewhere else later.
  @glyphs_placed_as_event_glyphs << pickup_global_id
  if event_entity.subtype == 0x8A # Magnes
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it automatically equips Magnes even if the glyph it gives is not Magnes.
    # Changing what it equips would just make the event not work right, so we may as well remove it.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x2B0
    event_entity.write_to_rom()
  elsif event_entity.subtype == 0x69 # Dominus Hatred
    game.fs.write(0x02230A7C, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25D8, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x6F # Dominus Anger
    game.fs.write(0x02230A84, [pickup_global_id+1].pack("C"))
    game.fs.write(0x022C25DC, [pickup_global_id+1].pack("C"))
  elsif event_entity.subtype == 0x81 # Cerberus
    # Get rid of the event, turn it into a normal free glyph
    # We can't keep the event because it has special programming to always spawn them in order even if you get to the locations out of order.
    pickup_flag = get_unused_pickup_flag()
    event_entity.type = 4
    event_entity.subtype = 2
    event_entity.var_a = pickup_flag
    use_pickup_flag(pickup_flag)
    event_entity.var_b = pickup_global_id + 1
    event_entity.x_pos = 0x80
    event_entity.y_pos = 0x60
    event_entity.write_to_rom()
    other_cerberus_events = event_entity.room.entities.select{|e| e.is_special_object? && [0x82, 0x83].include?(e.subtype)}
    other_cerberus_events.each do |event|
      # Delete these others, we don't want the events.
      event.type = 0
      event.write_to_rom()
    end
  else
    # Generic case: each supported event subtype maps to up to four hardcoded
    # addresses — the glyph ID byte, one or two pickup-flag-read sites, and a
    # pickup-flag-write site. Missing entries are nil and skipped below.
    glyph_id_location, pickup_flag_read_location, pickup_flag_write_location, second_pickup_flag_read_location = case event_entity.subtype
    when 0x2F # Luminatio
      [0x022C4894, 0x022C483C, 0x022C4888]
    when 0x3B # Pneuma
      [0x022C28E8, 0x022C2880, 0x022C28DC, 0x022C279C]
    when 0x44 # Lapiste
      [0x022C2CB0, 0x022C2C24, 0x022C2CA0]
    when 0x54 # Vol Umbra
      [0x022C2FBC, 0x022C2F70, 0x022C2FB4]
    when 0x4C # Vol Fulgur
      [0x022C2490, 0x022C2404, 0x022C2480]
    when 0x52 # Vol Ignis
      [0x0221F1A0, 0x0221F148, 0x0221F194]
    when 0x47 # Vol Grando
      [0x022C230C, 0x022C2584, 0x022C22FC]
    when 0x40 # Cubus
      [0x022C31DC]
    when 0x53 # Morbus
      [0x022C2354, 0x022C2318, 0x022C2344]
    when 0x76 # Dominus Agony
      [0x022C25BC]
    else
      return
    end
    # What glyph is actually spawned.
    game.fs.write(glyph_id_location, [pickup_global_id+1].pack("C"))
    if pickup_flag_write_location
      # The pickup flag set when you absorb the glyph.
      pickup_flag = pickup_global_id+2
      game.fs.write(pickup_flag_write_location, [pickup_flag].pack("C"))
    end
    if pickup_flag_read_location
      # The pickup flag read to decide whether you've completed this puzzle yet or not.
      # This is determined by two lines of code:
      # The first loads the word in the bitfield containing the correct bit (0x20 bits in each word):
      pickup_flag_word_offset = 0x40 + 4*(pickup_flag/0x20)
      game.fs.write(pickup_flag_read_location, [pickup_flag_word_offset].pack("C"))
      game.fs.write(second_pickup_flag_read_location, [pickup_flag_word_offset].pack("C")) if second_pickup_flag_read_location
      # The second does a tst on the exact bit within that word:
      pickup_flag_bit_index = pickup_flag % 0x20
      game.fs.replace_hardcoded_bit_constant(pickup_flag_read_location+4, pickup_flag_bit_index)
      game.fs.replace_hardcoded_bit_constant(second_pickup_flag_read_location+4, pickup_flag_bit_index) if second_pickup_flag_read_location
    end
  end
end
# Points a world map exit door at a new destination area by rewriting the
# area exit entity in that door's room, then informs the logic checker.
# A non-negative entrance_type goes into var A; a negative value means the
# (negated) entrance type goes into var B instead.
def set_world_map_exit_destination_area(world_map_exit_door_str, entrance_type)
  room_str = world_map_exit_door_str[0,8]
  area_exit = game.entity_by_str("#{room_str}_00")
  if entrance_type >= 0
    area_exit.var_a = entrance_type
    area_exit.var_b = 0
  else # Negative value indicates var B should be used instead of var A.
    area_exit.var_a = 0
    area_exit.var_b = -entrance_type
  end
  area_exit.write_to_rom()
  entrance_door_str = WORLD_MAP_ENTRANCES[entrance_type]
  puts "Setting world map unlock: #{world_map_exit_door_str} -> #{entrance_door_str}"
  checker.set_world_map_exit_destination_area(world_map_exit_door_str, entrance_door_str)
  # For now we sync up the two Tymeo exits to always unlock the same area like
  # in vanilla. In the future consider randomizing these separately.
  if world_map_exit_door_str == "0A-00-0A_000"
    set_world_map_exit_destination_area("0A-00-13_000", entrance_type)
  end
end
end
|
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'collection_json_serializer/version'

# Gem specification for collection_json_serializer.
Gem::Specification.new do |spec|
  spec.name = "collection_json_serializer"
  spec.version = CollectionJson::Serializer::VERSION
  spec.authors = ["Carles Jove i Buxeda"]
  spec.email = ["hola@carlus.cat"]
  spec.summary = %q{Serialize objects as Collection+JSON.}
  # Fixed misspelling of Mike Amundsen's name (was "Amudsen").
  spec.description = %q{CollectionJson::Serializer lets you have models' serializers to format JSON responses following the Collection+JSON media type by Mike Amundsen.}
  spec.homepage = "https://github.com/carlesjove/collection_json_serializer"
  spec.license = "MIT"
  spec.files = `git ls-files -z`.split("\x0")
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]
  spec.add_dependency "activesupport", "~> 4.1"
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.4"
end
Edited gemspec.
# coding: utf-8
# Gem specification for collection_json_serializer.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'collection_json_serializer/version'

Gem::Specification.new do |spec|
  spec.name          = "collection_json_serializer"
  spec.version       = CollectionJson::Serializer::VERSION
  spec.authors       = ["Carles Jove i Buxeda"]
  spec.email         = ["hola@carlus.cat"]
  spec.summary       = "Serialize objects as Collection+JSON."
  spec.description   = "CollectionJson::Serializer makes it easy to serialize objects into the Collection+JSON hypermedia type."
  spec.homepage      = "https://github.com/carlesjove/collection_json_serializer"
  spec.license       = "MIT"

  spec.files         = `git ls-files -z`.split("\x0")
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_dependency "activesupport", "~> 4.1"
  spec.add_development_dependency "bundler", "~> 1.7"
  spec.add_development_dependency "rake", "~> 10.0"
  spec.add_development_dependency "minitest", "~> 5.4"
end
|
# Homebrew formula for BEAST 1.8.2 (phylogenetics software from beast.bio.ed.ac.uk).
class Beast < Formula
  homepage "http://beast.bio.ed.ac.uk/"
  # doi "10.1093/molbev/mss075"
  # tag "bioinformatics"
  url "http://tree.bio.ed.ac.uk/download.php?id=92&num=3"
  version "1.8.2"
  sha1 "47a5aca20fecf6cb61a301f8b03d1e750858721a"

  head do
    url "https://beast-mcmc.googlecode.com/svn/trunk/"
    # HEAD builds are source checkouts and need ant to compile.
    depends_on :ant
  end

  def install
    system "ant", "linux" if build.head?
    # Move jars to libexec
    # Rewrite the launcher scripts to look for jars in libexec instead of lib.
    inreplace Dir["bin/*"] do |s|
      s["$BEAST/lib"] = "$BEAST/libexec"
    end
    mv "lib", "libexec"
    # HEAD builds place the release files under release/Linux/BEASTv*/.
    prefix.install Dir[build.head? ? "release/Linux/BEASTv*/*" : "*"]
  end

  test do
    system "#{bin}/beast", "-help"
  end

  def caveats; <<-EOS.undent
    Examples are installed in:
      #{opt_prefix}/examples/
    EOS
  end
end
beast: add 1.8.2 bottle.
# Homebrew formula for BEAST 1.8.2, with prebuilt bottles added.
class Beast < Formula
  homepage "http://beast.bio.ed.ac.uk/"
  # doi "10.1093/molbev/mss075"
  # tag "bioinformatics"
  url "http://tree.bio.ed.ac.uk/download.php?id=92&num=3"
  version "1.8.2"
  sha1 "47a5aca20fecf6cb61a301f8b03d1e750858721a"

  # Prebuilt binary bottles per macOS version.
  bottle do
    root_url "https://homebrew.bintray.com/bottles-science"
    cellar :any
    sha256 "e1357fad70b3a51ce734a705667f2e9d16bdddf480bf340559cdad0bbcaacb65" => :yosemite
    sha256 "c411831dc26441e4b5bd92dc1926fbd8171d5c8d26d17239f2ce1e9604f67f8b" => :mavericks
    sha256 "c3974c08c01dfa26db9407b070b4302a109043725fef586b4d82290603f2dfee" => :mountain_lion
  end

  head do
    url "https://beast-mcmc.googlecode.com/svn/trunk/"
    # HEAD builds are source checkouts and need ant to compile.
    depends_on :ant
  end

  def install
    system "ant", "linux" if build.head?
    # Move jars to libexec
    # Rewrite the launcher scripts to look for jars in libexec instead of lib.
    inreplace Dir["bin/*"] do |s|
      s["$BEAST/lib"] = "$BEAST/libexec"
    end
    mv "lib", "libexec"
    # HEAD builds place the release files under release/Linux/BEASTv*/.
    prefix.install Dir[build.head? ? "release/Linux/BEASTv*/*" : "*"]
  end

  test do
    system "#{bin}/beast", "-help"
  end

  def caveats; <<-EOS.undent
    Examples are installed in:
      #{opt_prefix}/examples/
    EOS
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{quarter_time}
  s.version = "0.3.0"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["evizitei"]
  s.date = %q{2010-06-22}
  s.description = %q{A simple gem for dealing with quarter logic. I happen to have a project where half the models in the database recur every three months as part of a "quarter" of the year. Within the code, we constantly are asking "what quarter is this for?", or "show me all the records for this quarter". Well, now I need the same power on another application, so say hello to "quarter_time".}
  s.email = %q{ethan.vizitei@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".gitignore",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/extensions/date_extension.rb",
    "lib/extensions/time_extension.rb",
    "lib/quarter_time.rb",
    "lib/quarter_time/quarter.rb",
    "lib/quarter_time/quarter_driven.rb",
    "lib/quarter_time/quarter_knowledge.rb",
    "quarter_time.gemspec",
    "test/helper.rb",
    "test/test_date_extensions.rb",
    "test/test_quarter.rb",
    "test/test_quarter_driven.rb",
    "test/test_time_extensions.rb"
  ]
  s.homepage = %q{http://github.com/evizitei/quarter_time}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{library for measuring time in quarters (three month periods) and interacting with models that are tied to a specific quarter.}
  s.test_files = [
    "test/helper.rb",
    "test/test_date_extensions.rb",
    "test/test_quarter.rb",
    "test/test_quarter_driven.rb",
    "test/test_time_extensions.rb"
  ]

  # Jeweler boilerplate: declare thoughtbot-shoulda as a development
  # dependency on RubyGems >= 1.2, which introduced that distinction;
  # older RubyGems only supports plain runtime dependencies.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    else
      s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    end
  else
    s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
  end
end
Regenerated gemspec for version 0.3.1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run the gemspec command
# -*- encoding: utf-8 -*-
Gem::Specification.new do |s|
  s.name = %q{quarter_time}
  s.version = "0.3.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.authors = ["evizitei"]
  s.date = %q{2010-06-22}
  s.description = %q{A simple gem for dealing with quarter logic. I happen to have a project where half the models in the database recur every three months as part of a "quarter" of the year. Within the code, we constantly are asking "what quarter is this for?", or "show me all the records for this quarter". Well, now I need the same power on another application, so say hello to "quarter_time".}
  s.email = %q{ethan.vizitei@gmail.com}
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    ".document",
    ".gitignore",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "lib/extensions/date_extension.rb",
    "lib/extensions/time_extension.rb",
    "lib/quarter_time.rb",
    "lib/quarter_time/quarter.rb",
    "lib/quarter_time/quarter_driven.rb",
    "lib/quarter_time/quarter_knowledge.rb",
    "quarter_time.gemspec",
    "test/helper.rb",
    "test/test_date_extensions.rb",
    "test/test_quarter.rb",
    "test/test_quarter_driven.rb",
    "test/test_time_extensions.rb"
  ]
  s.homepage = %q{http://github.com/evizitei/quarter_time}
  s.rdoc_options = ["--charset=UTF-8"]
  s.require_paths = ["lib"]
  s.rubygems_version = %q{1.3.7}
  s.summary = %q{library for measuring time in quarters (three month periods) and interacting with models that are tied to a specific quarter.}
  s.test_files = [
    "test/helper.rb",
    "test/test_date_extensions.rb",
    "test/test_quarter.rb",
    "test/test_quarter_driven.rb",
    "test/test_time_extensions.rb"
  ]

  # Jeweler boilerplate: declare thoughtbot-shoulda as a development
  # dependency on RubyGems >= 1.2, which introduced that distinction;
  # older RubyGems only supports plain runtime dependencies.
  if s.respond_to? :specification_version then
    current_version = Gem::Specification::CURRENT_SPECIFICATION_VERSION
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    else
      s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
    end
  else
    s.add_dependency(%q<thoughtbot-shoulda>, [">= 0"])
  end
end
|
# Twilio-backed SMS helpers for the SuprStar karaoke app: send texts, fetch
# the newest inbound text, and route it to the right action (party
# verification, song request, or comment).
module NotificationHelper
  # Builds a Twilio REST client from environment credentials.
  # NOTE(review): TWILIO_API_KEY feeds the auth-token slot and
  # TWILIO_ACC_SID the account-SID slot — confirm the env var names match
  # what is actually stored in the environment.
  def twilio_client
    twilio_token = ENV["TWILIO_API_KEY"]
    twilio_sid = ENV["TWILIO_ACC_SID"]
    client = Twilio::REST::Client.new twilio_sid, twilio_token
  end

  # Sends +body+ as an SMS to +phone_number+ from the app's Twilio number.
  def send_sms(phone_number, body)
    twilio_client.account.messages.create(
      :from => "+18454434529",
      :to => phone_number,
      :body => body
    )
  end

  # Returns the most recent SMS received on the app's Twilio number.
  def get_sms
    twilio_client.account.messages.list({:to => "+18454434529"}).first
  end

  # Pulls the newest inbound SMS and dispatches on its FIRST CHARACTER:
  #   "#" -> party verification ("#hashtag, name, song by artist")
  #   "1" -> another song request from a known user
  #   "2" -> a comment from a known user
  # Anything else gets a "try again" reply.
  def read_sms
    text = get_sms
    phone_number = text.from
    text_body = text.body
    # Search User's table by phone_number
    user = User.find_by(phone_number: phone_number)
    # Determine incoming format of text message
    # text_body[0] is only the first character of the message body.
    verification_text = text_body[0].include?("#")
    new_song_text = text_body[0].include?("1")
    comment_text = text_body[0].include?("2")
    #Determine reception of text message
    # Locals assigned inside a branch (hash_tag, name, song_info, comment,
    # party, ...) are nil afterwards when that branch did not run; the
    # flag expressions further down rely on exactly that.
    case
    when verification_text
      text_body = text.body.split(",")
      text_body
      hash_tag = text_body[0]
      name = text_body[1]
      title_artist = text_body[2]
      party = Party.find_by_hash_tag(hash_tag)
      p party
    when new_song_text
      # NOTE(review): song_info still contains the leading "1" here.
      song_info = text_body
      party = user.party
    when comment_text
      comment = text_body
    else
      text_body
    end
    # Determine outgoing format of text message response
    get_ready_to_sing = 'Get ready to sing SuprStar! To sing again text 1 and then your song. To send a comment text 2 and then your comment.'
    check_format_for_hashtag = "Please try to verify party, name, artist and song again"
    did_not_recognize = "Try again SuprStar. example #SuprStar, Matt Bunday, Friday by Rebecca Black"
    second_song = "Going again SuprStar?"
    be_nice = "Comment Received"
    # Determine parameters of incoming text message
    all_parameters_met = hash_tag && name && title_artist
    # user_not_verified only differs from all_parameters_met when hash_tag
    # was blank (text_body itself is always truthy).
    user_not_verified = text_body && name && title_artist
    user_comment = user && comment
    user_sing_again = party && song_info
    # Determine which actions to take with incoming text message
    case
    when all_parameters_met
      # find() is presumably a video-search helper defined elsewhere that
      # returns a hash with :title and :ytid — TODO confirm.
      video = find(title_artist)
      p video
      p party
      user = User.create(name: name, phone_number: phone_number,
        party_id: party.id)
      song = Song.create(name: video[:title], user_id: user.id,
        party_id: user.party.id, youtube_url: video[:ytid])
      # Append the new song to the party's queue and persist it.
      party_queue = user.party.queue
      party_queue << song.serializable_hash
      party.update(queue: party_queue)
      send_sms(phone_number, get_ready_to_sing)
    when user_not_verified
      send_sms(phone_number, check_format_for_hashtag)
    when user_comment
      send_sms(phone_number, be_nice)
      # NOTE(review): text_body still includes the leading "2" marker here.
      Comment.create(content: text_body, user_id: user.id, party_id: user.party.id)
    when user_sing_again
      video = find(song_info)
      p video[:title]
      song = Song.create(name: video[:title], user_id: user.id,
        party_id: user.party.id, youtube_url: video[:ytid])
      party_queue = user.party.queue
      party_queue << song.serializable_hash
      party.update(queue: party_queue)
      send_sms(phone_number, second_song)
    else
      send_sms(phone_number, did_not_recognize)
    end
  end
end
Took out the numbers entered by the user from text messages
# Twilio-backed SMS helpers for the SuprStar karaoke app: send texts, fetch
# the newest inbound text, and route it to the right action (party
# verification, song request, or comment).
module NotificationHelper
  # Builds a Twilio REST client from environment credentials.
  # NOTE(review): TWILIO_API_KEY feeds the auth-token slot and
  # TWILIO_ACC_SID the account-SID slot — confirm the env var names match
  # what is actually stored in the environment.
  def twilio_client
    twilio_token = ENV["TWILIO_API_KEY"]
    twilio_sid = ENV["TWILIO_ACC_SID"]
    client = Twilio::REST::Client.new twilio_sid, twilio_token
  end

  # Sends +body+ as an SMS to +phone_number+ from the app's Twilio number.
  def send_sms(phone_number, body)
    twilio_client.account.messages.create(
      :from => "+18454434529",
      :to => phone_number,
      :body => body
    )
  end

  # Returns the most recent SMS received on the app's Twilio number.
  def get_sms
    twilio_client.account.messages.list({:to => "+18454434529"}).first
  end

  # Pulls the newest inbound SMS and dispatches on its FIRST CHARACTER:
  #   "#" -> party verification ("#hashtag, name, song by artist")
  #   "1" -> another song request from a known user
  #   "2" -> a comment from a known user
  # Anything else gets a "try again" reply.
  def read_sms
    text = get_sms
    phone_number = text.from
    text_body = text.body
    # Search User's table by phone_number
    user = User.find_by(phone_number: phone_number)
    # Determine incoming format of text message
    # text_body[0] is only the first character of the message body.
    verification_text = text_body[0].include?("#")
    new_song_text = text_body[0].include?("1")
    comment_text = text_body[0].include?("2")
    #Determine reception of text message
    # Locals assigned inside a branch (hash_tag, name, song_info, comment,
    # party, ...) are nil afterwards when that branch did not run; the
    # flag expressions further down rely on exactly that.
    case
    when verification_text
      text_body = text.body.split(",")
      text_body
      hash_tag = text_body[0]
      name = text_body[1]
      title_artist = text_body[2]
      party = Party.find_by_hash_tag(hash_tag)
      p party
    when new_song_text
      # Strip the leading "1" marker so only the song query remains.
      song_info = text_body[1..text_body.length]
      party = user.party
    when comment_text
      comment = text_body
    else
      text_body
    end
    # Determine outgoing format of text message response
    get_ready_to_sing = 'Get ready to sing SuprStar! To sing again text 1 and then your song. To send a comment text 2 and then your comment.'
    check_format_for_hashtag = "Please try to verify party, name, artist and song again"
    did_not_recognize = "Try again SuprStar. example #SuprStar, Matt Bunday, Friday by Rebecca Black"
    second_song = "Going again SuprStar?"
    be_nice = "Comment Received"
    # Determine parameters of incoming text message
    all_parameters_met = hash_tag && name && title_artist
    # user_not_verified only differs from all_parameters_met when hash_tag
    # was blank (text_body itself is always truthy).
    user_not_verified = text_body && name && title_artist
    user_comment = user && comment
    user_sing_again = party && song_info
    # Determine which actions to take with incoming text message
    case
    when all_parameters_met
      # find() is presumably a video-search helper defined elsewhere that
      # returns a hash with :title and :ytid — TODO confirm.
      video = find(title_artist)
      p video
      p party
      user = User.create(name: name, phone_number: phone_number,
        party_id: party.id)
      song = Song.create(name: video[:title], user_id: user.id,
        party_id: user.party.id, youtube_url: video[:ytid])
      # Append the new song to the party's queue and persist it.
      party_queue = user.party.queue
      party_queue << song.serializable_hash
      party.update(queue: party_queue)
      send_sms(phone_number, get_ready_to_sing)
    when user_not_verified
      send_sms(phone_number, check_format_for_hashtag)
    when user_comment
      # Bug fix: the slice result was previously discarded, so comments were
      # stored with their leading "2" marker; assign it back before saving.
      text_body = text_body[1..text_body.length]
      send_sms(phone_number, be_nice)
      Comment.create(content: text_body, user_id: user.id, party_id: user.party.id)
    when user_sing_again
      video = find(song_info)
      p video[:title]
      song = Song.create(name: video[:title], user_id: user.id,
        party_id: user.party.id, youtube_url: video[:ytid])
      party_queue = user.party.queue
      party_queue << song.serializable_hash
      party.update(queue: party_queue)
      send_sms(phone_number, second_song)
    else
      send_sms(phone_number, did_not_recognize)
    end
  end
end
|
# encoding: utf-8
# View helpers for repository browsing: URL builders for commit logs, trees
# and blobs, plus branch-list rendering utilities.
module RepositoriesHelper
  # URL for the commit log of +objectish+ (branch/tag/sha). +options+ are
  # passed through as query params, and omitted entirely when blank so the
  # generated URL has no trailing "?".
  def log_path(objectish = "master", options = {})
    objectish = ensplat_path(objectish)
    if options.blank? # just to avoid the ? being tacked onto the url
      repo_owner_path(@repository, :project_repository_commits_in_ref_path, @project, @repository, objectish)
    else
      repo_owner_path(@repository, :project_repository_commits_in_ref_path, @project, @repository, objectish, options)
    end
  end

  # URL for a single commit page.
  def commit_path(objectish = "master")
    repo_owner_path(@repository, :repository_commit_path, @repository, objectish)
  end

  # Client-side fragment path, e.g. "#/blob/master/README".
  def hash_path(ref, path, type)
    "#/#{type}/#{ref}/#{path}"
  end

  # URL for a tree view; accepts +path+ as a "a/b/c" string or an array.
  def tree_path(treeish = "master", path = [], *args)
    if path.respond_to?(:to_str)
      path = path.split("/")
    end
    repo_owner_path(@repository, :repository_tree_path, @repository, branch_with_tree(treeish, path), *args)
  end

  # URL for a submission's tree view inside a lab (same path handling as
  # tree_path).
  def submission_path(treeish = "master", path = [])
    if path.respond_to?(:to_str)
      path = path.split("/")
    end
    repo_owner_path(@repository, :lab_submission_tree_path, @lab, @submission, @repository, branch_with_tree(treeish, path))
  end

  # URL for an arbitrary repository sub-action, optionally suffixed with a
  # sha1 (empty string when nil).
  def repository_path(action, sha1=nil)
    repo_owner_path(@repository, :project_repository_path, @project, @repository)+"/"+action+"/"+sha1.to_s
  end

  # URL for viewing a blob at a given ref/path.
  def blob_path(shaish, path, *args)
    repo_owner_path(@repository, :repository_blob_path, @repository, branch_with_tree(shaish, path), *args)
  end

  # URL for the raw (un-rendered) blob contents.
  def raw_blob_path(shaish, path)
    repo_owner_path(@repository, :project_repository_raw_blob_path, @project, @repository, branch_with_tree(shaish, path))
  end

  # URL for a blob's commit history.
  def blob_history_path(shaish, path)
    repo_owner_path(@repository, :project_repository_blob_history_path, @project, @repository, branch_with_tree(shaish, path))
  end

  # True for branch names that contain a namespace separator ("/").
  def namespaced_branch?(branchname)
    branchname.include?("/")
  end

  # Label for the group link depending on the viewer's admin rights.
  def edit_or_show_group_text
    if @repository.admin?(current_user)
      t("views.repos.edit_group")
    else
      t("views.repos.show_group")
    end
  end

  # Renders <li> elements for every branch, HEAD first, each linking to its
  # commit log.
  def render_branch_list_items(branches)
    sorted_git_heads(branches).map do |branch|
      content_tag(:li,
        link_to(h(branch.name), log_path(branch.name), :title => branch_link_title_text(branch)),
        :class => "branch #{highlight_if_head(branch)}")
    end.join("\n ")
  end

  # CSS class "head" for the HEAD branch, nil otherwise.
  def highlight_if_head(branch)
    if branch.head?
      "head"
    end
  end

  # Tooltip text for a branch link, marking HEAD explicitly.
  def branch_link_title_text(branch)
    "branch " + h(branch.name) + (branch.head? ? " (HEAD)" : "")
  end

  # Sorts the +heads+ alphanumerically with the HEAD first
  def sorted_git_heads(heads)
    heads.select{|h| !h.head? }.sort{|a,b|
      a.name <=> b.name
    }.unshift(heads.find{|h| h.head? }).compact
  end

  # Renders a set of list items, cut off at around +max_line_length+
  def render_chunked_branch_list_items(repository, max_line_length = 80)
    heads = sorted_git_heads(repository.git.heads)
    # Keep adding branches until the cumulative name length (plus ", "
    # padding) would exceed the budget.
    cumulative_line_length = 0
    heads_to_display = heads.select do |h|
      cumulative_line_length += (h.name.length + 2)
      cumulative_line_length < max_line_length
    end
    list_items = heads_to_display.map do |head|
      li = %Q{<li class="#{highlight_if_head(head)}">}
      li << link_to(h(head.name), repo_owner_path(repository, :project_repository_commits_in_ref_path,
                      repository.project, repository, ensplat_path(head.name)),
              :title => branch_link_title_text(head))
      li << "</li>"
      li
    end
    # Append an "and N more…" stub when branches were cut off.
    if heads_to_display.size < repository.git.heads.size
      rest_size = repository.git.heads.size - heads_to_display.size
      list_items << %{<li class="rest-of-branches">
          <small>and #{rest_size} more&hellip;</small>
        </li>}
    end
    list_items.join("\n")
  end

  # Show the clone-list filter box once either list gets long.
  def show_clone_list_search?(group_clones, user_clones)
    user_clones.size >= 5 || group_clones.size >= 5
  end

  # Widen the clone-URL field for viewers without push access (including
  # anonymous visitors).
  def css_class_for_extended_clone_url_field(repository)
    if (logged_in? && !current_user.can_write_to?(repository)) || !logged_in?
      return "extended"
    end
  end
end
Remove ref from hash_path
# encoding: utf-8
# View helpers for repository browsing: URL builders for commit logs, trees
# and blobs, plus branch-list rendering utilities.
module RepositoriesHelper
  # URL for the commit log of +objectish+ (branch/tag/sha). +options+ are
  # passed through as query params, and omitted entirely when blank so the
  # generated URL has no trailing "?".
  def log_path(objectish = "master", options = {})
    objectish = ensplat_path(objectish)
    if options.blank? # just to avoid the ? being tacked onto the url
      repo_owner_path(@repository, :project_repository_commits_in_ref_path, @project, @repository, objectish)
    else
      repo_owner_path(@repository, :project_repository_commits_in_ref_path, @project, @repository, objectish, options)
    end
  end

  # URL for a single commit page.
  def commit_path(objectish = "master")
    repo_owner_path(@repository, :repository_commit_path, @repository, objectish)
  end

  # Client-side fragment path, e.g. "#/blob/README". The ref was dropped
  # from the fragment; the parameter is kept (renamed +_ref+) so existing
  # three-argument call sites keep working.
  def hash_path(_ref, path, type)
    "#/#{type}/#{path}"
  end

  # URL for a tree view; accepts +path+ as a "a/b/c" string or an array.
  def tree_path(treeish = "master", path = [], *args)
    if path.respond_to?(:to_str)
      path = path.split("/")
    end
    repo_owner_path(@repository, :repository_tree_path, @repository, branch_with_tree(treeish, path), *args)
  end

  # URL for a submission's tree view inside a lab (same path handling as
  # tree_path).
  def submission_path(treeish = "master", path = [])
    if path.respond_to?(:to_str)
      path = path.split("/")
    end
    repo_owner_path(@repository, :lab_submission_tree_path, @lab, @submission, @repository, branch_with_tree(treeish, path))
  end

  # URL for an arbitrary repository sub-action, optionally suffixed with a
  # sha1 (empty string when nil).
  def repository_path(action, sha1=nil)
    repo_owner_path(@repository, :project_repository_path, @project, @repository)+"/"+action+"/"+sha1.to_s
  end

  # URL for viewing a blob at a given ref/path.
  def blob_path(shaish, path, *args)
    repo_owner_path(@repository, :repository_blob_path, @repository, branch_with_tree(shaish, path), *args)
  end

  # URL for the raw (un-rendered) blob contents.
  def raw_blob_path(shaish, path)
    repo_owner_path(@repository, :project_repository_raw_blob_path, @project, @repository, branch_with_tree(shaish, path))
  end

  # URL for a blob's commit history.
  def blob_history_path(shaish, path)
    repo_owner_path(@repository, :project_repository_blob_history_path, @project, @repository, branch_with_tree(shaish, path))
  end

  # True for branch names that contain a namespace separator ("/").
  def namespaced_branch?(branchname)
    branchname.include?("/")
  end

  # Label for the group link depending on the viewer's admin rights.
  def edit_or_show_group_text
    if @repository.admin?(current_user)
      t("views.repos.edit_group")
    else
      t("views.repos.show_group")
    end
  end

  # Renders <li> elements for every branch, HEAD first, each linking to its
  # commit log.
  def render_branch_list_items(branches)
    sorted_git_heads(branches).map do |branch|
      content_tag(:li,
        link_to(h(branch.name), log_path(branch.name), :title => branch_link_title_text(branch)),
        :class => "branch #{highlight_if_head(branch)}")
    end.join("\n ")
  end

  # CSS class "head" for the HEAD branch, nil otherwise.
  def highlight_if_head(branch)
    if branch.head?
      "head"
    end
  end

  # Tooltip text for a branch link, marking HEAD explicitly.
  def branch_link_title_text(branch)
    "branch " + h(branch.name) + (branch.head? ? " (HEAD)" : "")
  end

  # Sorts the +heads+ alphanumerically with the HEAD first
  def sorted_git_heads(heads)
    heads.select{|h| !h.head? }.sort{|a,b|
      a.name <=> b.name
    }.unshift(heads.find{|h| h.head? }).compact
  end

  # Renders a set of list items, cut off at around +max_line_length+
  def render_chunked_branch_list_items(repository, max_line_length = 80)
    heads = sorted_git_heads(repository.git.heads)
    # Keep adding branches until the cumulative name length (plus ", "
    # padding) would exceed the budget.
    cumulative_line_length = 0
    heads_to_display = heads.select do |h|
      cumulative_line_length += (h.name.length + 2)
      cumulative_line_length < max_line_length
    end
    list_items = heads_to_display.map do |head|
      li = %Q{<li class="#{highlight_if_head(head)}">}
      li << link_to(h(head.name), repo_owner_path(repository, :project_repository_commits_in_ref_path,
                      repository.project, repository, ensplat_path(head.name)),
              :title => branch_link_title_text(head))
      li << "</li>"
      li
    end
    # Append an "and N more…" stub when branches were cut off.
    if heads_to_display.size < repository.git.heads.size
      rest_size = repository.git.heads.size - heads_to_display.size
      list_items << %{<li class="rest-of-branches">
          <small>and #{rest_size} more&hellip;</small>
        </li>}
    end
    list_items.join("\n")
  end

  # Show the clone-list filter box once either list gets long.
  def show_clone_list_search?(group_clones, user_clones)
    user_clones.size >= 5 || group_clones.size >= 5
  end

  # Widen the clone-URL field for viewers without push access (including
  # anonymous visitors).
  def css_class_for_extended_clone_url_field(repository)
    if (logged_in? && !current_user.can_write_to?(repository)) || !logged_in?
      return "extended"
    end
  end
end
|
helpers do
  # Sends +params[:amount]+ of +params[:currency]+ from the current user's
  # primary Coinbase account, authorized with a 2FA token.
  # NOTE(review): the method name keeps the original misspelling ("payemt")
  # because call sites elsewhere depend on it. Coinbase's Account#send
  # shadows Ruby's Object#send here — intentional per the gem's API.
  def make_coinbase_payemt(params)
    account = coinbase_client.primary_account
    if account
      account.send(to: params[:to], amount: params[:amount], currency: params[:currency], two_factor_token: params[:auth], description: params[:description])
    else
      halt! 'current user coinbase account not found'
    end
  end

  # Pays the party's initializing user through the Venmo payments API.
  # Bug fix: the query string previously contained the mojibake "¬e="
  # (an HTML-entity-decoded "&note=") which produced a corrupted URL.
  # NOTE(review): +receiver+ is not defined in this scope, so this method
  # raises NameError when called; it likely needs a receiver argument like
  # the method below — confirm against call sites.
  def venmo_transfer_to_initializing_user
    uri = URI("https://api.venmo.com/v1/payments?access_token=#{session['venmo_token']['access_token']}&user_id=#{receiver.email}&note=#{params[:description].delete(' ')}&amount=#{params[:amount].to_f/100}")
    return Transaction.make_venmo_payment(uri)
  end

  # Pays +receiver+ through the Venmo payments API on behalf of the
  # current user (same "&note=" mojibake fix as above).
  def venmo_payment_from_currentuser_to_receipant(receiver)
    uri = URI("https://api.venmo.com/v1/payments?access_token=#{session['venmo_token']['access_token']}&user_id=#{receiver.email}&note=#{params[:description].delete(' ')}&amount=#{params[:amount].to_f/100}")
    return Transaction.make_venmo_payment(uri)
  end
end
merging coinbase error rescue
Merge branch 'coinbase_api_setup' of https://github.com/cusackalex/Bitmo into coinbase_api_setup
helpers do
  # Sends +params[:amount]+ of +params[:currency]+ from the current user's
  # primary Coinbase account, authorized with a 2FA token.
  # NOTE(review): the method name keeps the original misspelling ("payemt")
  # because call sites elsewhere depend on it. Coinbase's Account#send
  # shadows Ruby's Object#send here — intentional per the gem's API.
  def make_coinbase_payemt(params)
    account = coinbase_client.primary_account
    if account
      account.send(
        to: params[:to],
        amount: params[:amount],
        currency: params[:currency],
        two_factor_token: params[:auth],
        description: params[:description],
      )
    else
      halt! 'current user coinbase account not found'
    end
  rescue Coinbase::Wallet::InternalServerError
    # Deliberately swallowed: the transfer succeeds despite the API
    # reporting a 500. NOTE(review): this hides genuine server failures
    # too — consider logging the exception instead of ignoring it.
  end

  # Pays the party's initializing user through the Venmo payments API.
  # Bug fix: the query string previously contained the mojibake "¬e="
  # (an HTML-entity-decoded "&note=") which produced a corrupted URL.
  # NOTE(review): +receiver+ is not defined in this scope, so this method
  # raises NameError when called; it likely needs a receiver argument like
  # the method below — confirm against call sites.
  def venmo_transfer_to_initializing_user
    uri = URI("https://api.venmo.com/v1/payments?access_token=#{session['venmo_token']['access_token']}&user_id=#{receiver.email}&note=#{params[:description].delete(' ')}&amount=#{params[:amount].to_f/100}")
    return Transaction.make_venmo_payment(uri)
  end

  # Pays +receiver+ through the Venmo payments API on behalf of the
  # current user (same "&note=" mojibake fix as above).
  def venmo_payment_from_currentuser_to_receipant(receiver)
    uri = URI("https://api.venmo.com/v1/payments?access_token=#{session['venmo_token']['access_token']}&user_id=#{receiver.email}&note=#{params[:description].delete(' ')}&amount=#{params[:amount].to_f/100}")
    return Transaction.make_venmo_payment(uri)
  end
end
|
added NCBI BLAST 2.2.24+
Closes #4033.
Signed-off-by: Mike McQuaid <a17fed27eaa842282862ff7c1b9c8395a26ac320@mikemcquaid.com>
require 'formula'
# Homebrew formula for NCBI BLAST+ 2.2.24 (nucleotide/protein
# sequence-similarity search tools), built from the C++ toolkit sources.
class Blast < Formula
  homepage 'http://blast.ncbi.nlm.nih.gov/Blast.cgi'
  url 'ftp://ftp.ncbi.nih.gov/blast/executables/blast+/2.2.24/ncbi-blast-2.2.24+-src.tar.gz'
  md5 '8877bf01a7370ffa01b5978b8460a4d9'
  version '2.2.24'

  # The buildable sources live in the tarball's "c++" subdirectory; run the
  # stock autotools flow from inside it.
  def install
    Dir.chdir('c++') do
      system "./configure --prefix=#{prefix}"
      system 'make'
      system 'make install'
    end
  end
end
|
# A sellable article. Articles can be composed of other articles through
# ArticleStructure join records (upper/lower part links).
class Article < ActiveRecord::Base
  validates_presence_of :name
  validates_presence_of :price
  # Price may be zero (free) but never negative.
  validates_numericality_of :price, greater_than_or_equal_to: 0

  has_many :tags
  has_many :article_structures
  has_many :shopping_cart_entries
  # Structures where this article is the upper (containing) part.
  has_many :article_structure_to, :foreign_key => 'upper_part_id', :class_name => 'ArticleStructure'
  # Structures where this article is the lower (contained) part.
  has_many :article_structure_from, :foreign_key => 'lower_part_id', :class_name => 'ArticleStructure'
end
Removed unnecessary relation
# A sellable article. Articles can be composed of other articles through
# ArticleStructure join records (upper/lower part links).
class Article < ActiveRecord::Base
  validates_presence_of :name
  validates_presence_of :price
  # Price may be zero (free) but never negative.
  validates_numericality_of :price, greater_than_or_equal_to: 0

  has_many :tags
  has_many :shopping_cart_entries
  # Structures where this article is the upper (containing) part.
  has_many :article_structure_to, :foreign_key => 'upper_part_id', :class_name => 'ArticleStructure'
  # Structures where this article is the lower (contained) part.
  has_many :article_structure_from, :foreign_key => 'lower_part_id', :class_name => 'ArticleStructure'
end
|
initialize board with pieces, move pieces, check if chosen piece can be moved by player, print out board nicely
require_relative 'pieces'
class ChessBoard
  # The board is a hash keyed by algebraic coordinates ('a1'..'h8'); each
  # value is a piece object (responding to #color) or " " for empty squares.
  attr_reader :board

  # Accepts a prebuilt board hash (useful for tests); otherwise lays out the
  # starting position.
  # NOTE(review): kings are placed on the d-file and queens on the e-file,
  # which mirrors the conventional chess setup — confirm this is intended.
  def initialize(board = nil)
    @board = board || {
      'a8'=> Rook.new('black'), 'b8'=> Knight.new('black'), 'c8'=> Bishop.new('black'), 'd8'=> King.new('black'), 'e8'=> Queen.new('black'), 'f8'=> Bishop.new('black'), 'g8'=> Knight.new('black'), 'h8'=> Rook.new('black'),
      'a7'=> Pawn.new('black'), 'b7'=> Pawn.new('black'), 'c7'=> Pawn.new('black'), 'd7'=> Pawn.new('black'), 'e7'=> Pawn.new('black'), 'f7'=> Pawn.new('black'), 'g7'=> Pawn.new('black'), 'h7'=> Pawn.new('black'),
      'a6'=> " ", 'b6'=> " ", 'c6'=> " ", 'd6'=> " ", 'e6'=> " ", 'f6'=> " ", 'g6'=> " ", 'h6'=> " ",
      'a5'=> " ", 'b5'=> " ", 'c5'=> " ", 'd5'=> " ", 'e5'=> " ", 'f5'=> " ", 'g5'=> " ", 'h5'=> " ",
      'a4'=> " ", 'b4'=> " ", 'c4'=> " ", 'd4'=> " ", 'e4'=> " ", 'f4'=> " ", 'g4'=> " ", 'h4'=> " ",
      'a3'=> " ", 'b3'=> " ", 'c3'=> " ", 'd3'=> " ", 'e3'=> " ", 'f3'=> " ", 'g3'=> " ", 'h3'=> " ",
      'a2'=> Pawn.new('white'), 'b2'=> Pawn.new('white'), 'c2'=> Pawn.new('white'), 'd2'=> Pawn.new('white'), 'e2'=> Pawn.new('white'), 'f2'=> Pawn.new('white'), 'g2'=> Pawn.new('white'), 'h2'=> Pawn.new('white'),
      'a1'=> Rook.new('white'), 'b1'=> Knight.new('white'), 'c1'=> Bishop.new('white'), 'd1'=> King.new('white'), 'e1'=> Queen.new('white'), 'f1'=> Bishop.new('white'), 'g1'=> Knight.new('white'), 'h1'=> Rook.new('white')
    }
  end

  # Moves the piece at +source+ to +destination+ when it belongs to
  # +player_color+; any piece already on +destination+ is overwritten
  # (captured). No path/legality validation is performed here.
  def move_piece(source, destination, player_color)
    #checks before moving
    if valid_piece?(source, player_color)
      # Bug fix: the original wrote the moved piece back onto +source+ and
      # then blanked it, so +destination+ was never updated and the piece
      # simply vanished from the board.
      @board[destination] = @board[source]
      @board[source] = " "
    end
  end

  # True when the square holds a piece of +player_color+; returns nil for
  # an empty square (nothing to validate).
  def valid_piece?(piece_location, player_color)
    return @board[piece_location].color == player_color if @board[piece_location] != " "
  end

  # Prints the board, one rank per line with the rank number on the left and
  # the file letters underneath.
  def to_s
    @board.values.each_slice(8).each_with_index { |row, index| puts row.unshift(8 - index).join(" ") }
    puts " a b c d e f g h"
  end
end
|
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for dm-aggregates: DataMapper support for aggregate functions
# (count, min, max, avg, sum) on collections.
Gem::Specification.new do |s|
  s.name = "dm-aggregates"
  s.version = "1.2.0.rc1"

  s.required_rubygems_version = Gem::Requirement.new("> 1.3.1") if s.respond_to? :required_rubygems_version=
  s.authors = ["Foy Savas"]
  s.date = "2011-09-05"
  s.description = "DataMapper plugin providing support for aggregates on collections"
  s.email = "foysavas [a] gmail [d] com"
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    "Gemfile",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "dm-aggregates.gemspec",
    "lib/dm-aggregates.rb",
    "lib/dm-aggregates/adapters/dm-do-adapter.rb",
    "lib/dm-aggregates/aggregate_functions.rb",
    "lib/dm-aggregates/collection.rb",
    "lib/dm-aggregates/core_ext/symbol.rb",
    "lib/dm-aggregates/functions.rb",
    "lib/dm-aggregates/model.rb",
    "lib/dm-aggregates/operators.rb",
    "lib/dm-aggregates/query.rb",
    "lib/dm-aggregates/repository.rb",
    "spec/isolated/require_after_setup_spec.rb",
    "spec/isolated/require_before_setup_spec.rb",
    "spec/isolated/require_spec.rb",
    "spec/public/collection_spec.rb",
    "spec/public/model_spec.rb",
    "spec/public/shared/aggregate_shared_spec.rb",
    "spec/rcov.opts",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "tasks/spec.rake",
    "tasks/yard.rake",
    "tasks/yardstick.rake"
  ]
  s.homepage = "http://github.com/datamapper/dm-aggregates"
  s.require_paths = ["lib"]
  s.rubyforge_project = "datamapper"
  s.rubygems_version = "1.8.10"
  s.summary = "DataMapper plugin providing support for aggregates on collections"

  # Declare dependencies with whichever API the running RubyGems supports:
  # >= 1.2 distinguishes runtime from development dependencies.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rake>, ["~> 0.9.2"])
      s.add_development_dependency(%q<rspec>, ["~> 1.3.2"])
    else
      s.add_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rake>, ["~> 0.9.2"])
      s.add_dependency(%q<rspec>, ["~> 1.3.2"])
    end
  else
    s.add_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rake>, ["~> 0.9.2"])
    s.add_dependency(%q<rspec>, ["~> 1.3.2"])
  end
end
Regenerate gemspec for version 1.2.0.rc1
# Generated by jeweler
# DO NOT EDIT THIS FILE DIRECTLY
# Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
# -*- encoding: utf-8 -*-
# Gemspec for dm-aggregates: DataMapper support for aggregate functions
# (count, min, max, avg, sum) on collections.
Gem::Specification.new do |s|
  s.name = "dm-aggregates"
  s.version = "1.2.0.rc1"

  s.required_rubygems_version = Gem::Requirement.new("> 1.3.1") if s.respond_to? :required_rubygems_version=
  s.authors = ["Foy Savas"]
  s.date = "2011-09-09"
  s.description = "DataMapper plugin providing support for aggregates on collections"
  s.email = "foysavas [a] gmail [d] com"
  s.extra_rdoc_files = [
    "LICENSE",
    "README.rdoc"
  ]
  s.files = [
    "Gemfile",
    "LICENSE",
    "README.rdoc",
    "Rakefile",
    "VERSION",
    "dm-aggregates.gemspec",
    "lib/dm-aggregates.rb",
    "lib/dm-aggregates/adapters/dm-do-adapter.rb",
    "lib/dm-aggregates/aggregate_functions.rb",
    "lib/dm-aggregates/collection.rb",
    "lib/dm-aggregates/core_ext/symbol.rb",
    "lib/dm-aggregates/functions.rb",
    "lib/dm-aggregates/model.rb",
    "lib/dm-aggregates/operators.rb",
    "lib/dm-aggregates/query.rb",
    "lib/dm-aggregates/repository.rb",
    "spec/isolated/require_after_setup_spec.rb",
    "spec/isolated/require_before_setup_spec.rb",
    "spec/isolated/require_spec.rb",
    "spec/public/collection_spec.rb",
    "spec/public/model_spec.rb",
    "spec/public/shared/aggregate_shared_spec.rb",
    "spec/rcov.opts",
    "spec/spec.opts",
    "spec/spec_helper.rb",
    "tasks/spec.rake",
    "tasks/yard.rake",
    "tasks/yardstick.rake"
  ]
  s.homepage = "http://github.com/datamapper/dm-aggregates"
  s.require_paths = ["lib"]
  s.rubyforge_project = "datamapper"
  s.rubygems_version = "1.8.10"
  s.summary = "DataMapper plugin providing support for aggregates on collections"

  # Declare dependencies with whichever API the running RubyGems supports:
  # >= 1.2 distinguishes runtime from development dependencies.
  if s.respond_to? :specification_version then
    s.specification_version = 3

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
      s.add_development_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_development_dependency(%q<rake>, ["~> 0.9.2"])
      s.add_development_dependency(%q<rspec>, ["~> 1.3.2"])
    else
      s.add_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
      s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
      s.add_dependency(%q<rake>, ["~> 0.9.2"])
      s.add_dependency(%q<rspec>, ["~> 1.3.2"])
    end
  else
    s.add_dependency(%q<dm-core>, ["~> 1.2.0.rc1"])
    s.add_dependency(%q<jeweler>, ["~> 1.6.4"])
    s.add_dependency(%q<rake>, ["~> 0.9.2"])
    s.add_dependency(%q<rspec>, ["~> 1.3.2"])
  end
end
|
class Portal::ClazzesController < ApplicationController
# TODO: There need to be a lot more
# controller filters here...
# this only protects management actions:
include RestrictedPortalController
# PUNDIT_CHECK_FILTERS
before_filter :teacher_admin_or_config, :only => [:class_list, :edit]
before_filter :student_teacher_admin_or_config, :only => [:show]
#
# Check that the current teacher owns the class they are
# accessing.
#
include RestrictedTeacherController
before_filter :check_teacher_owns_clazz, :only => [ :roster,
:materials,
:fullstatus ]
def current_clazz
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHOOSE_AUTHORIZE
# no authorization needed ...
# authorize Portal::Clazz
# authorize @clazz
# authorize Portal::Clazz, :new_or_create?
# authorize @clazz, :update_edit_or_destroy?
Portal::Clazz.find(params[:id])
end
public
# GET /portal_clazzes
# GET /portal_clazzes.xml
def index
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE
# authorize Portal::Clazz
# PUNDIT_REVIEW_SCOPE
# PUNDIT_CHECK_SCOPE (did not find instance)
# @clazzes = policy_scope(Portal::Clazz)
@portal_clazzes = Portal::Clazz.all
respond_to do |format|
format.html # index.html.erb
format.xml { render :xml => @portal_clazzes }
end
end
# GET /portal_clazzes/1
# GET /portal_clazzes/1.xml
def show
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE (did not find instance)
# authorize @clazz
@portal_clazz = Portal::Clazz.find(params[:id], :include => [:teachers, { :offerings => [:learners, :open_responses, :multiple_choices] }])
@portal_clazz.refresh_saveable_response_objects
@teacher = @portal_clazz.parent
if current_settings.allow_default_class
@offerings = @portal_clazz.offerings_with_default_classes(current_visitor)
else
@offerings = @portal_clazz.offerings
end
# Save the left pane sub-menu item
Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['NONE'])
if current_user.portal_teacher
redirect_to(action: 'materials') and return
end
respond_to do |format|
format.html # show.html.erb
format.xml {render :xml => @portal_clazz}
end
end
# GET /portal_clazzes/new
# GET /portal_clazzes/new.xml
def new
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE
# authorize Portal::Clazz
@portal_clazz = Portal::Clazz.new
if params[:teacher_id]
@portal_clazz.teacher = Portal::Teacher.find(params[:teacher_id])
elsif current_visitor.portal_teacher
@portal_clazz.teacher = current_visitor.portal_teacher
@portal_clazz.teacher_id = current_visitor.portal_teacher.id
end
respond_to do |format|
format.html # new.html.erb
format.xml { render :xml => @portal_clazz }
end
end
# GET /portal_clazzes/1/edit
def edit
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE (did not find instance)
# authorize @clazz
@portal_clazz = Portal::Clazz.find(params[:id])
# Save the left pane sub-menu item
Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['CLASS_SETUP'])
end
# POST /portal_clazzes
# POST /portal_clazzes.xml
def create
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE
# authorize Portal::Clazz
@object_params = params[:portal_clazz]
school_id = @object_params.delete(:school)
grade_levels = @object_params.delete(:grade_levels)
@portal_clazz = Portal::Clazz.new(@object_params)
okToCreate = true
if !school_id
# This should never happen, since the schools dropdown should consist of the default site school if the current user has no schools
flash['error'] = "You need to belong to a school in order to create classes. Please join a school and try again."
okToCreate = false
end
if current_visitor.anonymous?
flash['error'] = "Anonymous can't create classes. Please log in and try again."
okToCreate = false
end
if okToCreate and Admin::Settings.default_settings.enable_grade_levels?
grade_levels.each do |name, v|
grade = Portal::Grade.where(name: name).first_or_create
@portal_clazz.grades << grade if grade
end if grade_levels
if @portal_clazz.grades.empty?
flash['error'] = "You need to select at least one grade level for this class."
okToCreate = false
end
end
if okToCreate && !@portal_clazz.teacher
if current_visitor.portal_teacher
@portal_clazz.teacher_id = current_visitor.portal_teacher.id
@portal_clazz.teacher = current_visitor.portal_teacher
else
teacher = Portal::Teacher.create(:user => current_visitor) # Former call set :user_id directly; class validations didn't like that
if teacher && teacher.id # Former call used .id directly on create method, leaving room for NilClass error
@portal_clazz.teacher_id = teacher.id # Former call tried to do another Portal::Teacher.create. We don't want to double-create this teacher
@portal_clazz.teacher = teacher
@portal_clazz.teacher.schools << Portal::School.find_by_name(APP_CONFIG[:site_school])
else
flash['error'] = "There was an error trying to associate you with this class. Please try again."
okToCreate = false
end
end
end
if okToCreate
# We can't use Course.find_or_create_by_course_number_name_and_school_id here, because we don't know what course_number we're looking for
course = Portal::Course.find_by_name_and_school_id(@portal_clazz.name, school_id)
course = Portal::Course.create({
:name => @portal_clazz.name,
:course_number => nil,
:school_id => school_id
}) if course.nil?
if course
# This will finally tie this clazz to a course and a school
@portal_clazz.course = course
else
flash['error'] = "There was an error trying to create your new class. Please try again."
okToCreate = false
end
end
respond_to do |format|
if okToCreate && @portal_clazz.save
# send email notifications about class creation
Portal::ClazzMailer.clazz_creation_notification(@current_user, @portal_clazz).deliver
flash['notice'] = 'Class was successfully created.'
format.html { redirect_to(url_for([:materials, @portal_clazz])) }
format.xml { render :xml => @portal_clazz, :status => :created, :location => @portal_clazz }
else
format.html { render :action => "new" }
format.xml { render :xml => @portal_clazz.errors, :status => :unprocessable_entity }
end
end
end
# PUT /portal_clazzes/1
# PUT /portal_clazzes/1.xml
def update
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE (did not find instance)
# authorize @clazz
@portal_clazz = Portal::Clazz.find(params[:id])
object_params = params[:portal_clazz]
grade_levels = object_params.delete(:grade_levels)
if grade_levels
# This logic will attempt to prevent someone from removing all grade levels from a class.
grades_to_add = []
grade_levels.each do |name, v|
grade = Portal::Grade.find_by_name(name)
grades_to_add << grade if grade
end
object_params[:grades] = grades_to_add if !grades_to_add.empty?
end
new_teacher_ids = (object_params.delete(:current_teachers) || '').split(',').map {|id| id.to_i}
update_teachers = -> {
current_teacher_ids = @portal_clazz.teachers.map {|t| t.id}
new_teacher_ids.each do |new_teacher_id|
if !current_teacher_ids.include?(new_teacher_id)
teacher = Portal::Teacher.find_by_id(new_teacher_id)
teacher.add_clazz(@portal_clazz) if teacher
end
end
current_teacher_ids.each do |current_teacher_id|
if !new_teacher_ids.include?(current_teacher_id)
teacher = Portal::Teacher.find_by_id(current_teacher_id)
teacher.remove_clazz(@portal_clazz) if teacher
end
end
@portal_clazz.reload
}
if request.xhr?
if @portal_clazz.update_attributes(object_params)
update_teachers.call
end
render :partial => 'show', :locals => { :portal_clazz => @portal_clazz }
else
respond_to do |format|
okToUpdate = true
if Admin::Settings.default_settings.enable_grade_levels?
if !grade_levels
flash['error'] = "You need to select at least one grade level for this class."
okToUpdate = false
end
end
if okToUpdate && @portal_clazz.update_attributes(object_params)
update_teachers.call
flash['notice'] = 'Class was successfully updated.'
format.html { redirect_to(url_for([:materials, @portal_clazz])) }
format.xml { head :ok }
else
format.html { render :action => "edit" }
format.xml { render :xml => @portal_clazz.errors, :status => :unprocessable_entity }
end
end
end
end
# DELETE /portal_clazzes/1
# DELETE /portal_clazzes/1.xml
def destroy
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHECK_AUTHORIZE (did not find instance)
# authorize @clazz
@portal_clazz = Portal::Clazz.find(params[:id])
@portal_clazz.destroy
respond_to do |format|
format.html { redirect_to(portal_clazzes_url) }
format.js
format.xml { head :ok }
end
end
## END OF CRUD METHODS
def edit_offerings
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHOOSE_AUTHORIZE
# no authorization needed ...
# authorize Portal::Clazz
# authorize @clazz
# authorize Portal::Clazz, :new_or_create?
# authorize @clazz, :update_edit_or_destroy?
@portal_clazz = Portal::Clazz.find(params[:id])
end
# GET /portal_clazzes/1/class_list
def class_list
@portal_clazz = Portal::Clazz.find_by_id(params[:id])
respond_to do |format|
format.html { render :layout => 'report'}
end
end
# GET /portal_clazzes/1/roster
def roster
@portal_clazz = Portal::Clazz.find(params[:id])
# Save the left pane sub-menu item
Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['STUDENT_ROSTER'])
end
# GET /portal_clazzes/manage_classes
# Only signed-in users with a teacher record may manage classes; everyone
# else gets Pundit's standard authorization error.
# NOTE(review): the guard mixes current_user (nil when anonymous) with
# current_visitor — presumably the two agree here; confirm.
def manage_classes
  if current_user.nil? || !current_visitor.portal_teacher
    raise Pundit::NotAuthorizedError
  end
  @teacher = current_visitor.portal_teacher # stray trailing semicolon removed
end
# GET /portal_clazzes/1/materials
# Materials page for a class. Eager-loads offerings->learners and
# students->users in one query to avoid N+1 lookups, then records the
# MATERIALS sub-menu selection for the left pane.
def materials
# PUNDIT_REVIEW_AUTHORIZE
# PUNDIT_CHOOSE_AUTHORIZE
# no authorization needed ...
# authorize Portal::Clazz
# authorize @clazz
# authorize Portal::Clazz, :new_or_create?
# authorize @clazz, :update_edit_or_destroy?
@portal_clazz = Portal::Clazz.includes(:offerings => :learners, :students => :user).find(params[:id])
# Save the left pane sub-menu item
Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['MATERIALS'])
end
# POST /portal_clazzes/1/sort_offerings
def sort_offerings
  # PUNDIT_REVIEW_AUTHORIZE
  # PUNDIT_CHOOSE_AUTHORIZE
  # no authorization needed ...
  # authorize Portal::Clazz
  # authorize @clazz
  # authorize Portal::Clazz, :new_or_create?
  # authorize @clazz, :update_edit_or_destroy?
  # Persist the submitted ordering; positions are 1-based.
  if current_visitor.portal_teacher
    params[:clazz_offerings].each.with_index(1) do |offering_id, position|
      Portal::Offering.update(offering_id, :position => position)
    end
  end
  render :nothing => true
end
# GET /portal_clazzes/1/fullstatus
# Full status page for a class.
def fullstatus
  # PUNDIT_REVIEW_AUTHORIZE
  # PUNDIT_CHOOSE_AUTHORIZE
  # no authorization needed ...
  # authorize Portal::Clazz
  # authorize @clazz
  # authorize Portal::Clazz, :new_or_create?
  # authorize @clazz, :update_edit_or_destroy?
  # The record was previously fetched twice (find, then find_by_id, plus a
  # stray semicolon). A single find is equivalent: it raises RecordNotFound
  # for unknown ids exactly as the old first call did.
  @portal_clazz = Portal::Clazz.find(params[:id])
  # Save the left pane sub-menu item
  Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['FULL_STATUS'])
end
# this is used by the iSENSE interactive and app inorder to get information
# about the class given the class_word. It does not require authorization
# because the user needs to know the classword.
# Most of this information is already available just by signing up as a student
# and entering in the class word.
def info
  # PUNDIT_REVIEW_AUTHORIZE
  # PUNDIT_CHOOSE_AUTHORIZE
  # no authorization needed ...
  # authorize Portal::Clazz
  # authorize @clazz
  # authorize Portal::Clazz, :new_or_create?
  # authorize @clazz, :update_edit_or_destroy?
  # look up the class with the class word and return a json object
  clazz = Portal::Clazz.find_by_class_word(params[:class_word])
  unless clazz
    render :json => {:error => "No class found"}, :status => :not_found
    return
  end
  school = clazz.school
  teacher_entries = clazz.teachers.map do |teacher|
    {
      :id => url_for(teacher.user),
      :first_name => teacher.user.first_name,
      :last_name => teacher.user.last_name
    }
  end
  render :json => {
    :uri => url_for(clazz),
    :name => clazz.name,
    :state => school && school.state,
    :teachers => teacher_entries
  }
end
# GET /portal_clazzes/1/external_report/:report_id
def external_report
  # Build the external report URL for this class and send the visitor there.
  clazz = Portal::Clazz.find(params[:id])
  report = ExternalReport.find(params[:report_id])
  redirect_to report.url_for_class(clazz.id, current_visitor, request.protocol, request.host_with_port)
end
end
Fixed find by id with includes in classes_controller
# Controller for Portal::Clazz (class) resources: CRUD plus roster,
# materials, status and reporting pages for teachers.
class Portal::ClazzesController < ApplicationController
  # TODO: There need to be a lot more
  # controller filters here...
  # this only protects management actions:
  include RestrictedPortalController
  # PUNDIT_CHECK_FILTERS
  before_filter :teacher_admin_or_config, :only => [:class_list, :edit]
  before_filter :student_teacher_admin_or_config, :only => [:show]

  #
  # Check that the current teacher owns the class they are
  # accessing.
  #
  include RestrictedTeacherController
  before_filter :check_teacher_owns_clazz, :only => [ :roster,
                                                      :materials,
                                                      :fullstatus ]

  # Used by RestrictedTeacherController's ownership check to locate the
  # class addressed by the current request.
  def current_clazz
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    Portal::Clazz.find(params[:id])
  end

  public

  # GET /portal_clazzes
  # GET /portal_clazzes.xml
  def index
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE
    # authorize Portal::Clazz
    # PUNDIT_REVIEW_SCOPE
    # PUNDIT_CHECK_SCOPE (did not find instance)
    # @clazzes = policy_scope(Portal::Clazz)
    @portal_clazzes = Portal::Clazz.all
    respond_to do |format|
      format.html # index.html.erb
      format.xml { render :xml => @portal_clazzes }
    end
  end

  # GET /portal_clazzes/1
  # GET /portal_clazzes/1.xml
  def show
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE (did not find instance)
    # authorize @clazz
    # Eager-load teachers and each offering's responses in one query.
    # NOTE(review): `.first` yields nil for an unknown id, so the next line
    # would raise NoMethodError — confirm ids are always valid here.
    @portal_clazz = Portal::Clazz.where(id: params[:id]).includes([:teachers, { :offerings => [:learners, :open_responses, :multiple_choices] }]).first
    @portal_clazz.refresh_saveable_response_objects
    @teacher = @portal_clazz.parent
    if current_settings.allow_default_class
      @offerings = @portal_clazz.offerings_with_default_classes(current_visitor)
    else
      @offerings = @portal_clazz.offerings
    end
    # Save the left pane sub-menu item
    Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['NONE'])
    # Teachers land on the materials page instead of the generic show page.
    # Guarding current_user avoids a NoMethodError when it is nil (anonymous).
    if current_user && current_user.portal_teacher
      redirect_to(action: 'materials') and return
    end
    respond_to do |format|
      format.html # show.html.erb
      format.xml { render :xml => @portal_clazz }
    end
  end

  # GET /portal_clazzes/new
  # GET /portal_clazzes/new.xml
  def new
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE
    # authorize Portal::Clazz
    @portal_clazz = Portal::Clazz.new
    # Preassign the teacher: an explicit teacher_id param wins, otherwise
    # default to the current visitor's own teacher record (if any).
    if params[:teacher_id]
      @portal_clazz.teacher = Portal::Teacher.find(params[:teacher_id])
    elsif current_visitor.portal_teacher
      @portal_clazz.teacher = current_visitor.portal_teacher
      @portal_clazz.teacher_id = current_visitor.portal_teacher.id
    end
    respond_to do |format|
      format.html # new.html.erb
      format.xml { render :xml => @portal_clazz }
    end
  end

  # GET /portal_clazzes/1/edit
  def edit
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE (did not find instance)
    # authorize @clazz
    @portal_clazz = Portal::Clazz.find(params[:id])
    # Save the left pane sub-menu item
    Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['CLASS_SETUP'])
  end

  # POST /portal_clazzes
  # POST /portal_clazzes.xml
  #
  # Creates a class, its grade-level associations, a teacher association for
  # the current visitor (creating a teacher record when needed), and a course
  # tying the class to a school. Any problem flashes an error and re-renders
  # the form.
  def create
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE
    # authorize Portal::Clazz
    @object_params = params[:portal_clazz]
    school_id = @object_params.delete(:school)
    grade_levels = @object_params.delete(:grade_levels)
    @portal_clazz = Portal::Clazz.new(@object_params)
    okToCreate = true
    if !school_id
      # This should never happen, since the schools dropdown should consist of the default site school if the current user has no schools
      flash['error'] = "You need to belong to a school in order to create classes. Please join a school and try again."
      okToCreate = false
    end
    if current_visitor.anonymous?
      flash['error'] = "Anonymous can't create classes. Please log in and try again."
      okToCreate = false
    end
    if okToCreate and Admin::Settings.default_settings.enable_grade_levels?
      # grade_levels arrives as a {name => checked} hash from the form.
      grade_levels.each do |name, v|
        grade = Portal::Grade.where(name: name).first_or_create
        @portal_clazz.grades << grade if grade
      end if grade_levels
      if @portal_clazz.grades.empty?
        flash['error'] = "You need to select at least one grade level for this class."
        okToCreate = false
      end
    end
    if okToCreate && !@portal_clazz.teacher
      if current_visitor.portal_teacher
        @portal_clazz.teacher_id = current_visitor.portal_teacher.id
        @portal_clazz.teacher = current_visitor.portal_teacher
      else
        teacher = Portal::Teacher.create(:user => current_visitor) # Former call set :user_id directly; class validations didn't like that
        if teacher && teacher.id # Former call used .id directly on create method, leaving room for NilClass error
          @portal_clazz.teacher_id = teacher.id # Former call tried to do another Portal::Teacher.create. We don't want to double-create this teacher
          @portal_clazz.teacher = teacher
          @portal_clazz.teacher.schools << Portal::School.find_by_name(APP_CONFIG[:site_school])
        else
          flash['error'] = "There was an error trying to associate you with this class. Please try again."
          okToCreate = false
        end
      end
    end
    if okToCreate
      # We can't use Course.find_or_create_by_course_number_name_and_school_id here, because we don't know what course_number we're looking for
      course = Portal::Course.find_by_name_and_school_id(@portal_clazz.name, school_id)
      course = Portal::Course.create({
        :name => @portal_clazz.name,
        :course_number => nil,
        :school_id => school_id
      }) if course.nil?
      if course
        # This will finally tie this clazz to a course and a school
        @portal_clazz.course = course
      else
        flash['error'] = "There was an error trying to create your new class. Please try again."
        okToCreate = false
      end
    end
    respond_to do |format|
      if okToCreate && @portal_clazz.save
        # send email notifications about class creation
        # NOTE(review): uses @current_user while the rest of the controller
        # uses current_user/current_visitor — confirm the ivar is set here.
        Portal::ClazzMailer.clazz_creation_notification(@current_user, @portal_clazz).deliver
        flash['notice'] = 'Class was successfully created.'
        format.html { redirect_to(url_for([:materials, @portal_clazz])) }
        format.xml { render :xml => @portal_clazz, :status => :created, :location => @portal_clazz }
      else
        format.html { render :action => "new" }
        format.xml { render :xml => @portal_clazz.errors, :status => :unprocessable_entity }
      end
    end
  end

  # PUT /portal_clazzes/1
  # PUT /portal_clazzes/1.xml
  #
  # Updates the class attributes, grade levels and teacher list. XHR requests
  # get the refreshed 'show' partial; regular requests redirect to the
  # materials page on success.
  def update
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE (did not find instance)
    # authorize @clazz
    @portal_clazz = Portal::Clazz.find(params[:id])
    object_params = params[:portal_clazz]
    grade_levels = object_params.delete(:grade_levels)
    if grade_levels
      # This logic will attempt to prevent someone from removing all grade levels from a class.
      grades_to_add = []
      grade_levels.each do |name, v|
        grade = Portal::Grade.find_by_name(name)
        grades_to_add << grade if grade
      end
      object_params[:grades] = grades_to_add if !grades_to_add.empty?
    end
    # current_teachers arrives as a comma-separated id list; after the
    # attribute update this lambda reconciles it against the existing teachers.
    new_teacher_ids = (object_params.delete(:current_teachers) || '').split(',').map {|id| id.to_i}
    update_teachers = -> {
      current_teacher_ids = @portal_clazz.teachers.map {|t| t.id}
      new_teacher_ids.each do |new_teacher_id|
        if !current_teacher_ids.include?(new_teacher_id)
          teacher = Portal::Teacher.find_by_id(new_teacher_id)
          teacher.add_clazz(@portal_clazz) if teacher
        end
      end
      current_teacher_ids.each do |current_teacher_id|
        if !new_teacher_ids.include?(current_teacher_id)
          teacher = Portal::Teacher.find_by_id(current_teacher_id)
          teacher.remove_clazz(@portal_clazz) if teacher
        end
      end
      @portal_clazz.reload
    }
    if request.xhr?
      if @portal_clazz.update_attributes(object_params)
        update_teachers.call
      end
      render :partial => 'show', :locals => { :portal_clazz => @portal_clazz }
    else
      respond_to do |format|
        okToUpdate = true
        if Admin::Settings.default_settings.enable_grade_levels?
          if !grade_levels
            flash['error'] = "You need to select at least one grade level for this class."
            okToUpdate = false
          end
        end
        if okToUpdate && @portal_clazz.update_attributes(object_params)
          update_teachers.call
          flash['notice'] = 'Class was successfully updated.'
          format.html { redirect_to(url_for([:materials, @portal_clazz])) }
          format.xml { head :ok }
        else
          format.html { render :action => "edit" }
          format.xml { render :xml => @portal_clazz.errors, :status => :unprocessable_entity }
        end
      end
    end
  end

  # DELETE /portal_clazzes/1
  # DELETE /portal_clazzes/1.xml
  def destroy
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHECK_AUTHORIZE (did not find instance)
    # authorize @clazz
    @portal_clazz = Portal::Clazz.find(params[:id])
    @portal_clazz.destroy
    respond_to do |format|
      format.html { redirect_to(portal_clazzes_url) }
      format.js
      format.xml { head :ok }
    end
  end

  ## END OF CRUD METHODS

  # GET /portal_clazzes/1/edit_offerings
  def edit_offerings
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    @portal_clazz = Portal::Clazz.find(params[:id])
  end

  # GET /portal_clazzes/1/class_list
  # NOTE(review): find_by_id returns nil for an unknown id (unlike find) —
  # presumably the report view tolerates a nil class; confirm.
  def class_list
    @portal_clazz = Portal::Clazz.find_by_id(params[:id])
    respond_to do |format|
      format.html { render :layout => 'report'}
    end
  end

  # GET /portal_clazzes/1/roster
  def roster
    @portal_clazz = Portal::Clazz.find(params[:id])
    # Save the left pane sub-menu item
    Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['STUDENT_ROSTER'])
  end

  # Only signed-in users with a teacher record may manage classes.
  def manage_classes
    if current_user.nil? || !current_visitor.portal_teacher
      raise Pundit::NotAuthorizedError
    end
    @teacher = current_visitor.portal_teacher # stray semicolon removed
  end

  # GET /portal_clazzes/1/materials — eager-loads offerings/learners and
  # students/users to avoid N+1 queries.
  def materials
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    @portal_clazz = Portal::Clazz.includes(:offerings => :learners, :students => :user).find(params[:id])
    # Save the left pane sub-menu item
    Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['MATERIALS'])
  end

  # POST /portal_clazzes/1/sort_offerings — persists drag-and-drop ordering
  # (positions are 1-based).
  def sort_offerings
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    if current_visitor.portal_teacher
      params[:clazz_offerings].each_with_index{|id,idx| Portal::Offering.update(id, :position => (idx + 1))}
    end
    render :nothing => true
  end

  # GET /portal_clazzes/1/fullstatus
  def fullstatus
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    # The record was previously fetched twice (find, then find_by_id); a
    # single find is equivalent and still raises RecordNotFound when missing.
    @portal_clazz = Portal::Clazz.find(params[:id])
    # Save the left pane sub-menu item
    Portal::Teacher.save_left_pane_submenu_item(current_visitor, Portal::Teacher.LEFT_PANE_ITEM['FULL_STATUS'])
  end

  # this is used by the iSENSE interactive and app inorder to get information
  # about the class given the class_word. It does not require authorization
  # because the user needs to know the classword.
  # Most of this information is already available just by signing up as a student
  # and entering in the class word.
  def info
    # PUNDIT_REVIEW_AUTHORIZE
    # PUNDIT_CHOOSE_AUTHORIZE
    # no authorization needed ...
    # authorize Portal::Clazz
    # authorize @clazz
    # authorize Portal::Clazz, :new_or_create?
    # authorize @clazz, :update_edit_or_destroy?
    # look up the class with the class word and return a json object
    clazz = Portal::Clazz.find_by_class_word(params[:class_word])
    if clazz
      state = nil
      if school = clazz.school
        state = school.state
      end
      render :json => {
        :uri => url_for(clazz),
        :name => clazz.name,
        :state => state,
        :teachers => clazz.teachers.map{|teacher|
          {
            :id => url_for(teacher.user),
            :first_name => teacher.user.first_name,
            :last_name => teacher.user.last_name
          }
        }
      }
    else
      render :json => {:error => "No class found"}, :status => :not_found
    end
  end

  # Redirects to an external report service URL built for this class.
  def external_report
    portal_clazz = Portal::Clazz.find(params[:id])
    report = ExternalReport.find(params[:report_id])
    next_url = report.url_for_class(portal_clazz.id, current_visitor, request.protocol, request.host_with_port)
    redirect_to next_url
  end
end
require 'benchmark'
require 'fast_output_buffer'
require 'action_view'
require 'action_view/buffers'
# Minimal view-rendering harness used to benchmark output-buffer
# implementations against each other.
class RenderContext
  include ActionView::Context

  # Remember the stock buffer class so original! can restore it later.
  OriginalBuffer = ActionView::OutputBuffer

  def initialize
    @view = ActionView::Base.new
  end

  class << self
    # Restore ActionView's stock output buffer.
    def original!
      install_buffer(OriginalBuffer)
    end

    # Swap in the FastSafeBuffer implementation under test.
    def fast!
      install_buffer(FastSafeBuffer)
    end

    private

    # Re-point ActionView::OutputBuffer at klass, silencing the
    # already-initialized-constant warning.
    def install_buffer(klass)
      silence_warnings { ActionView.const_set :OutputBuffer, klass }
    end
  end

  # Render an inline template of `times` ERB tags, each emitting `size`
  # characters via the `size` local.
  def render(times, size)
    template = "<%= 'x' * size %>\n" * times
    @view.render(inline: template, locals: { size: size })
  end
end
REPS = 20 # Number of times to call render
TAGS = 300 # Number of ERB tags in template
TAG_SIZE = 3000 # Size of string inside ERB tags.

# Compare the two buffer implementations. bmbm runs a rehearsal pass first
# so GC/warm-up costs don't skew the timings.
Benchmark.bmbm do |x|
  x.report("fast ") do
    RenderContext.fast!
    context = RenderContext.new
    REPS.times { context.render(TAGS, TAG_SIZE) }
  end
  # Label typo fixed: "orignal" -> "original".
  x.report("original ") do
    RenderContext.original!
    context = RenderContext.new
    REPS.times { context.render(TAGS, TAG_SIZE) }
  end
end
Better benchmark.
require 'benchmark'
require 'fast_output_buffer'
require 'action_view'
require 'action_view/buffers'
# Minimal view-rendering harness for benchmarking output-buffer
# implementations.
class RenderContext
  include ActionView::Context

  # Stock buffer class, captured so it can be restored by original!.
  OriginalBuffer = ActionView::OutputBuffer

  def initialize
    @view = ActionView::Base.new
  end

  # Restore ActionView's stock output buffer.
  def self.original!
    swap_buffer(OriginalBuffer)
  end

  # Install the FastSafeBuffer implementation under test.
  def self.fast!
    swap_buffer(FastSafeBuffer)
  end

  # Re-point ActionView::OutputBuffer, silencing the constant warning.
  def self.swap_buffer(klass)
    silence_warnings { ActionView.const_set :OutputBuffer, klass }
  end
  private_class_method :swap_buffer

  # Render an inline template of `times` ERB tags, each emitting `size`
  # characters via the `size` local.
  def render(times, size)
    @view.render(inline: "<%= 'x' * size %>\n" * times, locals: { size: size })
  end
end
REPS = 500 # Number of times to call render
TAGS = [100, 100, 500, 500, 1000] # Number of ERB tags in template
TAG_SIZE = [100, 2500, 2500, 5000, 1000] # Size of string inside ERB tags.

#### Rehearsal ####
# Warm both implementations once so first-run costs don't skew the timings.
[:fast!, :original!].each do |switch|
  RenderContext.public_send(switch)
  RenderContext.new.render(10, 10)
end
###################

# One benchmark section per (tag count, tag size) pairing.
TAGS.zip(TAG_SIZE).each do |tags, tag_size|
  puts "-" * 60
  puts "#{tags} tags, #{tag_size} characters in tags, rendered #{REPS} times"
  puts
  Benchmark.bm do |x|
    x.report("FastSafeBuffer ") do
      RenderContext.fast!
      context = RenderContext.new
      REPS.times { context.render(tags, tag_size) }
    end
    x.report("ActionView::OutputBuffer ") do
      RenderContext.original!
      context = RenderContext.new
      REPS.times { context.render(tags, tag_size) }
    end
  end
end
# Rails application template: bootstrap + devise + slim + heroku quickstart.
# Template assets are fetched remotely from GitHub via `path`.
# TODO Style devise forms for bootstrap for gem
# TODO Add a footer to application layout
path = "https://raw.githubusercontent.com/cwsaylor/rails-quickstart/master/templates/"
# Debugging only in dev/test; heroku-oriented gems only in production.
gem_group :test, :development do
gem 'byebug'
end
gem_group :production do
gem 'newrelic_rpm'
gem 'rails_12factor'
gem 'unicorn'
end
gem 'autoprefixer-rails'
gem 'bootstrap_form'
gem 'bootstrap-sass', '~> 3.2.0'
gem 'devise'
gem 'foreman'
gem 'slim-rails'
# Pin the Ruby version right below the rubygems source line.
inject_into_file "Gemfile", :after => "source 'https://rubygems.org'\n" do
"ruby '2.1.3'\n"
end
run "bundle install"
generate "devise:install"
generate "devise user"
#generate "devise:views"
run "bundle exec rake db:create"
# Don't run migrate so you can customize the devise migration
#run "bundle exec rake db:migrate"
run "erb2slim -d app/views/devise"
generate "controller pages --no-helper --no-assets --no-test-framework"
route "get '/styleguide', to: 'pages#styleguide', as: :styleguide"
route "root to: 'pages#index'"
# Replace the generated layout/stylesheet with the template's versions.
remove_file "app/views/layouts/application.html.erb"
remove_file "app/assets/stylesheets/application.css"
get path + "bootstrap/application.html.slim", "app/views/layouts/application.html.slim"
get path + "bootstrap/application.css.scss", "app/assets/stylesheets/application.css.scss"
get path + "bootstrap/navbar.html.slim", "app/views/layouts/_navbar.html.slim"
get path + "bootstrap/styleguide.html.erb", "app/views/pages/styleguide.html.erb"
get path + "bootstrap/index.html.slim", "app/views/pages/index.html.slim"
get path + "holder.js", "vendor/assets/javascripts/holder.js"
get path + "unicorn.rb", "config/unicorn.rb"
# NOTE(review): `get` fetches a single file; "bootstrap/devise" looks like a
# directory, so this step likely fails — confirm (a later revision uses
# `directory` with local paths instead).
get path + "bootstrap/devise", "app/views/"
get "https://gist.githubusercontent.com/rwdaigle/2253296/raw/newrelic.yml", "config/newrelic.yml"
# Brand the layout/navbar with the application name.
gsub_file "app/views/layouts/application.html.slim", "changeme", app_name.titleize
gsub_file "app/views/layouts/_navbar.html.slim", "changeme", app_name.titleize
inject_into_file 'app/assets/javascripts/application.js', :before => "//= require_tree ." do
<<-EOS
//= require bootstrap-sprockets
//= require holder
EOS
end
append_file ".gitignore" do
<<-EOS
.DS_Store
.env
EOS
end
create_file ".slugignore" do
<<-EOS
/test
/doc
EOS
end
# Mailer host configuration per environment.
application(nil, env: "development") do
"config.action_mailer.default_url_options = { :host => 'localhost:3000' }\n"
end
application(nil, env: "test") do
"config.action_mailer.default_url_options = { :host => 'localhost:3000' }\n"
end
application(nil, env: "production") do
<<-EOS
# TODO Change default host
config.action_mailer.default_url_options = { :host => '#{app_name}.herokuapp.com' }
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => '#{app_name}.herokuapp.com'
}
ActionMailer::Base.delivery_method ||= :smtp
EOS
end
append_to_file 'test/test_helper.rb' do
<<-EOS
class ActionController::TestCase
include Devise::TestHelpers
end
EOS
end
create_file "Procfile" do
"web: bundle exec unicorn -p $PORT -c ./config/unicorn.rb"
end
create_file ".env" do
<<-EOS
RACK_ENV=development
PORT=5000
NEW_RELIC_APP_NAME=#{app_name}
EOS
end
run "bundle exec spring binstub --all"
git :init
git :add => "."
git :commit => "-m 'Setup base Rails 4.1 app.'"
# Post-setup instructions for deploying to heroku.
puts "################################################################################"
puts "heroku create"
puts "heroku addons:add newrelic:stark"
puts "git push heroku master"
puts "heroku config:set NEW_RELIC_APP_NAME=APP_NAME"
puts "heroku run rake db:migrate"
puts "heroku restart"
puts "heroku addons:open newrelic"
puts "################################################################################"
Convert to local paths
# Rails application template (local-path variant): bootstrap + devise + slim
# + heroku quickstart. Template assets are copied from a templates/ directory
# next to this file instead of being fetched from GitHub.
# TODO Add a footer to application layout
# Make files relative to this template resolvable by copy_file/directory.
def source_paths
[File.expand_path(File.dirname(__FILE__))]
end
gem_group :test, :development do
gem 'byebug'
end
gem_group :production do
gem 'newrelic_rpm'
gem 'rails_12factor'
gem 'unicorn'
end
gem 'autoprefixer-rails'
gem 'bootstrap_form'
gem 'bootstrap-sass', '~> 3.2.0'
gem 'devise'
gem 'foreman'
gem 'slim-rails'
# Pin the Ruby version right below the rubygems source line.
inject_into_file "Gemfile", :after => "source 'https://rubygems.org'\n" do
"ruby '2.1.3'\n"
end
run "bundle install"
generate "devise:install"
generate "devise user"
#generate "devise:views"
run "bundle exec rake db:create"
# Don't run migrate so you can customize the devise migration
#run "bundle exec rake db:migrate"
#run "erb2slim -d app/views/devise"
generate "controller pages --no-helper --no-assets --no-test-framework"
route "get '/styleguide', to: 'pages#styleguide', as: :styleguide"
route "root to: 'pages#index'"
# Replace the generated layout/stylesheet with the template's versions.
remove_file "app/views/layouts/application.html.erb"
remove_file "app/assets/stylesheets/application.css"
copy_file "templates/bootstrap/application.html.slim", "app/views/layouts/application.html.slim"
copy_file "templates/bootstrap/application.css.scss", "app/assets/stylesheets/application.css.scss"
copy_file "templates/bootstrap/navbar.html.slim", "app/views/layouts/_navbar.html.slim"
copy_file "templates/bootstrap/styleguide.html.erb", "app/views/pages/styleguide.html.erb"
copy_file "templates/bootstrap/index.html.slim", "app/views/pages/index.html.slim"
copy_file "templates/holder.js", "vendor/assets/javascripts/holder.js"
copy_file "templates/unicorn.rb", "config/unicorn.rb"
directory "templates/bootstrap/devise", "app/views/devise"
get "https://gist.githubusercontent.com/rwdaigle/2253296/raw/newrelic.yml", "config/newrelic.yml"
# Brand the layout/navbar with the application name.
gsub_file "app/views/layouts/application.html.slim", "changeme", app_name.titleize
gsub_file "app/views/layouts/_navbar.html.slim", "changeme", app_name.titleize
inject_into_file 'app/assets/javascripts/application.js', :before => "//= require_tree ." do
<<-EOS
//= require bootstrap-sprockets
//= require holder
EOS
end
append_file ".gitignore" do
<<-EOS
.DS_Store
.env
EOS
end
create_file ".slugignore" do
<<-EOS
/test
/doc
EOS
end
# Mailer host configuration per environment.
application(nil, env: "development") do
"config.action_mailer.default_url_options = { :host => 'localhost:3000' }\n"
end
application(nil, env: "test") do
"config.action_mailer.default_url_options = { :host => 'localhost:3000' }\n"
end
application(nil, env: "production") do
<<-EOS
# TODO Change default host
config.action_mailer.default_url_options = { :host => '#{app_name}.herokuapp.com' }
ActionMailer::Base.smtp_settings = {
:address => 'smtp.sendgrid.net',
:port => '587',
:authentication => :plain,
:user_name => ENV['SENDGRID_USERNAME'],
:password => ENV['SENDGRID_PASSWORD'],
:domain => '#{app_name}.herokuapp.com'
}
ActionMailer::Base.delivery_method ||= :smtp
EOS
end
append_to_file 'test/test_helper.rb' do
<<-EOS
class ActionController::TestCase
include Devise::TestHelpers
end
EOS
end
create_file "Procfile" do
"web: bundle exec unicorn -p $PORT -c ./config/unicorn.rb"
end
create_file ".env" do
<<-EOS
RACK_ENV=development
PORT=5000
NEW_RELIC_APP_NAME=#{app_name}
EOS
end
run "bundle exec spring binstub --all"
git :init
git :add => "."
git :commit => "-m 'Setup base Rails 4.1 app.'"
# Post-setup instructions for deploying to heroku.
puts "################################################################################"
puts "heroku create"
puts "heroku addons:add newrelic:stark"
puts "git push heroku master"
puts "heroku config:set NEW_RELIC_APP_NAME=APP_NAME"
puts "heroku run rake db:migrate"
puts "heroku restart"
puts "heroku addons:open newrelic"
puts "################################################################################"
readme "README.md"
|
# Rake tasks for packaging the rspec-given gem.
require 'rubygems/package_task'
require './lib/rspec/given/version'
# Only define packaging tasks when RubyGems is available.
if ! defined?(Gem)
puts "Package Target requires RubyGEMs"
else
# Files shipped in the gem: top-level docs, library code, tests, examples.
PKG_FILES = FileList[
'[A-Z]*',
'lib/*.rb',
'lib/**/*.rb',
'test/**/*.rb',
'examples/**/*',
'doc/**/*',
]
PKG_FILES.exclude('TAGS')
SPEC = Gem::Specification.new do |s|
#### Basic information.
s.name = 'rspec-given'
s.version = RSpec::Given::VERSION
s.summary = "Given/When/Then Specification Extensions for RSpec."
s.description = <<EOF
Given is an RSpec extension that allows explicit definition of the
pre and post-conditions for code under test.
EOF
s.files = PKG_FILES.to_a
s.require_path = 'lib' # Use these for libraries.
s.rdoc_options = [
'--line-numbers', '--inline-source',
'--main' , 'README.md',
'--title', 'RSpec Given Extensions'
]
s.add_dependency("rspec", "> 1.2.8")
s.add_development_dependency("bluecloth")
s.add_development_dependency("rdoc", "> 2.4.2")
s.author = "Jim Weirich"
s.email = "jim.weirich@gmail.com"
# NOTE(review): rubyforge_project is obsolete in modern RubyGems; harmless
# but could be dropped.
s.homepage = "http://github.com/jimweirich/rspec-given"
s.rubyforge_project = "given"
end
# gem/zip/tar packaging tasks driven by the spec above.
package_task = Gem::PackageTask.new(SPEC) do |pkg|
pkg.need_zip = true
pkg.need_tar = true
end
# Regenerate the stand-alone YAML gemspec whenever this rake file changes.
file "rspec-given.gemspec" => ["rakelib/gemspec.rake"] do |t|
require 'yaml'
open(t.name, "w") { |f| f.puts SPEC.to_yaml }
end
desc "Create a stand-alone gemspec"
task :gemspec => "rspec-given.gemspec"
end
Added explicit requirement for 1.9 to gemspec.
# Rake tasks for packaging the rspec-given gem (adds an explicit minimum
# Ruby version of 1.9.2 to the gemspec).
require 'rubygems/package_task'
require './lib/rspec/given/version'
# Only define packaging tasks when RubyGems is available.
if ! defined?(Gem)
puts "Package Target requires RubyGEMs"
else
# Files shipped in the gem: top-level docs, library code, tests, examples.
PKG_FILES = FileList[
'[A-Z]*',
'lib/*.rb',
'lib/**/*.rb',
'test/**/*.rb',
'examples/**/*',
'doc/**/*',
]
PKG_FILES.exclude('TAGS')
SPEC = Gem::Specification.new do |s|
#### Basic information.
s.name = 'rspec-given'
s.version = RSpec::Given::VERSION
s.summary = "Given/When/Then Specification Extensions for RSpec."
s.description = <<EOF
Given is an RSpec extension that allows explicit definition of the
pre and post-conditions for code under test.
EOF
s.files = PKG_FILES.to_a
s.require_path = 'lib' # Use these for libraries.
s.rdoc_options = [
'--line-numbers', '--inline-source',
'--main' , 'README.md',
'--title', 'RSpec Given Extensions'
]
s.add_dependency("rspec", "> 1.2.8")
s.add_development_dependency("bluecloth")
s.add_development_dependency("rdoc", "> 2.4.2")
# The extension relies on 1.9-only syntax/features.
s.required_ruby_version = '>= 1.9.2'
s.author = "Jim Weirich"
s.email = "jim.weirich@gmail.com"
s.homepage = "http://github.com/jimweirich/rspec-given"
s.rubyforge_project = "given"
end
# gem/zip/tar packaging tasks driven by the spec above.
package_task = Gem::PackageTask.new(SPEC) do |pkg|
pkg.need_zip = true
pkg.need_tar = true
end
# Regenerate the stand-alone YAML gemspec whenever this rake file changes.
file "rspec-given.gemspec" => ["rakelib/gemspec.rake"] do |t|
require 'yaml'
open(t.name, "w") { |f| f.puts SPEC.to_yaml }
end
desc "Create a stand-alone gemspec"
task :gemspec => "rspec-given.gemspec"
end
|
# this file is updated automatically using a rake task, any changes will be lost
# GoCD release version consumed by the build tooling.
GOCD_VERSION = '17.11.0'
bump version to 17.12.0
# this file is updated automatically using a rake task, any changes will be lost
# GoCD release version consumed by the build tooling.
GOCD_VERSION = '17.12.0'
|
Ruby solution to Problem 2 added
#!/usr/bin/env ruby
# Declare a method that returns the nth fibonacci number
# (fib(0) = 0, fib(1) = 1; any n <= 0 yields 0, as before).
def fib(n)
  return 0 if n <= 0
  prev, curr = 0, 1
  n.times { prev, curr = curr, prev + curr }
  prev
end
# Sum the even Fibonacci numbers not exceeding four million (Project Euler #2).
# The original enumerated fib(i) and recomputed the whole sequence three times
# per loop iteration (quadratic work overall); generating terms incrementally
# prints the identical result (4613732) in linear time.
even_fib_sum = 0
a, b = 1, 2
while a <= 4_000_000
  even_fib_sum += a if a.even?
  a, b = b, a + b
end
# print the sum
p even_fib_sum
|
# coding: utf-8
# Gem specification for regaliator, a Ruby client for Regalii's API.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'regaliator/version'
Gem::Specification.new do |spec|
spec.name = "regaliator"
spec.version = Regaliator::VERSION
spec.authors = ["Hesham El-Nahhas"]
spec.email = ["hesham@regalii.com"]
spec.summary = %q{Ruby wrapper for Regalii's API}
spec.description = ""
spec.homepage = "https://www.regalii.com"
spec.license = "MIT"
# Ship every git-tracked file; executables come from bin/, tests from the
# usual test directories.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_dependency 'json'
spec.add_development_dependency "bundler", "~> 1.7"
# Old version of Guard to support Ruby 2.1.5.
spec.add_development_dependency "guard", "~> 2.11.0"
spec.add_development_dependency "guard-minitest"
spec.add_development_dependency "minitest-reporters", "~> 1.1.13"
spec.add_development_dependency "minitest", "~> 5.8.4"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "vcr", "~> 2.9.3"
spec.add_development_dependency "webmock", "~> 1.19"
end
Add information to the gemspec:
- Add a contributor;
- Declare the supported Ruby versions;
- Update the website URL.
# coding: utf-8
# Gem specification for regaliator, a Ruby client for Regalii's API.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'regaliator/version'
Gem::Specification.new do |spec|
spec.name = "regaliator"
spec.version = Regaliator::VERSION
spec.authors = ["Hesham El-Nahhas", "Geoffrey Roguelon"]
spec.email = ["hesham@regalii.com", "geoffrey@regalii.com"]
spec.summary = %q{Ruby wrapper for Regalii's API}
spec.description = ""
spec.homepage = "https://www.regalii.com/api"
spec.license = "MIT"
# Ship every git-tracked file; executables come from bin/, tests from the
# usual test directories.
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
# NOTE(review): '<= 2.4' excludes 2.4.1+ patch releases (2.4.1 > 2.4 as a
# Gem::Version) — confirm whether '< 2.5' was intended.
spec.required_ruby_version = ['>= 2.1.0', '<= 2.4']
spec.add_dependency 'json'
spec.add_development_dependency "bundler", "~> 1.7"
# Old version of Guard to support Ruby 2.1.5.
spec.add_development_dependency "guard", "~> 2.11.0"
spec.add_development_dependency "guard-minitest"
spec.add_development_dependency "minitest-reporters", "~> 1.1.13"
spec.add_development_dependency "minitest", "~> 5.8.4"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "vcr", "~> 2.9.3"
spec.add_development_dependency "webmock", "~> 1.19"
end
|
# Job to move reactions and samples that completed a gate transfer out of a
# source collection into a 'transferred' child collection, then notify the
# collection owner (and admin user 101 on failure) via the gate-transfer
# message channel.
# (The previous header comment described an unrelated molecule-CID update
# job and has been corrected.)
class MoveToCollectionJob < ActiveJob::Base
# queue_as :move_to_collection
# Per-record progress markers written into each element's :state slot.
STATE_MOVING = 'moving'
STATE_MOVED = 'moved'
# id        - source collection id
# reactions - array of hashes ({:id, :state, ...}) produced by GateTransferJob
# samples   - array of hashes ({:id, :state, ...}) produced by GateTransferJob
def perform(id, reactions, samples)
collection = Collection.find(id)
tr_col = collection.children.find_or_create_by(
user_id: collection.user_id, label: 'transferred'
)
begin
# Move each successfully transferred reaction; a per-element failure is
# logged and recorded on the element without aborting the batch.
reactions&.map do |reaction|
next unless reaction[:state] == GateTransferJob::STATE_TRANSFERRED
begin
reaction[:state] = MoveToCollectionJob::STATE_MOVING
CollectionsReaction.move_to_collection(
[reaction[:id]], collection, tr_col.id
)
reaction[:state] = MoveToCollectionJob::STATE_MOVED
rescue StandardError => e
Rails.logger.error e
reaction[:msg] = e
end
end
# Same per-element handling for samples.
samples&.map do |sample|
next unless sample[:state] == GateTransferJob::STATE_TRANSFERRED
begin
sample[:state] = MoveToCollectionJob::STATE_MOVING
CollectionsSample.move_to_collection(
[sample[:id]], collection, tr_col.id
)
sample[:state] = MoveToCollectionJob::STATE_MOVED
rescue => e
Rails.logger.error e
sample[:msg] = e
end
end
# Any sample still linked to the source collection means the sync is
# incomplete (limit(1): only existence matters).
moresamples = CollectionsSample.select(:sample_id).where(collection_id: id)
.limit(1).pluck(:sample_id)
channel = Channel.find_by(subject: Channel::GATE_TRANSFER_NOTIFICATION)
# NOTE(review): when channel is nil this early return still runs the ensure
# block below, where channel.msg_template raises NoMethodError — confirm a
# channel always exists, or guard the ensure block.
return true if channel&.msg_template.nil?
error_samples = samples.select{ |o| o[:state] != MoveToCollectionJob::STATE_MOVED }
error_reactions = reactions.select{ |o| o[:state] != MoveToCollectionJob::STATE_MOVED }
raise "jobs are not completed!! " if error_samples&.count > 0 || error_reactions&.count > 0
rescue => e
Rails.logger.error moresamples if moresamples
Rails.logger.error error_samples if error_samples&.count > 0
Rails.logger.error error_reactions if error_reactions&.count > 0
# NOTE(review): if the exception was raised before error_reactions /
# error_samples were assigned, `&.to_json` yields nil and this string
# concatenation raises TypeError, masking the original error.
raise "Jobs are not completed!! " + error_reactions&.to_json + error_samples&.to_json
ensure
content = channel.msg_template
content['data'] = 'Still some samples are on the blanket, please sync. again.' if moresamples&.count > 0
content['data'] = 'Some samples/reaction are not completed....' if error_samples&.count > 0 || error_reactions&.count > 0
users = [collection.user_id]
# Admin user 101 is additionally notified when anything went wrong.
users.push(101) if moresamples&.count > 0 || error_samples&.count > 0 || error_reactions&.count > 0
message = Message.create_msg_notification(channel.id,content,collection.user_id,users)
end
true
end
end
fix move collection with id
# Job to move transferred reactions and samples into a 'transferred' child
# collection after a gate transfer, notifying the collection owner of the
# outcome. (Header corrected: it previously described a molecule-CID job.)
class MoveToCollectionJob < ActiveJob::Base
  # queue_as :move_to_collection
  STATE_MOVING = 'moving'
  STATE_MOVED = 'moved'

  # id        - id of the source Collection.
  # reactions - Array of Hashes (:id, :state, :msg) produced by GateTransferJob.
  # samples   - Array of Hashes (:id, :state, :msg) produced by GateTransferJob.
  #
  # Moves every gate-transferred record into a 'transferred' child collection,
  # then notifies the collection owner. Returns true; raises if any record
  # could not be moved.
  def perform(id, reactions, samples)
    collection = Collection.find(id)
    tr_col = collection.children.find_or_create_by(
      user_id: collection.user_id, label: 'transferred'
    )
    begin
      reactions&.each do |reaction|
        # Only records the gate actually transferred are moved.
        next unless reaction[:state] == GateTransferJob::STATE_TRANSFERRED
        begin
          reaction[:state] = STATE_MOVING
          CollectionsReaction.move_to_collection(
            reaction[:id], collection.id, tr_col.id
          )
          reaction[:state] = STATE_MOVED
        rescue StandardError => e
          # Record the failure on the element; completeness is checked below.
          Rails.logger.error e
          reaction[:msg] = e
        end
      end
      samples&.each do |sample|
        next unless sample[:state] == GateTransferJob::STATE_TRANSFERRED
        begin
          sample[:state] = STATE_MOVING
          CollectionsSample.move_to_collection(
            sample[:id], collection.id, tr_col.id
          )
          sample[:state] = STATE_MOVED
        rescue StandardError => e
          Rails.logger.error e
          sample[:msg] = e
        end
      end
      # Any sample still attached to the source collection means the sync is
      # incomplete (limit(1): we only need to know whether one exists).
      moresamples = CollectionsSample.select(:sample_id).where(collection_id: id)
                                     .limit(1).pluck(:sample_id)
      channel = Channel.find_by(subject: Channel::GATE_TRANSFER_NOTIFICATION)
      return true if channel&.msg_template.nil?
      error_samples = samples.to_a.select { |o| o[:state] != STATE_MOVED }
      error_reactions = reactions.to_a.select { |o| o[:state] != STATE_MOVED }
      raise "jobs are not completed!! " if error_samples.any? || error_reactions.any?
    rescue StandardError => e
      Rails.logger.error e
      Rails.logger.error moresamples if moresamples
      # The error lists are nil when the failure happened before they were
      # computed (e.g. Collection.find raised); to_a keeps logging/raising safe
      # (the original crashed on `nil > 0` / `nil + String` here).
      Rails.logger.error error_samples if error_samples&.any?
      Rails.logger.error error_reactions if error_reactions&.any?
      raise "Jobs are not completed!! " + error_reactions.to_a.to_json + error_samples.to_a.to_json
    ensure
      # ensure also runs after the early `return true` and after exceptions
      # raised before `channel` was assigned, so it must tolerate a missing
      # channel/template (the original raised NoMethodError on nil here,
      # clobbering the intended return value).
      content = channel&.msg_template
      if content
        content['data'] = 'Still some samples are on the blanket, please sync. again.' if moresamples.to_a.any?
        content['data'] = 'Some samples/reaction are not completed....' if error_samples.to_a.any? || error_reactions.to_a.any?
        users = [collection.user_id]
        # User 101 is additionally notified on any failure — presumably an
        # admin account; TODO confirm.
        users.push(101) if moresamples.to_a.any? || error_samples.to_a.any? || error_reactions.to_a.any?
        Message.create_msg_notification(channel.id, content, collection.user_id, users)
      end
    end
    true
  end
end
|
# Base desktop/development package set; one `package` resource per entry.
# %w replaces %W since no interpolation is needed, and the duplicate
# `iptraf` entry was dropped so the same resource is not declared twice.
%w[aptitude
   gnome-terminal
   inotify-tools
   lubuntu-desktop zsh autotools-dev automake libtool cmake
   libevent-dev libncurses5-dev zlib1g-dev libbz2-dev libyaml-dev
   libxml2-dev libxslt1-dev libreadline-dev xclip patch
   libx11-dev
   libgmp3-dev
   chromium-browser
   cpanminus
   wget ncftp ntp nmap
   ibus-mozc mozc-server mozc-utils-gui
   exuberant-ctags
   emacs emacsen-common rdtool-elisp emacs-goodies-el
   emacs-mozc
   markdown
   fonts-takao-gothic
   xfonts-shinonome python-xlib
   libterm-readkey-perl
   mlocate ssh
   libsqlite3-dev sqlite3
   expect dkms
   libgif-dev
   libmcrypt-dev
   libffi-dev
   libclang-dev
   libcurl4-gnutls-dev
   libgnutls28-dev
   libgtk-3-dev
   libtiff-dev
   libwebkit2gtk-4.0-dev
   libid3-dev
   eblook lookup-el
   python3-sphinx
   python3-pip
   python3-sphinxcontrib.plantuml
   pylint3
   python3-pytest-pylint
   postgresql postgresql-client libpq-dev
   manpages-dev
   iftop bmon iptraf nload nethogs speedometer slurm
   tcptrack vnstat bwm-ng cbm pktstat netdiag
   ifstat nmon saidar
   jq whois
   irony-server
   nginx
   tokyotyrant tokyocabinet-bin libtokyotyrant-dev
   postfix
   cmigemo
   mailutils
   curl unzip libsnappy-dev
   qt4-qmake libqt4-dev xvfb
   rlwrap
   texi2html texinfo
   libc6:i386 libncurses5:i386 libstdc++6:i386 lib32z1 libbz2-1.0:i386
   qemu-kvm virt-manager libvirt-bin bridge-utils
   libgdbm-dev libgroonga-dev
   llvm clang].each do |pkg|
  package pkg do
    action :install
  end
end
remove irony-server
# Base desktop/development package set (irony-server removed in this
# revision); one `package` resource per entry.
# %w replaces %W since no interpolation is needed, and the duplicate
# `iptraf` entry was dropped so the same resource is not declared twice.
%w[aptitude
   gnome-terminal
   inotify-tools
   lubuntu-desktop zsh autotools-dev automake libtool cmake
   libevent-dev libncurses5-dev zlib1g-dev libbz2-dev libyaml-dev
   libxml2-dev libxslt1-dev libreadline-dev xclip patch
   libx11-dev
   libgmp3-dev
   chromium-browser
   cpanminus
   wget ncftp ntp nmap
   ibus-mozc mozc-server mozc-utils-gui
   exuberant-ctags
   emacs emacsen-common rdtool-elisp emacs-goodies-el
   emacs-mozc
   markdown
   fonts-takao-gothic
   xfonts-shinonome python-xlib
   libterm-readkey-perl
   mlocate ssh
   libsqlite3-dev sqlite3
   expect dkms
   libgif-dev
   libmcrypt-dev
   libffi-dev
   libclang-dev
   libcurl4-gnutls-dev
   libgnutls28-dev
   libgtk-3-dev
   libtiff-dev
   libwebkit2gtk-4.0-dev
   libid3-dev
   eblook lookup-el
   python3-sphinx
   python3-pip
   python3-sphinxcontrib.plantuml
   pylint3
   python3-pytest-pylint
   postgresql postgresql-client libpq-dev
   manpages-dev
   iftop bmon iptraf nload nethogs speedometer slurm
   tcptrack vnstat bwm-ng cbm pktstat netdiag
   ifstat nmon saidar
   jq whois
   nginx
   tokyotyrant tokyocabinet-bin libtokyotyrant-dev
   postfix
   cmigemo
   mailutils
   curl unzip libsnappy-dev
   qt4-qmake libqt4-dev xvfb
   rlwrap
   texi2html texinfo
   libc6:i386 libncurses5:i386 libstdc++6:i386 lib32z1 libbz2-1.0:i386
   qemu-kvm virt-manager libvirt-bin bridge-utils
   libgdbm-dev libgroonga-dev
   llvm clang].each do |pkg|
  package pkg do
    action :install
  end
end
|
# Based on opsworks-cookbooks/opsworks_berkshelf/providers/runner.rb
# Vendors the cookbooks declared in the Tabula Rasa cookbook's Berksfile.
site_cookbooks_dir = ::File.join(node[:tabula_rasa][:home_dir], 'site-cookbooks')
berkshelf_cookbooks_dir = ::File.join(node[:tabula_rasa][:home_dir], 'berkshelf-cookbooks')
# Berkshelf >= 3 renamed `install --path DIR` to `vendor DIR`.
berks_install_options = if node['opsworks_berkshelf'] && node['opsworks_berkshelf']['version'].to_i >= 3
  "vendor #{berkshelf_cookbooks_dir}"
else
  "install --path #{berkshelf_cookbooks_dir}"
end
berks_install_options += ' --debug' if node['opsworks_berkshelf'] && node['opsworks_berkshelf']['debug']
berks_install_command = "/opt/aws/opsworks/local/bin/berks #{berks_install_options}"
# Resolve the Berksfile to use: a top-level Berksfile wins; otherwise, when
# site-cookbooks contains exactly one folder, assume the Berksfile lives there
# (whether or not it actually exists); otherwise return ''.
# Shared by the install block and its only_if guard below — previously this
# snippet was duplicated in both places (see the old TODO comments).
locate_berksfile = lambda do
  berksfile_top = ::File.join(site_cookbooks_dir, 'Berksfile')
  folders = Dir.glob(::File.join(site_cookbooks_dir, '*')).select { |f| ::File.directory? f }
  if ::File.exist? berksfile_top
    berksfile_top
  elsif folders.size == 1
    ::File.join(folders.first, 'Berksfile')
  else
    ''
  end
end
# `berks vendor` refuses to write into an existing target directory.
directory berkshelf_cookbooks_dir do
  action :delete
  recursive true
  only_if do
    node['opsworks_berkshelf'] && node['opsworks_berkshelf']['version'].to_i >= 3
  end
end
ruby_block 'Install the cookbooks specified in the Tabula Rasa cookbook\'s Berksfile and their dependencies' do
  block do
    berksfile = locate_berksfile.call
    Chef::Log.info OpsWorks::ShellOut.shellout(
      berks_install_command,
      :cwd => ::File.dirname(berksfile),
      :environment => {
        "BERKSHELF_PATH" => ::File.join(node[:tabula_rasa][:home_dir], 'cache'),
        "LC_ALL" => "en_US.UTF-8",
        "LANG" => "en_US.UTF-8"
      }
    )
  end
  only_if do
    ::File.exist?('/opt/aws/opsworks/local/bin/berks') && ::File.exist?(locate_berksfile.call)
  end
end
Isolate berks cache dir
# Based on opsworks-cookbooks/opsworks_berkshelf/providers/runner.rb
# Vendors the cookbooks declared in the Tabula Rasa cookbook's Berksfile,
# keeping Berkshelf's cache isolated under the Tabula Rasa home dir.
site_cookbooks_dir = ::File.join(node[:tabula_rasa][:home_dir], 'site-cookbooks')
berkshelf_cookbooks_dir = ::File.join(node[:tabula_rasa][:home_dir], 'berkshelf-cookbooks')
berks_cache_dir = ::File.join(node[:tabula_rasa][:home_dir], 'cache', 'berks')
# Berkshelf >= 3 renamed `install --path DIR` to `vendor DIR`.
berks_install_options = if node['opsworks_berkshelf'] && node['opsworks_berkshelf']['version'].to_i >= 3
  "vendor #{berkshelf_cookbooks_dir}"
else
  "install --path #{berkshelf_cookbooks_dir}"
end
berks_install_options += ' --debug' if node['opsworks_berkshelf'] && node['opsworks_berkshelf']['debug']
berks_install_command = "/opt/aws/opsworks/local/bin/berks #{berks_install_options}"
# Resolve the Berksfile to use: a top-level Berksfile wins; otherwise, when
# site-cookbooks contains exactly one folder, assume the Berksfile lives there
# (whether or not it actually exists); otherwise return ''.
# Shared by the install block and its only_if guard below — previously this
# snippet was duplicated in both places (see the old TODO comments).
locate_berksfile = lambda do
  berksfile_top = ::File.join(site_cookbooks_dir, 'Berksfile')
  folders = Dir.glob(::File.join(site_cookbooks_dir, '*')).select { |f| ::File.directory? f }
  if ::File.exist? berksfile_top
    berksfile_top
  elsif folders.size == 1
    ::File.join(folders.first, 'Berksfile')
  else
    ''
  end
end
# `berks vendor` refuses to write into an existing target directory.
directory berkshelf_cookbooks_dir do
  action :delete
  recursive true
  only_if do
    node['opsworks_berkshelf'] && node['opsworks_berkshelf']['version'].to_i >= 3
  end
end
directory berks_cache_dir do
  recursive true
  action :create
  only_if do
    !::File.directory?(berks_cache_dir)
  end
end
ruby_block 'Install the cookbooks specified in the Tabula Rasa cookbook\'s Berksfile and their dependencies' do
  block do
    berksfile = locate_berksfile.call
    Chef::Log.info OpsWorks::ShellOut.shellout(
      berks_install_command,
      :cwd => ::File.dirname(berksfile),
      :environment => {
        "BERKSHELF_PATH" => berks_cache_dir,
        "LC_ALL" => "en_US.UTF-8",
        "LANG" => "en_US.UTF-8"
      }
    )
  end
  only_if do
    ::File.exist?('/opt/aws/opsworks/local/bin/berks') && ::File.exist?(locate_berksfile.call)
  end
end
|
# Cookbook Name:: mcafee
# Recipe:: eposerver
#
# Copyright 2016, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
# Once McAfee software is installed on the endpoint, it will attempt to communicate with the
# McAfee epo server agent handler (there are some exceptions but this is a general rule).
# If you are running hosts on cloud instances like AWS and do not have VPN connectivity to the ePO
# server, you may need to have a host entry in order to resolve what might be an internal hostname/fqdn.
# Check your ePO server for the configured hostname. By default, the agent software will attempt to first
# communicate with the Agent Handler by IP address, hostname, then netbios name.
# A recommended configuration when the ePO server is hosted behind a NAT device on a private network is to
# configure a dedicated Agent Handler to manage connections from remote hosts (although not required as long
# as there is connectivity to the default agent handler on the ePO server host)
# This recipe will add the host entry into the node platform host file so the agent handler can be resolved.
# Configure the mcafee.epo.agenthndlr attribute in the attributes/default.rb
# If this is a remotely connecting client (i.e. over the internet) use the publicly resolvable IP and/or fqdn
# and the hostname in the same mapping. For example:
# mcafee.epo.agenthndlr.fqdn = www.lepages.net
# mcafee.epo.agenthndlr.ip = xx.xx.xx.xx
#
# If the ePO server configured can be resolved, nothing is created on the endpoint
require 'resolv' # Resolv is used by the not_if guard below; load it explicitly
mcafee = node['mcafee']['epo']['agenthndlr']
ruby_block "ensure node can resolve ePO FQDN" do
  block do
    # Pick the platform-specific hosts file (Windows keeps it under system32).
    hosts_path = if node['platform_family'] == 'windows'
                   "C:\\Windows\\system32\\drivers\\etc\\hosts"
                 else
                   "/etc/hosts"
                 end
    fe = Chef::Util::FileEdit.new(hosts_path)
    fe.insert_line_if_no_match(/#{mcafee['fqdn']}/,
              "#{mcafee['ip']} #{mcafee['fqdn']} #{mcafee['shortname']}")
    fe.write_file
    Chef::Log.info "Added host entry for ePO Agent Handler into host file"
  end
  # Skip entirely when DNS can already resolve the agent handler FQDN.
  not_if { Resolv.getaddress(mcafee['fqdn']) rescue false }
end
#---------------------------End of Recipe-----------------------------------
modified eposerver
# Cookbook Name:: mcafee
# Recipe:: eposerver
#
# Copyright 2016, YOUR_COMPANY_NAME
#
# All rights reserved - Do Not Redistribute
# Once McAfee software is installed on the endpoint, it will attempt to communicate with the
# McAfee epo server agent handler (there are some exceptions but this is a general rule).
# If you are running hosts on cloud instances like AWS and do not have VPN connectivity to the ePO
# server, you may need to have a host entry in order to resolve what might be an internal hostname/fqdn.
# Check your ePO server for the configured hostname. By default, the agent software will attempt to first
# communicate with the Agent Handler by IP address, hostname, then netbios name.
# A recommended configuration when the ePO server is hosted behind a NAT device on a private network is to
# configure a dedicated Agent Handler to manage connections from remote hosts (although not required as long
# as there is connectivity to the default agent handler on the ePO server host)
# This recipe will add the host entry into the node platform host file so the agent handler can be resolved.
# Configure the mcafee.epo.agenthndlr attribute in the attributes/default.rb
# If this is a remotely connecting client (i.e. over the internet) use the publicly resolvable IP and/or fqdn
# and the hostname in the same mapping. For example:
# mcafee.epo.agenthndlr.fqdn = www.lepages.net
# mcafee.epo.agenthndlr.ip = xx.xx.xx.xx
#
# If the ePO server configured can be resolved, nothing is created on the endpoint
# BUGFIX: removed a stray debug `puts "hi"` left over from development.
require 'resolv' # Resolv is used by the not_if guard below; load it explicitly
mcafee = node['mcafee']['epo']['agenthndlr']
ruby_block "ensure node can resolve ePO FQDN" do
  block do
    # Pick the platform-specific hosts file (Windows keeps it under system32).
    hosts_path = if node['platform_family'] == 'windows'
                   "C:\\Windows\\system32\\drivers\\etc\\hosts"
                 else
                   "/etc/hosts"
                 end
    fe = Chef::Util::FileEdit.new(hosts_path)
    fe.insert_line_if_no_match(/#{mcafee['fqdn']}/,
              "#{mcafee['ip']} #{mcafee['fqdn']} #{mcafee['shortname']}")
    fe.write_file
    Chef::Log.info "Added host entry for ePO Agent Handler into host file"
  end
  # Skip entirely when DNS can already resolve the agent handler FQDN.
  not_if { Resolv.getaddress(mcafee['fqdn']) rescue false }
end
#---------------------------End of Recipe-----------------------------------
|
Add script to convert from Waze Mercator projection to WGS84 as used by OpenStreetMap et al.
require 'json'
require 'hashie'

# Convert Waze irregularity coordinates from spherical-Mercator metres to
# WGS84 lat/lon (as used by OpenStreetMap) via the external `proj` tool.
mercator = IO.read("waze/1396867493_mercator.txt")
msg = Hashie::Mash.new(JSON.parse(mercator))

# Write one "y x" pair per line for proj's stdin-file interface.
File.open("proj_input", "w") do |f|
  msg.irregularities.each do |irr|
    irr.line.each do |point|
      f.puts "#{point.y} #{point.x}"
    end
  end
end

# BUGFIX: the format argument was wrapped in typographic quotes (’%.9f’),
# which the shell passes through literally and proj then rejects; they must
# be plain ASCII single quotes.
`proj +proj=merc +lon_0=0 +k=1 +x_0=0 +y_0=0 +a=6378137 +b=6378137 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs -I -f '%.9f' proj_input > proj_output`
|
# encoding: utf-8
#
# Cookbook Name:: octohost
# Recipe:: rackspace
#
# Copyright (C) 2014, Darron Froese <darron@froese.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Rewrites the PRIVATE_IP line of /etc/default/octohost so it is derived at
# runtime from the eth1 interface (Rackspace private network).
# NOTE(review): assumes PRIVATE_IP lives on line 6 of that file — confirm.
# NOTE(review): `cut -d ':' -f 2` keeps everything after the first colon of
# the "inet addr:" line (including trailing text) — verify the output.
bash 'Update PRIVATE_IP for rackspace.' do
  user 'root'
  cwd '/tmp'
  code <<-EOH
    sed -i '6s/.*/PRIVATE_IP=\$\(ifconfig eth1 \| grep \"inet addr\" \| cut -d \':\' -f 2\)/' /etc/default/octohost
  EOH
end
Properly get PRIVATE_IP.
# encoding: utf-8
#
# Cookbook Name:: octohost
# Recipe:: rackspace
#
# Copyright (C) 2014, Darron Froese <darron@froese.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Rewrites the PRIVATE_IP line of /etc/default/octohost so it is derived at
# runtime from the eth1 interface (Rackspace private network). The second
# `cut` on the space delimiter trims the trailing "Bcast:..." text so only
# the bare IP remains.
# NOTE(review): assumes PRIVATE_IP lives on line 6 of that file — confirm.
bash 'Update PRIVATE_IP for rackspace.' do
  user 'root'
  cwd '/tmp'
  code <<-EOH
    sed -i '6s/.*/PRIVATE_IP=\$(ifconfig eth1 \| grep \"inet addr\" \| cut --delimiter=\":\" -f 2 \| cut --delimiter=\" \" -f 1)/' /etc/default/octohost
  EOH
end
|
#
# Cookbook Name:: appserver
# Recipe:: webserver
#
# Set deploy_usr
# Default deploy user; overridden below from the `users` data bag when search
# (or the chef-solo-search cookbook) is available.
deploy_usr = 'vagrant'
# True when the chef-solo-search library has been loaded, i.e. the
# Search::Helper class is resolvable; false otherwise.
def chef_solo_search_installed?
  ::Search.const_get('Helper').is_a?(Class)
rescue NameError
  false
end
# Pull the real deploy user from the `users` data bag (id: deploy) when search
# is usable — i.e. server mode, or solo with the chef-solo-search cookbook.
unless Chef::Config[:solo] && !chef_solo_search_installed?
  search(:users, 'id:deploy NOT action:remove').each do |u|
    deploy_usr = u['id']
  end
end
# Compass
include_recipe 'compass' if node['compass']['install']
# Node JS & packages
include_recipe 'nodejs'
node['nodejs']['npm']['packages'].each do |npackage|
  nodejs_npm npackage
end
# sudo add-apt-repository ppa:ondrej/php; sudo apt-get update
apt_repository 'ondrej-php' do
  uri 'ppa:ondrej/php'
end
# PHP
package "php#{node['php']['version']}"
package 'imagemagick'
# PHP plugins
%w[-cli -mysql -curl -mcrypt -gd -imagick -fpm].each do |pkg|
  package "php#{node['php']['version']}#{pkg}"
end
# PHP FPM service
# NOTE(review): provider is hard-coded to Upstart; Ubuntu >= 15.04 uses
# systemd — confirm the target platform before reusing this recipe.
service 'php-fpm' do
  provider ::Chef::Provider::Service::Upstart
  service_name "php#{node['php']['version']}-fpm"
  supports enable: true, start: true, stop: true, restart: true
  # :reload doesnt work on ubuntu 14.04 because of a bug...
  action [:enable, :start]
end
template "/etc/php/#{node['php']['version']}/fpm/php.ini" do
  source 'php.ini.erb'
  owner 'root'
  group 'root'
  mode '0644'
  notifies :restart, 'service[php-fpm]'
end
template "/etc/php/#{node['php']['version']}/mods-available/opcache.ini" do
  source 'opcache.ini.erb'
  owner 'root'
  group 'root'
  mode '0644'
  notifies :restart, 'service[php-fpm]'
end
execute 'Enable Mcrypt' do
  command 'phpenmod mcrypt'
  action :run
  notifies :restart, 'service[php-fpm]'
end
# Upgrade or install composer
execute 'Upgrade Composer' do
  command 'composer self-update'
  only_if { ::File.exist?('/usr/local/bin/composer') }
  action :run
end
execute 'Install Composer' do # ~FC041
  command 'curl -sS https://getcomposer.org/installer | php;mv composer.phar /usr/local/bin/composer'
  not_if { ::File.exist?('/usr/local/bin/composer') }
  action :run
end
# Install supervisor
include_recipe 'supervisor'
# NGINX install
include_recipe 'nginx::server'
# Shared web root, writable by the deploy user and sysadmin group.
directory '/var/www' do
  owner deploy_usr
  group 'sysadmin'
  mode '0775'
  action :create
  not_if { ::File.directory?('/var/www') }
end
# Configure each site declared under node['nginx']['sites']: nginx vhost,
# git checkout, composer/npm/bower/gulp builds, Laravel artisan migrations,
# queue workers under supervisor, and cron entries.
node['nginx']['sites'].each do |site|
  webroot_path = "#{site['base_path']}/#{site['webroot_subpath']}"
  git_path = "#{site['base_path']}/#{site['git_subpath']}" if site['git']
  composer_path = "#{site['base_path']}/#{site['composer_subpath']}" if site['composer_install']
  artisan_path = "#{site['base_path']}/#{site['artisan_subpath']}"
  compass_path = "#{site['base_path']}/#{site['compass_subpath']}" if site['compass_compile']
  npm_path = "#{site['base_path']}/#{site['npm_subpath']}" if site['npm_install']
  bower_path = "#{site['base_path']}/#{site['bower_subpath']}" if site['bower_install']
  gulp_path = "#{site['base_path']}/#{site['gulp_subpath']}" if site['gulp_run']
  workerlog_path = "#{site['base_path']}/#{site['artisan_queuelogpath']}" if site['artisan_queuelisten']
  # Create ssl cert files
  if site['ssl']
    directory "#{node['nginx']['dir']}/ssl" do
      owner 'root'
      group 'root'
      mode '0775'
      action :create
      not_if { ::File.directory?("#{node['nginx']['dir']}/ssl") }
    end
    file "#{node['nginx']['dir']}/ssl/#{site['name']}.crt" do
      content site['ssl_crt']
      owner 'root'
      group 'root'
      mode '0400'
      # BUGFIX: the guard previously checked ".../#{site['name']}/.crt"
      # (stray slash), so it never matched and the cert was rewritten on
      # every run.
      not_if { ::File.exist?("#{node['nginx']['dir']}/ssl/#{site['name']}.crt") }
    end
    file "#{node['nginx']['dir']}/ssl/#{site['name']}.key" do
      content site['ssl_key']
      owner 'root'
      group 'root'
      mode '0400'
      # BUGFIX: the guard previously tested the .crt path (with the same
      # stray slash); it must test the .key file this resource manages.
      not_if { ::File.exist?("#{node['nginx']['dir']}/ssl/#{site['name']}.key") }
    end
  end
  # Set up nginx server block
  custom_data = {
    'environment' => site['environment'],
    'db_host' => site['db_host'],
    'db_database' => site['db_database'],
    'db_username' => site['db_username'],
    'db_password' => site['db_password'],
    'ssl' => site['ssl'],
    'ssl_crt' => "#{node['nginx']['dir']}/ssl/#{site['name']}.crt",
    'ssl_key' => "#{node['nginx']['dir']}/ssl/#{site['name']}.key",
    'redirect-hosts' => site['redirect-hosts'],
    'redirect-to' => site['redirect-to']
  }
  nginx_site site['name'] do # ~FC022
    listen '*:80'
    host site['host']
    root webroot_path
    index site['index']
    location site['location']
    phpfpm site['phpfpm']
    custom_data custom_data
    template_cookbook site['template_cookbook']
    template_source site['template_source']
    action [:create, :enable]
    not_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
    notifies :restart, 'service[php-fpm]'
    notifies :restart, 'service[nginx]'
    notifies :sync, "git[Syncing git repository for #{site['name']}]"
    notifies :run, "execute[Composer install #{site['name']}]"
    notifies :run, "execute[Artisan migrate #{site['name']}]"
  end
  # Sync with git repository
  git "Syncing git repository for #{site['name']}" do
    destination git_path
    repository site['git_repo']
    revision site['git_branch']
    action :sync
    user deploy_usr
    ssh_wrapper "/home/#{deploy_usr}/git_wrapper.sh"
    only_if { site['git'] && ::File.exist?("/home/#{deploy_usr}/.ssh/git_rsa") }
    only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
    notifies :run, "execute[Composer install #{site['name']} after git sync]"
    notifies :run, "execute[Artisan migrate #{site['name']} after git sync]"
    notifies :compile, "compass_project[Compile sass for #{site['name']} after git sync]", :immediately
    notifies :run, "execute[Npm install #{site['name']} after git sync]"
    notifies :run, "ruby_block[Set writeable dirs for #{site['name']} after git sync]"
    notifies :create, "template[Create #{site['base_path']}/.env after git sync]"
  end
  # Create .env file after git sync
  template "Create #{site['base_path']}/.env after git sync" do
    path "#{site['base_path']}/.env"
    source 'env.erb'
    owner deploy_usr
    group 'www-data'
    mode '0755'
    action :nothing
    only_if { site['env'] }
  end
  # Create .env file without git sync
  template "Create #{site['base_path']}/.env" do
    path "#{site['base_path']}/.env"
    source 'env.erb'
    owner deploy_usr
    group 'www-data'
    mode '0755'
    not_if { site['git'] }
    only_if { site['env'] }
  end
  # Composer install triggered by git sync
  execute "Composer install #{site['name']} after git sync" do
    command "composer install -n -q -d #{composer_path}"
    action :nothing
    user deploy_usr
    only_if { site['composer_install'] }
    only_if { ::File.directory?(composer_path) }
    notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
  end
  # Composer install without git
  execute "Composer install #{site['name']}" do
    command "composer install -n -q -d #{composer_path}"
    action :run
    user deploy_usr
    only_if { site['composer_install'] }
    only_if { ::File.directory?(composer_path) }
    only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
    not_if { site['git'] }
    notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
  end
  # Artisan migrate triggered by composer install
  execute "Artisan migrate #{site['name']} after composer" do
    command "php #{artisan_path} --env=#{site['environment']} migrate"
    action :nothing
    user deploy_usr
    only_if { site['artisan_migrate'] }
    only_if { ::File.directory?(artisan_path) }
  end
  # Artisan migrate after git, when not running composer install
  execute "Artisan migrate #{site['name']} after git sync" do
    command "php #{artisan_path} --env=#{site['environment']} migrate"
    action :nothing
    user deploy_usr
    only_if { site['artisan_migrate'] }
    only_if { ::File.directory?(artisan_path) }
    not_if { site['composer_install'] }
  end
  # Artisan migrate without either composer or git
  execute "Artisan migrate #{site['name']}" do
    command "php #{artisan_path} --env=#{site['environment']} migrate"
    action :run
    user deploy_usr
    only_if { site['artisan_migrate'] }
    only_if { ::File.directory?(artisan_path) }
    only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
    not_if { site['composer_install'] }
    not_if { site['git'] }
  end
  # Compass compile without git
  compass_project "Compile sass for #{site['name']}" do
    path compass_path
    action :compile
    user deploy_usr
    only_if { site['compass_compile'] }
    only_if { ::File.directory?(compass_path) }
    not_if { site['git'] }
  end
  # Compass compile triggered by git
  compass_project "Compile sass for #{site['name']} after git sync" do
    path compass_path
    action :nothing
    user deploy_usr
    only_if { site['compass_compile'] }
    only_if { ::File.directory?(compass_path) }
  end
  # Npm install without git
  execute "Npm install #{site['name']}" do
    cwd npm_path
    command 'npm install'
    action :run
    user deploy_usr
    only_if { site['npm_install'] }
    only_if { ::File.directory?(npm_path) }
    not_if { site['git'] }
    notifies :run, "execute[Bower install #{site['name']}]"
    notifies :run, "execute[Gulp #{site['name']}]"
  end
  # Npm install triggered by git
  execute "Npm install #{site['name']} after git sync" do
    cwd npm_path
    command 'npm install --silent'
    action :nothing
    user deploy_usr
    only_if { site['npm_install'] }
    only_if { ::File.directory?(npm_path) }
    notifies :run, "execute[Bower install #{site['name']}]"
    notifies :run, "execute[Gulp #{site['name']}]"
  end
  # Bower install after npm install
  execute "Bower install #{site['name']}" do
    cwd bower_path
    command "su #{deploy_usr} -l -c 'bower install --silent'"
    action :nothing
    only_if { site['bower_install'] }
    only_if { ::File.directory?(bower_path) }
    # BUGFIX: this previously notified "Gulp #{site['name']}", which is
    # guarded by `not_if { site['bower_install'] }` and so never ran after
    # bower, while the dedicated "after bower" resource was never triggered.
    notifies :run, "execute[Gulp #{site['name']} after bower]"
  end
  # Gulp run after bower install
  execute "Gulp #{site['name']} after bower" do
    cwd gulp_path
    command 'gulp --silent --production'
    action :nothing
    user deploy_usr
    only_if { site['gulp_run'] }
    only_if { ::File.directory?(gulp_path) }
  end
  # Gulp run after npm install
  execute "Gulp #{site['name']}" do
    cwd gulp_path
    command 'gulp --silent --production'
    action :nothing
    user deploy_usr
    only_if { site['gulp_run'] }
    only_if { ::File.directory?(gulp_path) }
    not_if { site['bower_install'] }
  end
  # Set writeable directories without git
  if site['writeable_dirs'].is_a?(Array) && !site['git']
    site['writeable_dirs'].each do |dir_path|
      # Relative entries are resolved against the site base path.
      dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
      execute "Set owner of #{dir_path} to #{deploy_usr}:www-data" do
        command "chown -R #{deploy_usr}:www-data #{dir_path}"
        action :run
        only_if { ::File.directory?(dir_path) }
      end
      execute "Change mode of #{dir_path} to 775" do
        command "chmod -R 775 #{dir_path}"
        only_if { ::File.directory?(dir_path) }
      end
    end
  end
  # Set writeable directories after git sync
  ruby_block "Set writeable dirs for #{site['name']} after git sync" do
    block do
      site['writeable_dirs'].each do |dir_path|
        dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
        r = Chef::Resource::Execute.new("Set owner of #{dir_path} to #{deploy_usr}:www-data", run_context)
        r.command "chown -R #{deploy_usr}:www-data #{dir_path}"
        r.run_action(:run)
        r = Chef::Resource::Execute.new("Change mode of #{dir_path} to 775", run_context)
        r.command "chmod -R 775 #{dir_path}"
        r.run_action(:run)
      end
    end
    action :nothing
    only_if { site['writeable_dirs'].is_a?(Array) }
  end
  # Set up supervisors
  supervisor_service "#{site['name']}ArtisanQueue" do
    command "php #{artisan_path} --env=#{site['environment']} queue:work --tries=3 --daemon"
    autostart true
    autorestart true
    user deploy_usr
    numprocs site['artisan_queueworkers']
    process_name '%(program_name)s_%(process_num)02d'
    redirect_stderr true
    stdout_logfile workerlog_path
    only_if { site['artisan_queuelisten'] }
  end
  # Set up artisan cron entries
  # `|| []` guards sites that do not define the attribute (previously a
  # NoMethodError on nil).
  (site['artisan_cron'] || []).each do |cronjob|
    cronjob['minute'] ||= '*'
    cronjob['hour'] ||= '*'
    cronjob['month'] ||= '*'
    cronjob['weekday'] ||= '*'
    cron cronjob['name'] do
      minute cronjob['minute']
      hour cronjob['hour']
      month cronjob['month']
      weekday cronjob['weekday']
      command "php #{artisan_path} --env=#{site['environment']} #{cronjob['command']}"
      user deploy_usr
    end
  end
  # Set up cron entries
  (site['cronjobs'] || []).each do |cronjob|
    cronjob['minute'] ||= '*'
    cronjob['hour'] ||= '*'
    cronjob['month'] ||= '*'
    cronjob['weekday'] ||= '*'
    cron cronjob['name'] do
      minute cronjob['minute']
      hour cronjob['hour']
      month cronjob['month']
      weekday cronjob['weekday']
      command cronjob['command']
      user deploy_usr
    end
  end
end
Chef service provider for newer ubuntu versions
#
# Cookbook Name:: appserver
# Recipe:: webserver
#
# Set deploy_usr
# Default deploy user; overridden below from the `users` data bag when search
# (or the chef-solo-search cookbook) is available.
deploy_usr = 'vagrant'
# Reports whether the chef-solo-search library is loadable, i.e. whether the
# Search::Helper class can be resolved; false when the constant is missing.
def chef_solo_search_installed?
  helper = ::Search.const_get('Helper')
  helper.is_a?(Class)
rescue NameError
  false
end
# Pull the real deploy user from the `users` data bag (id: deploy) when search
# is usable — i.e. server mode, or solo with the chef-solo-search cookbook.
unless Chef::Config[:solo] && !chef_solo_search_installed?
  search(:users, 'id:deploy NOT action:remove').each do |u|
    deploy_usr = u['id']
  end
end
# Compass
include_recipe 'compass' if node['compass']['install']
# Node JS & packages
include_recipe 'nodejs'
node['nodejs']['npm']['packages'].each do |npackage|
  nodejs_npm npackage
end
# sudo add-apt-repository ppa:ondrej/php; sudo apt-get update
apt_repository 'ondrej-php' do
  uri 'ppa:ondrej/php'
end
# PHP
package "php#{node['php']['version']}"
package 'imagemagick'
# PHP plugins
%w[-cli -mysql -curl -mcrypt -gd -imagick -fpm].each do |pkg|
  package "php#{node['php']['version']}#{pkg}"
end
# PHP FPM service
# Pick the init provider by Ubuntu release: systemd from 15.04 on, Upstart
# for 12.04–14.x; nil elsewhere lets Chef auto-detect the provider.
service_provider = nil
if 'ubuntu' == node['platform']
  if Chef::VersionConstraint.new('>= 15.04').include?(node['platform_version'])
    service_provider = Chef::Provider::Service::Systemd
  elsif Chef::VersionConstraint.new('>= 12.04').include?(node['platform_version'])
    service_provider = Chef::Provider::Service::Upstart
  end
end
service 'php-fpm' do
  provider service_provider
  service_name "php#{node['php']['version']}-fpm"
  supports enable: true, start: true, stop: true, restart: true
  # :reload doesnt work on ubuntu 14.04 because of a bug...
  action [:enable, :start]
end
template "/etc/php/#{node['php']['version']}/fpm/php.ini" do
  source 'php.ini.erb'
  owner 'root'
  group 'root'
  mode '0644'
  notifies :restart, 'service[php-fpm]'
end
template "/etc/php/#{node['php']['version']}/mods-available/opcache.ini" do
  source 'opcache.ini.erb'
  owner 'root'
  group 'root'
  mode '0644'
  notifies :restart, 'service[php-fpm]'
end
execute 'Enable Mcrypt' do
  command 'phpenmod mcrypt'
  action :run
  notifies :restart, 'service[php-fpm]'
end
# Upgrade or install composer
execute 'Upgrade Composer' do
  command 'composer self-update'
  only_if { ::File.exist?('/usr/local/bin/composer') }
  action :run
end
execute 'Install Composer' do # ~FC041
  command 'curl -sS https://getcomposer.org/installer | php;mv composer.phar /usr/local/bin/composer'
  not_if { ::File.exist?('/usr/local/bin/composer') }
  action :run
end
# Install supervisor
include_recipe 'supervisor'
# NGINX install
include_recipe 'nginx::server'
# Shared web root, writable by the deploy user and sysadmin group.
directory '/var/www' do
  owner deploy_usr
  group 'sysadmin'
  mode '0775'
  action :create
  not_if { ::File.directory?('/var/www') }
end
node['nginx']['sites'].each do |site|
webroot_path = "#{site['base_path']}/#{site['webroot_subpath']}"
git_path = "#{site['base_path']}/#{site['git_subpath']}" if site['git']
composer_path = "#{site['base_path']}/#{site['composer_subpath']}" if site['composer_install']
artisan_path = "#{site['base_path']}/#{site['artisan_subpath']}"
compass_path = "#{site['base_path']}/#{site['compass_subpath']}" if site['compass_compile']
npm_path = "#{site['base_path']}/#{site['npm_subpath']}" if site['npm_install']
bower_path = "#{site['base_path']}/#{site['bower_subpath']}" if site['bower_install']
gulp_path = "#{site['base_path']}/#{site['gulp_subpath']}" if site['gulp_run']
workerlog_path = "#{site['base_path']}/#{site['artisan_queuelogpath']}" if site['artisan_queuelisten']
# Create ssl cert files
if site['ssl']
directory "#{node['nginx']['dir']}/ssl" do
owner 'root'
group 'root'
mode '0775'
action :create
not_if { ::File.directory?("#{node['nginx']['dir']}/ssl") }
end
file "#{node['nginx']['dir']}/ssl/#{site['name']}.crt" do
content site['ssl_crt']
owner 'root'
group 'root'
mode '0400'
not_if { ::File.exist?("#{node['nginx']['dir']}/ssl/#{site['name']}/.crt") }
end
file "#{node['nginx']['dir']}/ssl/#{site['name']}.key" do
content site['ssl_key']
owner 'root'
group 'root'
mode '0400'
not_if { ::File.exist?("#{node['nginx']['dir']}/ssl/#{site['name']}/.crt") }
end
end
# Set up nginx server block
custom_data = {
'environment' => site['environment'],
'db_host' => site['db_host'],
'db_database' => site['db_database'],
'db_username' => site['db_username'],
'db_password' => site['db_password'],
'ssl' => site['ssl'],
'ssl_crt' => "#{node['nginx']['dir']}/ssl/#{site['name']}.crt",
'ssl_key' => "#{node['nginx']['dir']}/ssl/#{site['name']}.key",
'redirect-hosts' => site['redirect-hosts'],
'redirect-to' => site['redirect-to']
}
nginx_site site['name'] do # ~FC022
listen '*:80'
host site['host']
root webroot_path
index site['index']
location site['location']
phpfpm site['phpfpm']
custom_data custom_data
template_cookbook site['template_cookbook']
template_source site['template_source']
action [:create, :enable]
not_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
notifies :restart, 'service[php-fpm]'
notifies :restart, 'service[nginx]'
notifies :sync, "git[Syncing git repository for #{site['name']}]"
notifies :run, "execute[Composer install #{site['name']}]"
notifies :run, "execute[Artisan migrate #{site['name']}]"
end
# Sync with git repository
git "Syncing git repository for #{site['name']}" do
destination git_path
repository site['git_repo']
revision site['git_branch']
action :sync
user deploy_usr
ssh_wrapper "/home/#{deploy_usr}/git_wrapper.sh"
only_if { site['git'] && ::File.exist?("/home/#{deploy_usr}/.ssh/git_rsa") }
only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
notifies :run, "execute[Composer install #{site['name']} after git sync]"
notifies :run, "execute[Artisan migrate #{site['name']} after git sync]"
notifies :compile, "compass_project[Compile sass for #{site['name']} after git sync]", :immediately
notifies :run, "execute[Npm install #{site['name']} after git sync]"
notifies :run, "ruby_block[Set writeable dirs for #{site['name']} after git sync]"
notifies :create, "template[Create #{site['base_path']}/.env after git sync]"
end
# Create .env file efter git sync
template "Create #{site['base_path']}/.env after git sync" do
path "#{site['base_path']}/.env"
source 'env.erb'
owner deploy_usr
group 'www-data'
mode '0755'
action :nothing
only_if { site['env'] }
end
# Create .env file without git sync
template "Create #{site['base_path']}/.env" do
path "#{site['base_path']}/.env"
source 'env.erb'
owner deploy_usr
group 'www-data'
mode '0755'
not_if { site['git'] }
only_if { site['env'] }
end
# Composer install triggered by git sync
execute "Composer install #{site['name']} after git sync" do
command "composer install -n -q -d #{composer_path}"
action :nothing
user deploy_usr
only_if { site['composer_install'] }
only_if { ::File.directory?(composer_path) }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Composer install without git
execute "Composer install #{site['name']}" do
command "composer install -n -q -d #{composer_path}"
action :run
user deploy_usr
only_if { site['composer_install'] }
only_if { ::File.directory?(composer_path) }
only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
not_if { site['git'] }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Artisan migrate triggered by composer install
execute "Artisan migrate #{site['name']} after composer" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :nothing
user deploy_usr
only_if { site['artisan_migrate'] }
only_if { ::File.directory?(artisan_path) }
end
# Artisan migrate after git, when not running composer install
execute "Artisan migrate #{site['name']} after git sync" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :nothing
user deploy_usr
only_if { site['artisan_migrate'] }
only_if { ::File.directory?(artisan_path) }
not_if { site['composer_install'] }
end
# Artisan migrate without either composer or git
execute "Artisan migrate #{site['name']}" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :run
user deploy_usr
only_if { site['artisan_migrate'] }
only_if { ::File.directory?(artisan_path) }
only_if { ::File.exist?("#{node['nginx']['dir']}/sites-enabled/#{site['name']}") }
not_if { site['composer_install'] }
not_if { site['git'] }
end
# Compass compile without git
compass_project "Compile sass for #{site['name']}" do
path compass_path
action :compile
user deploy_usr
only_if { site['compass_compile'] }
only_if { ::File.directory?(compass_path) }
not_if { site['git'] }
end
# Compass compile triggered by git
compass_project "Compile sass for #{site['name']} after git sync" do
path compass_path
action :nothing
user deploy_usr
only_if { site['compass_compile'] }
only_if { ::File.directory?(compass_path) }
end
# Npm install without git
execute "Npm install #{site['name']}" do
cwd npm_path
command 'npm install'
action :run
user deploy_usr
only_if { site['npm_install'] }
only_if { ::File.directory?(npm_path) }
not_if { site['git'] }
notifies :run, "execute[Bower install #{site['name']}]"
notifies :run, "execute[Gulp #{site['name']}]"
end
# Npm install triggered by git
execute "Npm install #{site['name']} after git sync" do
cwd npm_path
command 'npm install --silent'
action :nothing
user deploy_usr
only_if { site['npm_install'] }
only_if { ::File.directory?(npm_path) }
notifies :run, "execute[Bower install #{site['name']}]"
notifies :run, "execute[Gulp #{site['name']}]"
end
# Bower install after npm install (triggered via notification only).
execute "Bower install #{site['name']}" do
  cwd bower_path
  command "su #{deploy_usr} -l -c 'bower install --silent'"
  action :nothing
  only_if { site['bower_install'] }
  only_if { ::File.directory?(bower_path) }
  # Fix: must notify the bower-specific gulp resource. The plain
  # "Gulp #{site['name']}" resource carries
  # `not_if { site['bower_install'] }`, so whenever bower actually ran,
  # the old notification was always skipped by that guard and
  # "Gulp ... after bower" was never triggered by anything.
  notifies :run, "execute[Gulp #{site['name']} after bower]"
end
# Gulp run after bower install
execute "Gulp #{site['name']} after bower" do
cwd gulp_path
command 'gulp --silent --production'
action :nothing
user deploy_usr
only_if { site['gulp_run'] }
only_if { ::File.directory?(gulp_path) }
end
# Gulp run after npm install
execute "Gulp #{site['name']}" do
cwd gulp_path
command 'gulp --silent --production'
action :nothing
user deploy_usr
only_if { site['gulp_run'] }
only_if { ::File.directory?(gulp_path) }
not_if { site['bower_install'] }
end
# Set writeable directories without git
if site['writeable_dirs'].is_a?(Array) && !site['git']
site['writeable_dirs'].each do |dir_path|
dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
execute "Set owner of #{dir_path} to #{deploy_usr}:www-data" do
command "chown -R #{deploy_usr}:www-data #{dir_path}"
action :run
only_if { ::File.directory?(dir_path) }
end
execute "Change mode of #{dir_path} to 775" do
command "chmod -R 775 #{dir_path}"
only_if { ::File.directory?(dir_path) }
end
end
end
# Set writeable directories after git sync
ruby_block "Set writeable dirs for #{site['name']} after git sync" do
block do
site['writeable_dirs'].each do |dir_path|
dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
r = Chef::Resource::Execute.new("Set owner of #{dir_path} to #{deploy_usr}:www-data", run_context)
r.command "chown -R #{deploy_usr}:www-data #{dir_path}"
r.run_action(:run)
r = Chef::Resource::Execute.new("Change mode of #{dir_path} to 775", run_context)
r.command "chmod -R 775 #{dir_path}"
r.run_action(:run)
end
end
action :nothing
only_if { site['writeable_dirs'].is_a?(Array) }
end
# Set up supervisors
supervisor_service "#{site['name']}ArtisanQueue" do
command "php #{artisan_path} --env=#{site['environment']} queue:work --tries=3 --daemon"
autostart true
autorestart true
user deploy_usr
numprocs site['artisan_queueworkers']
process_name '%(program_name)s_%(process_num)02d'
redirect_stderr true
stdout_logfile workerlog_path
only_if { site['artisan_queuelisten'] }
end
# Set up artisan cron entries
site['artisan_cron'].each do |cronjob|
cronjob['minute'] ||= '*'
cronjob['hour'] ||= '*'
cronjob['month'] ||= '*'
cronjob['weekday'] ||= '*'
cron cronjob['name'] do
minute cronjob['minute']
hour cronjob['hour']
month cronjob['month']
weekday cronjob['weekday']
command "php #{artisan_path} --env=#{site['environment']} #{cronjob['command']}"
user deploy_usr
end
end
# Set up cron entries
site['cronjobs'].each do |cronjob|
cronjob['minute'] ||= '*'
cronjob['hour'] ||= '*'
cronjob['month'] ||= '*'
cronjob['weekday'] ||= '*'
cron cronjob['name'] do
minute cronjob['minute']
hour cronjob['hour']
month cronjob['month']
weekday cronjob['weekday']
command cronjob['command']
user deploy_usr
end
end
end
|
#
# Cookbook Name:: appserver
# Recipe:: webserver
#
# Set deploy_usr
deploy_usr = 'vagrant'
# Reports whether the chef-solo-search cookbook is loadable by probing
# for its Search::Helper class; a missing constant (plain chef-solo run)
# yields false.
def chef_solo_search_installed?
  ::Search.const_get('Helper').is_a?(Class)
rescue NameError
  false
end
unless Chef::Config[:solo] && !chef_solo_search_installed?
search(:users, 'id:deploy NOT action:remove').each do |u|
deploy_usr = u['id']
end
end
# Compass
include_recipe 'compass'
# PHP FPM
package 'php5-fpm' do
action :install
end
service 'php-fpm' do
provider ::Chef::Provider::Service::Upstart
service_name 'php5-fpm'
supports enable: true, start: true, stop: true, restart: true
# :reload doesnt work on ubuntu 14.04 because of a bug...
action [:enable, :start]
end
# PHP with plugins
%w(php5 php5-cli php5-mysql php5-curl php5-mcrypt php5-gd imagemagick php5-imagick).each do |pkg| # rubocop:disable LineLength
package pkg do
action :install
end
end
template '/etc/php5/fpm/php.ini' do
source 'php.ini.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, 'service[php-fpm]'
end
template '/etc/php5/mods-available/opcache.ini' do
source 'opcache.ini.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, 'service[php-fpm]'
end
execute 'Enable Mcrypt' do
command 'php5enmod mcrypt'
action :run
notifies :restart, 'service[php-fpm]'
end
# Upgrade or install composer
execute 'Upgrade Composer' do
command 'composer self-update'
only_if { ::File.exist?('/usr/local/bin/composer') }
action :run
end
execute 'Install Composer' do # ~FC041
command 'curl -sS https://getcomposer.org/installer | php;mv composer.phar /usr/local/bin/composer' # rubocop:disable LineLength
not_if { ::File.exist?('/usr/local/bin/composer') }
action :run
end
# NGINX install
include_recipe 'nginx::server'
directory '/var/www' do
owner deploy_usr
group 'sysadmin'
mode '0775'
action :create
not_if { ::File.directory?('/var/www') }
end
node['nginx']['sites'].each do |site|
git_path = "#{site['base_path']}/#{site['git_subpath']}" if site['git']
composer_path = "#{site['base_path']}/#{site['composer_subpath']}"
artisan_path = "#{site['base_path']}/#{site['artisan_subpath']}"
compass_path = "#{site['base_path']}/#{site['compass_subpath']}"
webroot_path = "#{site['base_path']}/#{site['webroot_subpath']}"
git "Syncing git repository for #{site['name']}" do
destination git_path
repository site['git_repo']
revision site['git_branch']
checkout_branch site['git_branch']
action :sync
user deploy_usr
ssh_wrapper "/home/#{deploy_usr}/git_wrapper.sh"
only_if { site['git'] && ::File.exist?("/home/#{deploy_usr}/.ssh/git_rsa") }
notifies :run, "execute[Composer update #{site['name']} after git sync]"
end
# Composer update triggered by git sync
execute "Composer update #{site['name']} after git sync" do
command "composer update -n -d #{composer_path}"
action :nothing
user deploy_usr
only_if { site['git'] && site['composer_update'] }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Composer update without git
execute "Composer update #{site['name']}" do
command "composer update -n -d #{composer_path}"
action :run
user deploy_usr
only_if { site['composer_update'] }
not_if { site['git'] }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Artisan migrate triggered by composer update
execute "Artisan migrate #{site['name']} after composer" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :nothing
user deploy_usr
only_if { site['composer_update'] && site['artisan_migrate'] }
end
# Artisan migrate without composer update
execute "Artisan migrate #{site['name']}" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :run
user deploy_usr
only_if { site['artisan_migrate'] }
not_if { site['composer_update'] }
end
# Compass compile
compass_project site['name'] do
path compass_path
action :compile
user deploy_usr
only_if { site['compass_compile'] }
end
# Set writeable directories
if site['writeable_dirs'].kind_of?(Array)
site['writeable_dirs'].each do |dir_path|
dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
execute "Make #{dir_path} owned by www-data:#{deploy_usr}" do
command "chown -R www-data:#{deploy_usr} #{dir_path}"
action :run
only_if { ::File.directory?(dir_path) }
end
execute "Make #{dir_path} writeable by both www-data and #{deploy_usr}" do
command "chmod -R 775 #{dir_path}"
only_if { ::File.directory?(dir_path) }
end
end
end
custom_data = { 'environment' => site['environment'] }
# Set up nginx server block
nginx_site site['name'] do
listen '*:80'
host site['host']
root webroot_path
index site['index']
location site['location']
phpfpm site['phpfpm']
custom_data custom_data
template_cookbook site['template_cookbook']
template_source site['template_source']
action [:create, :enable]
notifies :restart, 'service[php-fpm]'
notifies :restart, 'service[nginx]'
end
end
Change the rights of web server writeable dirs
#
# Cookbook Name:: appserver
# Recipe:: webserver
#
# Set deploy_usr
deploy_usr = 'vagrant'
# Reports whether the chef-solo-search cookbook is loadable by probing
# for its Search::Helper class; a missing constant (plain chef-solo run)
# yields false.
def chef_solo_search_installed?
  ::Search.const_get('Helper').is_a?(Class)
rescue NameError
  false
end
unless Chef::Config[:solo] && !chef_solo_search_installed?
search(:users, 'id:deploy NOT action:remove').each do |u|
deploy_usr = u['id']
end
end
# Compass
include_recipe 'compass'
# PHP FPM
package 'php5-fpm' do
action :install
end
service 'php-fpm' do
provider ::Chef::Provider::Service::Upstart
service_name 'php5-fpm'
supports enable: true, start: true, stop: true, restart: true
# :reload doesnt work on ubuntu 14.04 because of a bug...
action [:enable, :start]
end
# PHP with plugins
%w(php5 php5-cli php5-mysql php5-curl php5-mcrypt php5-gd imagemagick php5-imagick).each do |pkg| # rubocop:disable LineLength
package pkg do
action :install
end
end
template '/etc/php5/fpm/php.ini' do
source 'php.ini.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, 'service[php-fpm]'
end
template '/etc/php5/mods-available/opcache.ini' do
source 'opcache.ini.erb'
owner 'root'
group 'root'
mode '0644'
notifies :restart, 'service[php-fpm]'
end
execute 'Enable Mcrypt' do
command 'php5enmod mcrypt'
action :run
notifies :restart, 'service[php-fpm]'
end
# Upgrade or install composer
execute 'Upgrade Composer' do
command 'composer self-update'
only_if { ::File.exist?('/usr/local/bin/composer') }
action :run
end
execute 'Install Composer' do # ~FC041
command 'curl -sS https://getcomposer.org/installer | php;mv composer.phar /usr/local/bin/composer' # rubocop:disable LineLength
not_if { ::File.exist?('/usr/local/bin/composer') }
action :run
end
# NGINX install
include_recipe 'nginx::server'
directory '/var/www' do
owner deploy_usr
group 'sysadmin'
mode '0775'
action :create
not_if { ::File.directory?('/var/www') }
end
node['nginx']['sites'].each do |site|
git_path = "#{site['base_path']}/#{site['git_subpath']}" if site['git']
composer_path = "#{site['base_path']}/#{site['composer_subpath']}"
artisan_path = "#{site['base_path']}/#{site['artisan_subpath']}"
compass_path = "#{site['base_path']}/#{site['compass_subpath']}"
webroot_path = "#{site['base_path']}/#{site['webroot_subpath']}"
git "Syncing git repository for #{site['name']}" do
destination git_path
repository site['git_repo']
revision site['git_branch']
checkout_branch site['git_branch']
action :sync
user deploy_usr
ssh_wrapper "/home/#{deploy_usr}/git_wrapper.sh"
only_if { site['git'] && ::File.exist?("/home/#{deploy_usr}/.ssh/git_rsa") }
notifies :run, "execute[Composer update #{site['name']} after git sync]"
end
# Composer update triggered by git sync
execute "Composer update #{site['name']} after git sync" do
command "composer update -n -d #{composer_path}"
action :nothing
user deploy_usr
only_if { site['git'] && site['composer_update'] }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Composer update without git
execute "Composer update #{site['name']}" do
command "composer update -n -d #{composer_path}"
action :run
user deploy_usr
only_if { site['composer_update'] }
not_if { site['git'] }
notifies :run, "execute[Artisan migrate #{site['name']} after composer]"
end
# Artisan migrate triggered by composer update
execute "Artisan migrate #{site['name']} after composer" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :nothing
user deploy_usr
only_if { site['composer_update'] && site['artisan_migrate'] }
end
# Artisan migrate without composer update
execute "Artisan migrate #{site['name']}" do
command "php #{artisan_path} --env=#{site['environment']} migrate"
action :run
user deploy_usr
only_if { site['artisan_migrate'] }
not_if { site['composer_update'] }
end
# Compass compile
compass_project site['name'] do
path compass_path
action :compile
user deploy_usr
only_if { site['compass_compile'] }
end
# Set writeable directories
if site['writeable_dirs'].kind_of?(Array)
site['writeable_dirs'].each do |dir_path|
dir_path = "#{site['base_path']}/#{dir_path}" unless dir_path[0, 1] == '/'
execute "Make #{dir_path} owned by #{deploy_usr}:www-data" do
command "chown -R #{deploy_usr}:www-data #{dir_path}"
action :run
only_if { ::File.directory?(dir_path) }
end
execute "Make #{dir_path} writeable by both #{deploy_usr} and www-data" do
command "chmod -R 775 #{dir_path}"
only_if { ::File.directory?(dir_path) }
end
end
end
custom_data = { 'environment' => site['environment'] }
# Set up nginx server block
nginx_site site['name'] do
listen '*:80'
host site['host']
root webroot_path
index site['index']
location site['location']
phpfpm site['phpfpm']
custom_data custom_data
template_cookbook site['template_cookbook']
template_source site['template_source']
action [:create, :enable]
notifies :restart, 'service[php-fpm]'
notifies :restart, 'service[nginx]'
end
end
|
$:.push File.expand_path("../lib", __FILE__)
Gem::Specification.new do |s|
s.name = 'recommengine'
s.version = '0.1.2'
s.date = '2015-09-12'
s.summary = "A flexible recommendation engine."
s.description = "A flexible recommendation engine supporting multiple similarity algorithms for use in ecommerce sites, marketplaces, social sharing apps, and more."
s.authors = ["Cody Knauer"]
s.email = 'codyknauer@gmail.com'
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.homepage = 'http://github.com/c0d3s/recommengine'
s.license = 'MIT'
s.require_paths = ["lib"]
s.required_ruby_version = '>= 2.1.0'
s.add_development_dependency 'rspec', '~> 3.1'
end
update gemspec
# Make lib/ requirable so the gem's own code can be loaded from this spec.
$:.push File.expand_path("../lib", __FILE__)
# Gem specification for recommengine, a recommendation-engine library.
Gem::Specification.new do |s|
s.name = 'recommengine'
s.version = '0.1.3'
s.date = '2015-09-12'
s.summary = "A flexible recommendation engine."
s.description = "A flexible recommendation engine supporting multiple similarity algorithms for use in ecommerce sites, marketplaces, social sharing apps, and more."
s.authors = ["Cody Knauer"]
s.email = 'codyknauer@gmail.com'
# Package every git-tracked file; anything under spec/ is the test set.
# NOTE(review): requires building from a git checkout — `git ls-files`
# returns nothing outside a repository.
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- spec/*`.split("\n")
s.homepage = 'http://github.com/c0d3s/recommengine'
s.license = 'MIT'
s.require_paths = ["lib"]
s.required_ruby_version = '>= 2.1.0'
s.add_development_dependency 'rspec', '~> 3.1'
end
require 'rubygems'
require 'benchwarmer'
require 'right_aws'
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), 'lib'))
require 'fog/aws'
data = File.open(File.expand_path('~/.s3conf/s3config.yml')).read
config = YAML.load(data)
fog = Fog::AWS::S3.new(
:aws_access_key_id => config['aws_access_key_id'],
:aws_secret_access_key => config['aws_secret_access_key']
)
raws = RightAws::S3Interface.new(
config['aws_access_key_id'],
config['aws_secret_access_key']
)
raws.logger.level = 3 # ERROR
Benchmark.bm(25) do |bench|
bench.report('fog.put_bucket') do
fog.put_bucket('fogbench')
end
bench.report('raws.create_bucket') do
raws.create_bucket('rawsbench')
end
print '-' * 64 << "\n"
bench.report('fog.put_object') do
file = File.open(File.dirname(__FILE__) + '/spec/lorem.txt', 'r')
fog.put_object('fogbench', 'lorem', file)
end
bench.report('raws.put') do
file = File.open(File.dirname(__FILE__) + '/spec/lorem.txt', 'r')
raws.put('rawsbench', 'lorem', file)
end
print '-' * 64 << "\n"
bench.report('fog.delete_object') do
fog.delete_object('fogbench', 'lorem')
end
bench.report('raws.delete') do
raws.delete('rawsbench', 'lorem')
end
print '-' * 64 << "\n"
bench.report('fog.delete_bucket') do
fog.delete_bucket('fogbench')
end
bench.report('raws.delete_bucket') do
raws.delete_bucket('rawsbench')
end
end
more repetition for benchmarks
require 'rubygems'
require 'benchwarmer'
require 'right_aws'
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), 'lib'))
require 'fog/aws'
data = File.open(File.expand_path('~/.s3conf/s3config.yml')).read
config = YAML.load(data)
fog = Fog::AWS::S3.new(
:aws_access_key_id => config['aws_access_key_id'],
:aws_secret_access_key => config['aws_secret_access_key']
)
raws = RightAws::S3Interface.new(
config['aws_access_key_id'],
config['aws_secret_access_key']
)
raws.logger.level = 3 # ERROR
TIMES = 10
Benchmark.bm(25) do |bench|
bench.report('fog.put_bucket') do
fog.put_bucket('fogbench')
end
bench.report('raws.create_bucket') do
raws.create_bucket('rawsbench')
end
print '-' * 64 << "\n"
bench.report('fog.put_object') do
TIMES.times do |x|
file = File.open(File.dirname(__FILE__) + '/spec/lorem.txt', 'r')
fog.put_object('fogbench', "lorem_#{x}", file)
end
end
bench.report('raws.put') do
TIMES.times do |x|
file = File.open(File.dirname(__FILE__) + '/spec/lorem.txt', 'r')
raws.put('rawsbench', "lorem_#{x}", file)
end
end
print '-' * 64 << "\n"
bench.report('fog.delete_object') do
TIMES.times do |x|
fog.delete_object('fogbench', "lorem_#{x}")
end
end
bench.report('raws.delete') do
TIMES.times do |x|
raws.delete('rawsbench', "lorem_#{x}")
end
end
print '-' * 64 << "\n"
bench.report('fog.delete_bucket') do
fog.delete_bucket('fogbench')
end
bench.report('raws.delete_bucket') do
raws.delete_bucket('rawsbench')
end
end |
# frozen_string_literal: true
require File.join(File.dirname(__FILE__), '/lib/pygments.rb')
require 'benchmark'
include Benchmark
# number of iterations
num = ARGV[0] ? ARGV[0].to_i : 10
# we can also repeat the code itself
repeats = ARGV[1] ? ARGV[1].to_i : 1
code = File.open('test/test_pygments.py').read.to_s * repeats
puts "Benchmarking....\n"
puts 'Size: ' + code.bytesize.to_s + " bytes\n"
puts 'Iterations: ' + num.to_s + "\n"
Benchmark.bm(40) do |x|
x.report('pygments popen ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
x.report('pygments popen (process already started) ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
x.report('pygments popen (process already started 2) ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
end
fix filename in bench.rb
# frozen_string_literal: true
require File.join(File.dirname(__FILE__), '/lib/pygments.rb')
require 'benchmark'
include Benchmark
# number of iterations
num = ARGV[0] ? ARGV[0].to_i : 10
# we can also repeat the code itself
repeats = ARGV[1] ? ARGV[1].to_i : 1
code = File.open('test/test_pygments.rb').read.to_s * repeats
puts "Benchmarking....\n"
puts 'Size: ' + code.bytesize.to_s + " bytes\n"
puts 'Iterations: ' + num.to_s + "\n"
Benchmark.bm(40) do |x|
x.report('pygments popen ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
x.report('pygments popen (process already started) ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
x.report('pygments popen (process already started 2) ') { (1..num).each { |_i|; Pygments.highlight(code, lexer: 'python'); } }
end
|
# Define a class called Fraction
# This class is used to represent a ratio of two integers
# Include setter methods called numerator and denominator that allow the user to change these values
# Also include a method called to_f that returns the ratio as a float (a number with a decimal point, such as 10.3)
# Also include a method called lowest, which returns a new Fraction, where the numerator and denominator
# are reduced to lowest terms (ie 20/60 becomes 1/3)
# This will require finding the greatest common divisor for the numerator and denominator
# then dividing both by that number.
# I have included a greatest common divisor method for you
# You should also define a method, to_s, that will represent the Fraction as a String
#
# EXAMPLE:
# f = Fraction.new 20, 60
# f.numerator # => 20
# f.denominator # => 60
# f.to_s # => "20/60"
# f.lowest.to_s # => "1/3"
# f.numerator = 50
# f.denominator = 100
# f.to_s # => "50/100"
# f.to_f # => 0.5
# Skeleton for the Fraction exercise: only the provided
# greatest-common-divisor helper is implemented.
class Fraction
  # Euclid's algorithm in iterative form; equivalent to the recursive
  # version (returns a once b reaches zero).
  def gcd(a, b)
    a, b = b, a % b until b == 0
    a
  end
end
Session 2 Challenge 12 passed
# Define a class called Fraction
# This class is used to represent a ratio of two integers
# Include setter methods called numerator and denominator that allow the user to change these values
# Also include a method called to_f that returns the ratio as a float (a number with a decimal point, such as 10.3)
# Also include a method called lowest, which returns a new Fraction, where the numerator and denominator
# are reduced to lowest terms (ie 20/60 becomes 1/3)
# This will require finding the greatest common divisor for the numerator and denominator
# then dividing both by that number.
# I have included a greatest common divisor method for you
# You should also define a method, to_s, that will represent the Fraction as a String
#
# EXAMPLE:
# f = Fraction.new 20, 60
# f.numerator # => 20
# f.denominator # => 60
# f.to_s # => "20/60"
# f.lowest.to_s # => "1/3"
# f.numerator = 50
# f.denominator = 100
# f.to_s # => "50/100"
# f.to_f # => 0.5
# Define a class called Fraction
# This class is used to represent a ratio of two integers
# Include setter methods called numerator and denominator that allow the user to change these values
# Also include a method called to_f that returns the ratio as a float (a number with a decimal point, such as 10.3)
# Also include a method called lowest, which returns a new Fraction, where the numerator and denominator
# are reduced to lowest terms (ie 20/60 becomes 1/3)
# This will require finding the greatest common divisor for the numerator and denominator
# then dividing both by that number.
# I have included a greatest common divisor method for you
# You should also define a method, to_s, that will represent the Fraction as a String
#
# EXAMPLE:
# f = Fraction.new 20, 60
# f.numerator # => 20
# f.denominator # => 60
# f.to_s # => "20/60"
# f.lowest.to_s # => "1/3"
# f.numerator = 50
# f.denominator = 100
# f.to_s # => "50/100"
# f.to_f # => 0.5
# A ratio of two integers.
#
#   f = Fraction.new 20, 60
#   f.to_s        # => "20/60"
#   f.lowest.to_s # => "1/3"
#   f.to_f        # => 0.333...
class Fraction
  # Readers and writers for both terms, as the exercise requires.
  # (The previous hand-written reader methods merely duplicated these.)
  attr_accessor :numerator, :denominator

  # a:: Integer numerator
  # b:: Integer denominator
  def initialize(a, b)
    @numerator = a
    @denominator = b
  end

  # Euclid's algorithm; greatest common divisor of the two arguments.
  def gcd(numerator, denominator)
    return numerator if denominator == 0
    gcd(denominator, numerator % denominator)
  end

  # The ratio as a Float (e.g. 0.5 for 50/100).
  def to_f
    numerator.to_f / denominator.to_f
  end

  # "numerator/denominator", e.g. "20/60".
  def to_s
    "#{numerator}/#{denominator}"
  end

  # A new Fraction reduced to lowest terms (20/60 -> 1/3).
  # Fix: the spec above requires `lowest` to return a new Fraction, but
  # the previous implementation returned a String; `lowest.to_s` output
  # is unchanged. Also computes the divisor once instead of twice.
  def lowest
    divisor = gcd(numerator, denominator)
    Fraction.new(numerator / divisor, denominator / divisor)
  end
end
|
#!/usr/bin/env ruby
require 'rubygems'
require 'net/http'
require 'uri'
require 'fileutils'
require 'crack'
##########################################################################
####################### YOU CAN EDIT THIS PART ###########################
##########################################################################
# Put your favorite Reddits here
reddits = ['Pics', 'WTF']
# Desired sorting
sort_type = 'hot' # hot, new, controversial, top
# Folder to save pictures to
dir = 'Saved Reddit Pics'
##########################################################################
#################### DONT EDIT ANYTHING PAST HERE ########################
##########################################################################
# Build the multi-reddit JSON listing URL for the given subreddits
# (sorted, '+'-joined) and sort mode (hot/new/controversial/top).
def generate_custom_url(reddit_list, sort)
  subreddits = reddit_list.sort.join('+')
  "http://www.reddit.com/r/#{subreddits}/#{sort}.json"
end
custom_url = generate_custom_url(reddits, sort_type)
puts "Your Personal URL: #{custom_url}\n"
# One dashed separator sized to the URL plus the fixed 20-dash prefix.
# (The old code interpolated the nil return value of `print` and relied
# on print's side effect for part of the line; emitted bytes are the
# same '-' * (20 + length) followed by a newline.)
puts '-' * (custom_url.length + 20)
# Get source of page
def get_page_source(page_url)
url = URI.parse(page_url)
req = Net::HTTP::Get.new(url.path)
Net::HTTP.start(url.host, url.port) do |http|
http.request(req)
end
end
res = get_page_source(custom_url)
# Add URLs and Title to hash
urls = {}
doc = Crack::JSON.parse(res.body)
doc['data']['children'].each do |link|
urls[link['data']['title']] = link['data']['url']
end
puts urls.inspect
# Fix ugly imgur URLs
urls.each_pair do |name, url|
# imgur.com -> i.imgur.com
if url =~ /^((http:\/\/)|(www))+imgur\.com.*$/
url.insert(url.index(/(?i)(imgur\.com).*$/), 'i.')
# i.imgur.com/1234 -> i.imgur.com/1234.jpg
unless url =~ /^.*\.(?i)((bmp)|(gif)|(jpeg)|(jpg)|(png)|(tiff))$/
url.concat(".jpg")
end
end
end
# True when the URL looks like a static picture: it must end in a known
# image extension and must not be a .php resource.
# Fix: the previous predicate returned the opposite of its name (false
# for image extensions) and the reject! caller compensated; method and
# caller are corrected together here, so the net filtering — keep image
# URLs, drop .php and everything else — is unchanged.
def is_picture?(file)
  return false if file =~ /^.+\.(?i)(php)/
  !(file =~ /^.+\.(?i)((bmp)|(gif)|(jpeg)|(jpg)|(png)|(tiff))$/).nil?
end
# Remove non-pictures
urls.reject! do |name, url|
  !is_picture?(url)
end
# Make directory for pictures
FileUtils.mkdir_p dir
# Follow redirects
def fetch(uri_str, limit = 10)
raise ArgumentError, 'HTTP redirect too deep' if limit == 0
response = Net::HTTP.get_response(URI.parse(uri_str))
case response
when Net::HTTPSuccess then response
when Net::HTTPRedirection then fetch(response['location'], limit - 1)
else
response.error!
end
end
# Make file names safe: keep only spaces, digits, and ASCII letters,
# then turn the remaining spaces into underscores.
def sanitize(s)
  s.gsub(/[^ 0-9A-Za-z]/, '').tr(' ', '_')
end
# Download +url+ to +path+, following redirects via fetch.
# Fix: writes in binary mode ('wb') so image bytes are not mangled on
# platforms that translate newlines in text mode; also drops the old
# version's unused Net::HTTP connection and unused `ext` local.
def download_file(url, path)
  response = fetch(url)
  File.open(path, 'wb') do |file|
    file.write(response.body)
  end
end
# Download files
urls.each_pair do |name, url|
puts "Downloading: #{name}\n\t#{url}\n\n"
ext = url.match(/\.([^\.]+)$/).to_a.last
unless File.exist?("#{dir}/#{sanitize(name)}.#{ext.downcase}")
download_file(url, "#{dir}/#{sanitize(name)}.#{ext.downcase}")
end
end
puts 'Downloading Complete'
Now uses json instead of crack
#!/usr/bin/env ruby
require 'rubygems'
require 'net/http'
require 'uri'
require 'fileutils'
require 'json'
##########################################################################
####################### YOU CAN EDIT THIS PART ###########################
##########################################################################
# Put your favorite Reddits here
reddits = ['Pics', 'WTF']
# Desired sorting
sort_type = 'hot' # hot, new, controversial, top
# Folder to save pictures to
dir = 'Saved Reddit Pics'
##########################################################################
#################### DONT EDIT ANYTHING PAST HERE ########################
##########################################################################
# Build the multi-reddit JSON listing URL for the given subreddits
# (sorted, '+'-joined) and sort mode (hot/new/controversial/top).
def generate_custom_url(reddit_list, sort)
  subreddits = reddit_list.sort.join('+')
  "http://www.reddit.com/r/#{subreddits}/#{sort}.json"
end
custom_url = generate_custom_url(reddits, sort_type)
puts "Your Personal URL: #{custom_url}\n"
# One dashed separator sized to the URL plus the fixed 20-dash prefix.
# (The old code interpolated the nil return value of `print` and relied
# on print's side effect for part of the line; emitted bytes are the
# same '-' * (20 + length) followed by a newline.)
puts '-' * (custom_url.length + 20)
# Get source of page
def get_page_source(page_url)
url = URI.parse(page_url)
req = Net::HTTP::Get.new(url.path)
Net::HTTP.start(url.host, url.port) do |http|
http.request(req)
end
end
res = get_page_source(custom_url)
# Add URLs and Title to hash
urls = {}
doc = JSON.parse(res.body)
doc['data']['children'].each do |link|
urls[link['data']['title']] = link['data']['url']
end
puts urls.inspect
# Fix ugly imgur URLs
urls.each_pair do |name, url|
# imgur.com -> i.imgur.com
if url =~ /^((http:\/\/)|(www))+imgur\.com.*$/
url.insert(url.index(/(?i)(imgur\.com).*$/), 'i.')
# i.imgur.com/1234 -> i.imgur.com/1234.jpg
unless url =~ /^.*\.(?i)((bmp)|(gif)|(jpeg)|(jpg)|(png)|(tiff))$/
url.concat(".jpg")
end
end
end
# True when the URL looks like a static picture: it must end in a known
# image extension and must not be a .php resource.
# Fix: the previous predicate returned the opposite of its name (false
# for image extensions) and the reject! caller compensated; method and
# caller are corrected together here, so the net filtering — keep image
# URLs, drop .php and everything else — is unchanged.
def is_picture?(file)
  return false if file =~ /^.+\.(?i)(php)/
  !(file =~ /^.+\.(?i)((bmp)|(gif)|(jpeg)|(jpg)|(png)|(tiff))$/).nil?
end
# Remove non-pictures
urls.reject! do |name, url|
  !is_picture?(url)
end
# Make directory for pictures
FileUtils.mkdir_p dir
# Follow redirects: GET uri_str, chasing HTTP redirects up to +limit+
# hops. Raises ArgumentError once the redirect budget is exhausted; any
# non-success, non-redirect response raises via Net::HTTPResponse#error!.
def fetch(uri_str, limit = 10)
  raise ArgumentError, 'HTTP redirect too deep' if limit.zero?
  response = Net::HTTP.get_response(URI.parse(uri_str))
  if response.is_a?(Net::HTTPSuccess)
    response
  elsif response.is_a?(Net::HTTPRedirection)
    fetch(response['location'], limit - 1)
  else
    response.error!
  end
end
# Make file names safe: keep only ASCII letters, digits and spaces,
# then turn the surviving spaces into underscores.
def sanitize(s)
  kept = s.chars.select { |ch| ch =~ /\A[ 0-9A-Za-z]\z/ }
  kept.join.tr(' ', '_')
end
# Download +url+ (following redirects via fetch) and write the response
# body to +path+.
#
# Fixes over the original:
# * the Net::HTTP.start wrapper opened a second, never-used connection
#   (fetch already performed the request) — removed;
# * the local `ext` was computed and never used — removed;
# * the file is opened in binary mode ('wb') so image bytes are not
#   mangled by newline translation on Windows;
# * File.open with a block guarantees the handle is closed even if the
#   write raises (and avoids Kernel#open).
def download_file(url, path)
  response = fetch(url)
  File.open(path, 'wb') do |file|
    file.write(response.body)
  end
end
# Download files
urls.each_pair do |name, url|
puts "Downloading: #{name}\n\t#{url}\n\n"
# Extension after the last dot; the list was filtered to picture URLs
# above, so an extension is expected to be present.
ext = url.match(/\.([^\.]+)$/).to_a.last
# Skip files already fetched on a previous run.
unless File.exist?("#{dir}/#{sanitize(name)}.#{ext.downcase}")
download_file(url, "#{dir}/#{sanitize(name)}.#{ext.downcase}")
end
end
puts 'Downloading Complete'
|
# NOTE(review): truncated snippet — `module Game` is never closed in
# this chunk (one `end` short) and CSV is used without a visible
# require 'csv'. Code kept verbatim; only comments added.
module Game
class Cards
def initialize
end
# Load the white-card deck; CSV.read returns an array of row arrays.
def make_white_deck
white_deck = CSV.read("./csv/whitecards.csv")
end
# Placeholder for the game board.
class Board
end
end
Make the black cards deck
# NOTE(review): truncated snippet — `module Game` is never closed in
# this chunk (one `end` short) and CSV is used without a visible
# require 'csv'. Code kept verbatim; only comments added.
module Game
class Cards
def initialize
end
# Load the white-card deck; CSV.read returns an array of row arrays.
def make_white_deck
white_deck = CSV.read("./csv/whitecards.csv")
end
# Load the black-card deck from its own CSV file.
def make_black_deck
black_deck = CSV.read("./csv/blackcards.csv")
end
# Placeholder for the game board.
class Board
end
end
|
#!/usr/bin/ruby -w
require 'rubygems'
require 'optparse'
require 'active_resource'
require 'json'
require 'dbi'
require 'uuidtools'
# Base ActiveResource class for the accounting REST API (XML wire format).
class GenericResource < ActiveResource::Base
self.format = :xml
end
# REST resource representing one cloud accounting record.
class CloudRecord < GenericResource
end
# Base class for the publisher backends; stores the batch of record
# hashes produced by OneacctFile#parse.
class LocalRecord
def initialize(records)
@records = records
end
end
# Publisher that renders each record in the APEL/SSM "Key: value" text
# format and prints the stanzas (terminated by "%%") to stdout.
class OneRecordSSM < LocalRecord
# Render one record hash as an SSM stanza string.
# NOTE(review): "GlobaUserName" and "StarTime" look like typos of
# GlobalUserName/StartTime, but they are part of the emitted wire
# format — confirm against the APEL message spec before changing.
# NOTE(review): String#+ raises TypeError if any field is nil/non-String,
# and DateTime#to_i on the time fields relies on ActiveSupport — confirm.
def print(record)
# Only completed VMs carry an EndTime line.
if record['statusSSM'] == "completed"
endBuff = "EndTime: " + record['endTime'].to_i.to_s + "\n"
else
endBuff = ""
end
"VMUUID: " + record['VMUUID'] + "\n" +
"SiteName: " + record['resourceName'] + "\n" +
"MachineName: " + record['localVMID'] + "\n" +
"LocalUserId: " + record['local_user'] + "\n" +
"LocalGroupId: " + record['local_group'] + "\n" +
"GlobaUserName: " + "" + "\n" +
"FQAN: " + "" + "\n" +
"Status: " + record['statusSSM']+ "\n" +
"StarTime: " + record['startTime'].to_i.to_s + "\n" +
endBuff +
"SuspendDuration: " + "" + "\n" +
"WallDuration: " + record['wallDuration'].to_i.to_s + "\n" +
"CpuDuration: " + record['cpuDuration'].to_i.to_s + "\n" + #Check validity of this number! It is inferred from percentage of CPU consupmption
"CpuCount: " + record['cpuCount'] + "\n" +
"NetworkType: " + "" + "\n" +
"NetworkInbound: " + record['networkInbound'] + "\n" +
"NetworkOutbound: " + record['networkOutbound'] + "\n" +
"Memory: " + record['memory'] + "\n" +
"Disk: " + "" + "\n" +
"StorageRecordId: " + "" + "\n" +
"ImageId: " + record['diskImage'] + "\n" +
"CloudType: " + "OpenNebula" + "\n" + "%%\n"
end
# Print every record stanza to stdout.
def post
@records.each do |record|
puts print(record)
end
end
end
# Publisher that writes SSM stanzas into files under @dir, at most
# @limit records per file, each file prefixed with the APEL header.
#
# Fixes over the original:
# * @@written/@@files class variables (shared across every instance and
#   subclass) are now per-instance counters;
# * File.open with a block replaces File.new + manual close, so the
#   handle is closed even when a write raises (the old `if out` branch
#   was dead code — File.new raises instead of returning nil);
# * a non-positive record limit now raises instead of looping forever
#   creating header-only files.
class OneRecordSSMFile < OneRecordSSM
  def initialize(records)
    super
    @written = 0
    @files = 0
  end

  # Output directory for the generated SSM files.
  def dir=(dir)
    @dir = dir
  end

  # Maximum number of records per file (string or integer).
  def limit=(limit)
    @limit = limit
  end

  # Random hexadecimal suffix used to avoid file-name collisions.
  def RandomExa(length, chars = 'abcdef0123456789')
    rnd_str = ''
    length.times { rnd_str << chars[rand(chars.size)] }
    rnd_str
  end

  # Unique-ish file name: current epoch time in hex plus a random suffix.
  def generateFileName
    Time.now.to_i.to_s(16) + RandomExa(6)
  end

  # Drain @records into one or more files of at most @limit stanzas each.
  def post
    per_file = @limit.to_i
    raise ArgumentError, 'limit must be positive' if per_file <= 0
    until @records.empty?
      @written = 0
      File.open("#{@dir}/#{generateFileName}", 'w') do |out|
        out.syswrite("APEL-cloud-message: v0.2\n")
        while @written < per_file && !@records.empty?
          out.syswrite(print(@records.pop))
          @written += 1
        end
      end
      @files += 1
    end
  end
end
# Publisher that dumps the whole record batch to stdout as one JSON array.
class OneRecordJSON < LocalRecord
def post
puts @records.to_json
end
end
# Publisher that dumps the record batch as XML.
# NOTE(review): Array#to_xml is an ActiveSupport extension (pulled in
# via active_resource) — confirm it is loaded in this runtime.
class OneRecordXML < LocalRecord
def post
puts @records.to_xml
end
end
# Publisher that POSTs each record to the REST accounting API via the
# CloudRecord ActiveResource class, retrying once on transient errors.
class OneRecordActiveResource < LocalRecord
  # Strip keys the rails API rejects and fix the networkOutBound casing
  # mismatch between the sensor and the API.
  def recordMangle(r)
    r.delete('cpuPercentage')
    r.delete('cpuPercentageNormalized')
    r.delete('resourceName')
    r['networkOutBound'] = r['networkOutbound']
    r.delete('networkOutbound')
    r.delete('statusLiteral')
  end

  # Send every record; when a save is invalid, look the record up by
  # VMUUID and update it in place. Errors are retried once with
  # exponential backoff.
  #
  # Fixes over the original:
  # * `options[:verbose]` referenced an undefined local, so every
  #   invalid record raised NameError (silently swallowed by the
  #   rescue) before the update path could run — the guard is dropped;
  # * `rescue Exception` (which also swallowed SignalException and
  #   SystemExit) is narrowed to StandardError.
  def post
    @records.each do |record|
      recordMangle(record)
      r = CloudRecord.new(record)
      tries = 0
      begin
        tries += 1
        r.save
        unless r.valid?
          puts r.errors.full_messages
          recordBuff = CloudRecord.get(:search, :VMUUID => r.VMUUID)
          newRecord = CloudRecord.find(recordBuff["id"])
          newRecord.load(r.attributes)
          newRecord.save
        end
      rescue StandardError => e
        puts "Error sending #{r.VMUUID}:#{e.to_s}. Retrying"
        if tries < 2
          sleep(2**tries)
          retry
        else
          puts "Could not send record #{r.VMUUID}."
        end
      end
    end
  end
end
# Reads a `oneacct --json` dump and converts every HISTORY entry into a
# record hash tagged with the configured resource (site) name.
class OneacctFile
  def initialize(file, resourceName)
    @file = file
    @resourceName = resourceName
  end

  # Parse the JSON dump and return the array of record vectors.
  def parse
    history = JSON.parse(IO.read(@file))["HISTORY_RECORDS"]["HISTORY"]
    history.map do |json_record|
      one_record = OpenNebulaJsonRecord.new(json_record)
      one_record.resourceName = @resourceName
      one_record.recordVector
    end
  end
end
# Maps OpenNebula numeric VM state / LCM state pairs (as strings) to
# readable names and to the coarse SSM status vocabulary
# (started / suspended / completed).
class OpenNebulaStatus
  # Human-readable names indexed by the numeric VM state.
  STATE_NAMES = %w[INIT PENDING HOLD ACTIVE STOPPED SUSPENDED DONE
                   FAILED POWEROFF UNDEFINED1 UNDEFINED2].freeze

  # Human-readable names indexed by the numeric LCM state (relevant
  # while the VM state is ACTIVE, i.e. '3').
  LCM_STATE_NAMES = %w[LCM_INIT PROLOG BOOT RUNNING MIGRATE SAVE_STOP
                       SAVE_SUSPEND SAVE_MIGRATE PROLOG_MIGRATE
                       PROLOG_RESUME EPILOG_STOP EPILOG SHUTDOWN CANCEL
                       FAILURE CLEANUP UNKNOWN HOTPLUG SHUTDOWN_POWEROFF
                       BOOT_UNKNOWN BOOT_POWEROFF BOOT_SUSPENDED
                       BOOT_STOPPED LCMUNDEFINED1 LCMUNDEFINED2
                       LCMUNDEFINED3 LCMUNDEFINED4].freeze

  # Names that count as a running/starting VM for SSM purposes.
  STARTED = %w[INIT PENDING HOLD ACTIVE LCM_INIT PROLOG BOOT RUNNING
               MIGRATE SAVE_STOP SAVE_SUSPEND SAVE_MIGRATE
               PROLOG_MIGRATE PROLOG_RESUME EPILOG_STOP EPILOG
               BOOT_UNKNOWN BOOT_POWEROFF BOOT_SUSPENDED
               BOOT_STOPPED].freeze
  SUSPENDED = %w[SUSPENDED].freeze
  COMPLETED = %w[DONE FAILED POWEROFF SHUTDOWN CANCEL FAILURE
                 CLEANUP].freeze

  def initialize(state, lcm_state)
    @state = state
    @lcm_state = lcm_state
  end

  # Literal state name; while ACTIVE ('3') the LCM sub-state is the
  # interesting one. Unknown indexes yield "".
  def to_s
    if @state != '3'
      STATE_NAMES[@state.to_i].to_s
    else
      LCM_STATE_NAMES[@lcm_state.to_i].to_s
    end
  end

  # Coarse SSM status; states outside the three buckets are reported
  # verbatim as "one:<name>".
  def to_ssm
    name = to_s
    if STARTED.include?(name)
      'started'
    elsif SUSPENDED.include?(name)
      'suspended'
    elsif COMPLETED.include?(name)
      'completed'
    else
      "one:#{name}"
    end
  end
end
# Converts one raw `oneacct --json` HISTORY entry into the flat record
# hash ("record vector") consumed by the publisher backends.
class OpenNebulaJsonRecord
def initialize(jsonRecord)
@jsonRecord = jsonRecord
end
# Build the record hash. Values are taken straight from the JSON dump
# (mostly strings, as delivered by oneacct).
def recordVector
rv = {}
#rv['FQAN'] = @jsonRecord['a']
rv['cloudType'] = "OpenNebula"
if @jsonRecord["VM"]["TEMPLATE"]["CPU"] then
#Number of physical CPU was assigned in the template. Use this
rv['cpuCount'] = @jsonRecord["VM"]["TEMPLATE"]["CPU"]
else
#Number of physical CPU was not assigned in the template, just Virtual CPUS
#Where requested. This causes possible overbooking. Use this if physical is
#not specified
rv['cpuCount'] = @jsonRecord["VM"]["TEMPLATE"]["VCPU"]
end
#rv['cpuDuration'] = @jsonRecord["VM"]
#rv['Disk'] = @jsonRecord['e']
# diskImage: concatenation of the image names when several disks exist.
if @jsonRecord["VM"]["TEMPLATE"]["DISK"]
if @jsonRecord["VM"]["TEMPLATE"]["DISK"].kind_of?(Array)
rv['diskImage'] = ""
@jsonRecord["VM"]["TEMPLATE"]["DISK"].each do |disk|
rv['diskImage'] += disk["IMAGE"] if disk["IMAGE"]
end
else
rv['diskImage'] = @jsonRecord["VM"]["TEMPLATE"]["DISK"]["IMAGE"] if @jsonRecord["VM"]["TEMPLATE"]["DISK"]["IMAGE"]
end
end
# NOTE(review): this endTime assignment is dead — it is recomputed and
# overwritten a few lines below from RETIME/EETIME/ETIME.
rv['endTime'] = Time.at(@jsonRecord["ETIME"].to_i).to_datetime
#rv['globaluserName'] = @jsonRecord["e"]
rv['localVMID'] = @jsonRecord["VM"]["ID"]
rv['local_group'] = @jsonRecord["VM"]["GNAME"]
rv['local_user'] = @jsonRecord["VM"]["UNAME"]
rv['memory'] = @jsonRecord["VM"]["TEMPLATE"]["MEMORY"]
rv['networkInbound'] = @jsonRecord["VM"]["NET_RX"]
rv['networkOutbound'] = @jsonRecord["VM"]["NET_TX"]
rv['cpuPercentage'] = @jsonRecord["VM"]["CPU"]#<!-- Percentage of 1 CPU consumed (two fully consumed cpu is 200) -->
# NOTE(review): if cpuCount is nil or "0" this float division yields
# Infinity or NaN instead of raising — confirm upstream data.
rv['cpuPercentageNormalized'] = rv['cpuPercentage'].to_f/(100.0*rv['cpuCount'].to_f)
#rv['networkType'] = @jsonRecord['q']
#rv['resource_name'] = @resourceName
rv['status'] = @jsonRecord['VM']['STATE'] + ":" + @jsonRecord['VM']['LCM_STATE']
state = OpenNebulaStatus.new(@jsonRecord['VM']['STATE'],@jsonRecord['VM']['LCM_STATE'])
rv['statusLiteral'] = state.to_s
rv['statusSSM'] = state.to_ssm
#rv['storageRecordId'] = @jsonRecord['u']
#rv['suspendDuration'] = @jsonRecord['v']
## Compute endTime from the available information. use current date if none applies
endTimeBuff = Time.new.to_time.to_i
endTimeBuff = @jsonRecord["RETIME"] if @jsonRecord["RETIME"] != "0" #RUNNING_ENDTIME
endTimeBuff = @jsonRecord["EETIME"] if @jsonRecord["EETIME"] != "0" #EPILOG_ENDTIME
endTimeBuff = @jsonRecord["ETIME"] if @jsonRecord["ETIME"] != "0"
rv['endTime'] = Time.at(endTimeBuff.to_i).to_datetime
## Compute startTime from the available information. use endTime if none applies
startTimeBuff = endTimeBuff
startTimeBuff = @jsonRecord["RSTIME"] if @jsonRecord["RSTIME"] != "0" #RUNNING_STARTTIME
startTimeBuff = @jsonRecord["PSTIME"] if @jsonRecord["PSTIME"] != "0" #PROLOG_STARTTIME
startTimeBuff = @jsonRecord["STIME"] if @jsonRecord["STIME"] != "0"
rv['startTime'] = Time.at(startTimeBuff.to_i).to_datetime
## wallDuration is by definition endTime - startTime
rv['wallDuration'] = rv['endTime'].to_i - rv['startTime'].to_i
rv['cpuDuration'] = rv['wallDuration'].to_f*rv['cpuPercentageNormalized']
## VMUUID must be assured unique.
# Deterministic UUID derived from site name, submit time and VM id.
# NOTE(review): the stored value is a UUIDTools::UUID object, not a
# String — consumers using String#+ on it may raise; confirm.
buffer = @resourceName + "/" + @jsonRecord["STIME"] + "/" +@jsonRecord["VM"]["ID"]
rv['VMUUID'] = UUIDTools::UUID.md5_create(UUIDTools::UUID_DNS_NAMESPACE,buffer)
rv['resourceName'] = @resourceName
rv
end
# Debug string with the key identifying fields (recomputes the vector
# three times).
def to_s
stringVector = "VMUUID = " + self.recordVector['VMUUID'] + "\n"
stringVector += "startTime = " + self.recordVector['startTime'].to_s + "\n"
stringVector += "endTime = " + self.recordVector['endTime'].to_s + "\n"
end
# Site name used for tagging and for VMUUID derivation.
def resourceName=(resourceName)
@resourceName = resourceName
end
def resourceName
@resourceName
end
end
# Command-line driver: parses options, reads the oneacct JSON dump and
# hands the records to the selected publisher backend.
class OpennebulaSensor
  def initialize
    @options = {}
  end

  # Populate @options from ARGV.
  # (The original reset @options[:uri] = nil three times — copy-paste
  # slips before the -d and -P options; each option now initializes its
  # own key. Initialization happens while the parser is being built,
  # before parse! runs the handlers, so behavior is otherwise the same.)
  def getLineParameters
    opt_parser = OptionParser.new do |opt|
      opt.banner = "Usage: opennebulaSensorMain.rb [OPTIONS]"
      @options[:verbose] = false
      opt.on('-v', '--verbose', 'Output more information') do
        @options[:verbose] = true
      end
      @options[:uri] = nil
      opt.on('-U', '--URI uri', 'URI to contact') do |uri|
        @options[:uri] = uri
      end
      @options[:resourceName] = nil
      opt.on('-r', '--resourceName resourceName', 'Name of resource, e.g. BDII siteName') do |resourceName|
        @options[:resourceName] = resourceName
      end
      @options[:outputDir] = nil
      opt.on('-d', '--dir dir', 'outpudDir for ssm files') do |outDir|
        @options[:outputDir] = outDir
      end
      @options[:limit] = nil
      opt.on('-L', '--Limit limit', 'number of record per output file with ssmfile publisher') do |limit|
        @options[:limit] = limit
      end
      @options[:publisher_type] = nil
      opt.on('-P', '--Publisher type', 'Publisher type {ssm,ssmfile,XML,JSON,ActiveResource}') do |type|
        @options[:publisher_type] = type
      end
      @options[:file] = nil
      opt.on('-F', '--File file', 'File containing the output of oneacct --json command') do |file|
        @options[:file] = file
      end
      @options[:token] = nil
      opt.on('-t', '--token token', 'Authorization token (needed only with FAUST ActiveResource backend). Must be requested to the service administrator') do |token|
        @options[:token] = token
      end
      opt.on('-h', '--help', 'Print this screen') do
        puts opt
        exit
      end
    end
    opt_parser.parse!
  end

  # Instantiate the publisher selected with -P; returns nil for an
  # unknown type (the original's unused `r =` local is dropped).
  def newPublisher(records)
    case @options[:publisher_type]
    when 'JSON' then OneRecordJSON.new(records)
    when 'XML' then OneRecordXML.new(records)
    when 'ssm' then OneRecordSSM.new(records)
    when 'ssmfile'
      publisher = OneRecordSSMFile.new(records)
      publisher.limit = @options[:limit]
      publisher.dir = @options[:outputDir]
      publisher
    when 'ActiveResource'
      # Configure the shared ActiveResource endpoint before building
      # the publisher.
      CloudRecord.site = @options[:uri]
      CloudRecord.headers['Authorization'] = "Token token=\"#{@options[:token]}\""
      CloudRecord.timeout = 5
      CloudRecord.proxy = ""
      OneRecordActiveResource.new(records)
    end
  end

  # Parse options, read the records and publish them.
  def main
    self.getLineParameters
    f = OneacctFile.new(@options[:file], @options[:resourceName])
    p = newPublisher(f.parse)
    p.post
  end
end
Modified the sensor so it also publishes new records for VMs that have already been accounted
#!/usr/bin/ruby -w
require 'rubygems'
require 'optparse'
require 'active_resource'
require 'json'
require 'dbi'
require 'uuidtools'
# Base ActiveResource class for the accounting REST API (XML wire format).
class GenericResource < ActiveResource::Base
self.format = :xml
end
# REST resource representing one cloud accounting record.
class CloudRecord < GenericResource
end
# Base class for the publisher backends; stores the batch of record
# hashes produced by OneacctFile#parse.
class LocalRecord
def initialize(records)
@records = records
end
end
# Publisher that renders each record in the APEL/SSM "Key: value" text
# format and prints the stanzas (terminated by "%%") to stdout.
class OneRecordSSM < LocalRecord
# Render one record hash as an SSM stanza string.
# NOTE(review): "GlobaUserName" and "StarTime" look like typos of
# GlobalUserName/StartTime, but they are part of the emitted wire
# format — confirm against the APEL message spec before changing.
# NOTE(review): String#+ raises TypeError if any field is nil/non-String,
# and DateTime#to_i on the time fields relies on ActiveSupport — confirm.
def print(record)
# Only completed VMs carry an EndTime line.
if record['statusSSM'] == "completed"
endBuff = "EndTime: " + record['endTime'].to_i.to_s + "\n"
else
endBuff = ""
end
"VMUUID: " + record['VMUUID'] + "\n" +
"SiteName: " + record['resourceName'] + "\n" +
"MachineName: " + record['localVMID'] + "\n" +
"LocalUserId: " + record['local_user'] + "\n" +
"LocalGroupId: " + record['local_group'] + "\n" +
"GlobaUserName: " + "" + "\n" +
"FQAN: " + "" + "\n" +
"Status: " + record['statusSSM']+ "\n" +
"StarTime: " + record['startTime'].to_i.to_s + "\n" +
endBuff +
"SuspendDuration: " + "" + "\n" +
"WallDuration: " + record['wallDuration'].to_i.to_s + "\n" +
"CpuDuration: " + record['cpuDuration'].to_i.to_s + "\n" + #Check validity of this number! It is inferred from percentage of CPU consupmption
"CpuCount: " + record['cpuCount'] + "\n" +
"NetworkType: " + "" + "\n" +
"NetworkInbound: " + record['networkInbound'] + "\n" +
"NetworkOutbound: " + record['networkOutbound'] + "\n" +
"Memory: " + record['memory'] + "\n" +
"Disk: " + "" + "\n" +
"StorageRecordId: " + "" + "\n" +
"ImageId: " + record['diskImage'] + "\n" +
"CloudType: " + "OpenNebula" + "\n" + "%%\n"
end
# Print every record stanza to stdout.
def post
@records.each do |record|
puts print(record)
end
end
end
# Publisher that writes SSM stanzas into files under @dir, at most
# @limit records per file, each file prefixed with the APEL header.
#
# Fixes over the original:
# * @@written/@@files class variables (shared across every instance and
#   subclass) are now per-instance counters;
# * File.open with a block replaces File.new + manual close, so the
#   handle is closed even when a write raises (the old `if out` branch
#   was dead code — File.new raises instead of returning nil);
# * a non-positive record limit now raises instead of looping forever
#   creating header-only files.
class OneRecordSSMFile < OneRecordSSM
  def initialize(records)
    super
    @written = 0
    @files = 0
  end

  # Output directory for the generated SSM files.
  def dir=(dir)
    @dir = dir
  end

  # Maximum number of records per file (string or integer).
  def limit=(limit)
    @limit = limit
  end

  # Random hexadecimal suffix used to avoid file-name collisions.
  def RandomExa(length, chars = 'abcdef0123456789')
    rnd_str = ''
    length.times { rnd_str << chars[rand(chars.size)] }
    rnd_str
  end

  # Unique-ish file name: current epoch time in hex plus a random suffix.
  def generateFileName
    Time.now.to_i.to_s(16) + RandomExa(6)
  end

  # Drain @records into one or more files of at most @limit stanzas each.
  def post
    per_file = @limit.to_i
    raise ArgumentError, 'limit must be positive' if per_file <= 0
    until @records.empty?
      @written = 0
      File.open("#{@dir}/#{generateFileName}", 'w') do |out|
        out.syswrite("APEL-cloud-message: v0.2\n")
        while @written < per_file && !@records.empty?
          out.syswrite(print(@records.pop))
          @written += 1
        end
      end
      @files += 1
    end
  end
end
# Publisher that dumps the whole record batch to stdout as one JSON array.
class OneRecordJSON < LocalRecord
def post
puts @records.to_json
end
end
# Publisher that dumps the record batch as XML.
# NOTE(review): Array#to_xml is an ActiveSupport extension (pulled in
# via active_resource) — confirm it is loaded in this runtime.
class OneRecordXML < LocalRecord
def post
puts @records.to_xml
end
end
# Publisher that POSTs each record to the REST accounting API via the
# CloudRecord ActiveResource class, retrying once on transient errors.
#
# Fix over the original: `rescue Exception` (which also swallowed
# SignalException and SystemExit) is narrowed to StandardError.
class OneRecordActiveResource < LocalRecord
  # Strip keys the rails API rejects and fix the networkOutBound casing
  # mismatch between the sensor and the API.
  def recordMangle(r)
    r.delete('cpuPercentage')
    r.delete('cpuPercentageNormalized')
    r.delete('resourceName')
    r['networkOutBound'] = r['networkOutbound']
    r.delete('networkOutbound')
    r.delete('statusLiteral')
    r.delete('statusSSM')
  end

  # Send every record; errors are retried once with exponential backoff.
  def post
    @records.each do |record|
      recordMangle(record)
      r = CloudRecord.new(record)
      tries = 0
      begin
        tries += 1
        r.save
        unless r.valid?
          puts r.errors.full_messages
          # In-place update of already-accounted VMs is intentionally
          # disabled (kept for reference).
          #recordBuff = CloudRecord.get(:search, :VMUUID => r.VMUUID )
          #newRecord = CloudRecord.find(recordBuff["id"])
          #newRecord.load(r.attributes)
          #newRecord.save
        end
      rescue StandardError => e
        puts "Error sending #{r.VMUUID}:#{e.to_s}. Retrying"
        if tries < 2
          sleep(2**tries)
          retry
        else
          puts "Could not send record #{r.VMUUID}."
        end
      end
    end
  end
end
# Reads a `oneacct --json` dump and converts every HISTORY entry into a
# record hash tagged with the configured resource (site) name.
class OneacctFile
  def initialize(file, resourceName)
    @file = file
    @resourceName = resourceName
  end

  # Parse the JSON dump and return the array of record vectors.
  def parse
    history = JSON.parse(IO.read(@file))["HISTORY_RECORDS"]["HISTORY"]
    history.map do |json_record|
      one_record = OpenNebulaJsonRecord.new(json_record)
      one_record.resourceName = @resourceName
      one_record.recordVector
    end
  end
end
# Maps OpenNebula numeric VM state / LCM state pairs (as strings) to
# readable names and to the coarse SSM status vocabulary
# (started / suspended / completed).
class OpenNebulaStatus
  # Human-readable names indexed by the numeric VM state.
  STATE_NAMES = %w[INIT PENDING HOLD ACTIVE STOPPED SUSPENDED DONE
                   FAILED POWEROFF UNDEFINED1 UNDEFINED2].freeze

  # Human-readable names indexed by the numeric LCM state (relevant
  # while the VM state is ACTIVE, i.e. '3').
  LCM_STATE_NAMES = %w[LCM_INIT PROLOG BOOT RUNNING MIGRATE SAVE_STOP
                       SAVE_SUSPEND SAVE_MIGRATE PROLOG_MIGRATE
                       PROLOG_RESUME EPILOG_STOP EPILOG SHUTDOWN CANCEL
                       FAILURE CLEANUP UNKNOWN HOTPLUG SHUTDOWN_POWEROFF
                       BOOT_UNKNOWN BOOT_POWEROFF BOOT_SUSPENDED
                       BOOT_STOPPED LCMUNDEFINED1 LCMUNDEFINED2
                       LCMUNDEFINED3 LCMUNDEFINED4].freeze

  # Names that count as a running/starting VM for SSM purposes.
  STARTED = %w[INIT PENDING HOLD ACTIVE LCM_INIT PROLOG BOOT RUNNING
               MIGRATE SAVE_STOP SAVE_SUSPEND SAVE_MIGRATE
               PROLOG_MIGRATE PROLOG_RESUME EPILOG_STOP EPILOG
               BOOT_UNKNOWN BOOT_POWEROFF BOOT_SUSPENDED
               BOOT_STOPPED].freeze
  SUSPENDED = %w[SUSPENDED].freeze
  COMPLETED = %w[DONE FAILED POWEROFF SHUTDOWN CANCEL FAILURE
                 CLEANUP].freeze

  def initialize(state, lcm_state)
    @state = state
    @lcm_state = lcm_state
  end

  # Literal state name; while ACTIVE ('3') the LCM sub-state is the
  # interesting one. Unknown indexes yield "".
  def to_s
    if @state != '3'
      STATE_NAMES[@state.to_i].to_s
    else
      LCM_STATE_NAMES[@lcm_state.to_i].to_s
    end
  end

  # Coarse SSM status; states outside the three buckets are reported
  # verbatim as "one:<name>".
  def to_ssm
    name = to_s
    if STARTED.include?(name)
      'started'
    elsif SUSPENDED.include?(name)
      'suspended'
    elsif COMPLETED.include?(name)
      'completed'
    else
      "one:#{name}"
    end
  end
end
# Converts one raw `oneacct --json` HISTORY entry into the flat record
# hash ("record vector") consumed by the publisher backends.
class OpenNebulaJsonRecord
def initialize(jsonRecord)
@jsonRecord = jsonRecord
end
# Build the record hash. Values are taken straight from the JSON dump
# (mostly strings, as delivered by oneacct).
def recordVector
rv = {}
#rv['FQAN'] = @jsonRecord['a']
rv['cloudType'] = "OpenNebula"
if @jsonRecord["VM"]["TEMPLATE"]["CPU"] then
#Number of physical CPU was assigned in the template. Use this
rv['cpuCount'] = @jsonRecord["VM"]["TEMPLATE"]["CPU"]
else
#Number of physical CPU was not assigned in the template, just Virtual CPUS
#Where requested. This causes possible overbooking. Use this if physical is
#not specified
rv['cpuCount'] = @jsonRecord["VM"]["TEMPLATE"]["VCPU"]
end
#rv['cpuDuration'] = @jsonRecord["VM"]
#rv['Disk'] = @jsonRecord['e']
# diskImage: concatenation of the image names when several disks exist.
if @jsonRecord["VM"]["TEMPLATE"]["DISK"]
if @jsonRecord["VM"]["TEMPLATE"]["DISK"].kind_of?(Array)
rv['diskImage'] = ""
@jsonRecord["VM"]["TEMPLATE"]["DISK"].each do |disk|
rv['diskImage'] += disk["IMAGE"] if disk["IMAGE"]
end
else
rv['diskImage'] = @jsonRecord["VM"]["TEMPLATE"]["DISK"]["IMAGE"] if @jsonRecord["VM"]["TEMPLATE"]["DISK"]["IMAGE"]
end
end
# NOTE(review): this endTime assignment is dead — it is recomputed and
# overwritten a few lines below from RETIME/EETIME/ETIME.
rv['endTime'] = Time.at(@jsonRecord["ETIME"].to_i).to_datetime
#rv['globaluserName'] = @jsonRecord["e"]
rv['localVMID'] = @jsonRecord["VM"]["ID"]
rv['local_group'] = @jsonRecord["VM"]["GNAME"]
rv['local_user'] = @jsonRecord["VM"]["UNAME"]
rv['memory'] = @jsonRecord["VM"]["TEMPLATE"]["MEMORY"]
rv['networkInbound'] = @jsonRecord["VM"]["NET_RX"]
rv['networkOutbound'] = @jsonRecord["VM"]["NET_TX"]
rv['cpuPercentage'] = @jsonRecord["VM"]["CPU"]#<!-- Percentage of 1 CPU consumed (two fully consumed cpu is 200) -->
# NOTE(review): if cpuCount is nil or "0" this float division yields
# Infinity or NaN instead of raising — confirm upstream data.
rv['cpuPercentageNormalized'] = rv['cpuPercentage'].to_f/(100.0*rv['cpuCount'].to_f)
#rv['networkType'] = @jsonRecord['q']
#rv['resource_name'] = @resourceName
rv['status'] = @jsonRecord['VM']['STATE'] + ":" + @jsonRecord['VM']['LCM_STATE']
state = OpenNebulaStatus.new(@jsonRecord['VM']['STATE'],@jsonRecord['VM']['LCM_STATE'])
rv['statusLiteral'] = state.to_s
rv['statusSSM'] = state.to_ssm
#rv['storageRecordId'] = @jsonRecord['u']
#rv['suspendDuration'] = @jsonRecord['v']
## Compute endTime from the available information. use current date if none applies
endTimeBuff = Time.new.to_time.to_i
endTimeBuff = @jsonRecord["RETIME"] if @jsonRecord["RETIME"] != "0" #RUNNING_ENDTIME
endTimeBuff = @jsonRecord["EETIME"] if @jsonRecord["EETIME"] != "0" #EPILOG_ENDTIME
endTimeBuff = @jsonRecord["ETIME"] if @jsonRecord["ETIME"] != "0"
rv['endTime'] = Time.at(endTimeBuff.to_i).to_datetime
## Compute startTime from the available information. use endTime if none applies
startTimeBuff = endTimeBuff
startTimeBuff = @jsonRecord["RSTIME"] if @jsonRecord["RSTIME"] != "0" #RUNNING_STARTTIME
startTimeBuff = @jsonRecord["PSTIME"] if @jsonRecord["PSTIME"] != "0" #PROLOG_STARTTIME
startTimeBuff = @jsonRecord["STIME"] if @jsonRecord["STIME"] != "0"
rv['startTime'] = Time.at(startTimeBuff.to_i).to_datetime
## wallDuration is by definition endTime - startTime
rv['wallDuration'] = rv['endTime'].to_i - rv['startTime'].to_i
rv['cpuDuration'] = rv['wallDuration'].to_f*rv['cpuPercentageNormalized']
## VMUUID must be assured unique.
# Deterministic UUID derived from site name, submit time and VM id.
# NOTE(review): the stored value is a UUIDTools::UUID object, not a
# String — consumers using String#+ on it may raise; confirm.
buffer = @resourceName + "/" + @jsonRecord["STIME"] + "/" +@jsonRecord["VM"]["ID"]
rv['VMUUID'] = UUIDTools::UUID.md5_create(UUIDTools::UUID_DNS_NAMESPACE,buffer)
rv['resourceName'] = @resourceName
rv
end
# Debug string with the key identifying fields (recomputes the vector
# three times).
def to_s
stringVector = "VMUUID = " + self.recordVector['VMUUID'] + "\n"
stringVector += "startTime = " + self.recordVector['startTime'].to_s + "\n"
stringVector += "endTime = " + self.recordVector['endTime'].to_s + "\n"
end
# Site name used for tagging and for VMUUID derivation.
def resourceName=(resourceName)
@resourceName = resourceName
end
def resourceName
@resourceName
end
end
# Command-line driver: parses options, reads the oneacct JSON dump and
# hands the records to the selected publisher backend.
class OpennebulaSensor
  def initialize
    @options = {}
  end

  # Populate @options from ARGV.
  # (The original reset @options[:uri] = nil three times — copy-paste
  # slips before the -d and -P options; each option now initializes its
  # own key. Initialization happens while the parser is being built,
  # before parse! runs the handlers, so behavior is otherwise the same.)
  def getLineParameters
    opt_parser = OptionParser.new do |opt|
      opt.banner = "Usage: opennebulaSensorMain.rb [OPTIONS]"
      @options[:verbose] = false
      opt.on('-v', '--verbose', 'Output more information') do
        @options[:verbose] = true
      end
      @options[:uri] = nil
      opt.on('-U', '--URI uri', 'URI to contact') do |uri|
        @options[:uri] = uri
      end
      @options[:resourceName] = nil
      opt.on('-r', '--resourceName resourceName', 'Name of resource, e.g. BDII siteName') do |resourceName|
        @options[:resourceName] = resourceName
      end
      @options[:outputDir] = nil
      opt.on('-d', '--dir dir', 'outpudDir for ssm files') do |outDir|
        @options[:outputDir] = outDir
      end
      @options[:limit] = nil
      opt.on('-L', '--Limit limit', 'number of record per output file with ssmfile publisher') do |limit|
        @options[:limit] = limit
      end
      @options[:publisher_type] = nil
      opt.on('-P', '--Publisher type', 'Publisher type {ssm,ssmfile,XML,JSON,ActiveResource}') do |type|
        @options[:publisher_type] = type
      end
      @options[:file] = nil
      opt.on('-F', '--File file', 'File containing the output of oneacct --json command') do |file|
        @options[:file] = file
      end
      @options[:token] = nil
      opt.on('-t', '--token token', 'Authorization token (needed only with FAUST ActiveResource backend). Must be requested to the service administrator') do |token|
        @options[:token] = token
      end
      opt.on('-h', '--help', 'Print this screen') do
        puts opt
        exit
      end
    end
    opt_parser.parse!
  end

  # Instantiate the publisher selected with -P; returns nil for an
  # unknown type (the original's unused `r =` local is dropped).
  def newPublisher(records)
    case @options[:publisher_type]
    when 'JSON' then OneRecordJSON.new(records)
    when 'XML' then OneRecordXML.new(records)
    when 'ssm' then OneRecordSSM.new(records)
    when 'ssmfile'
      publisher = OneRecordSSMFile.new(records)
      publisher.limit = @options[:limit]
      publisher.dir = @options[:outputDir]
      publisher
    when 'ActiveResource'
      # Configure the shared ActiveResource endpoint before building
      # the publisher.
      CloudRecord.site = @options[:uri]
      CloudRecord.headers['Authorization'] = "Token token=\"#{@options[:token]}\""
      CloudRecord.timeout = 5
      CloudRecord.proxy = ""
      OneRecordActiveResource.new(records)
    end
  end

  # Parse options, read the records and publish them.
  def main
    self.getLineParameters
    f = OneacctFile.new(@options[:file], @options[:resourceName])
    p = newPublisher(f.parse)
    p.post
  end
end
# -*- coding:utf-8 -*-
# Search addon: adds a "Search" tab with a query box, a search button
# and a save button that stores the query as a saved search.
Module.new do
plugin = Plugin::create(:search)
main = Gtk::TimeLine.new()
# Search results are standalone tweets; do not chase in_reply_to chains.
main.force_retrieve_in_reply_to = false
service = nil
querybox = Gtk::Entry.new()
querycont = Gtk::VBox.new(false, 0)
searchbtn = Gtk::Button.new('検索')
savebtn = Gtk::Button.new('保存')
# Run the search: disable the controls while the request is in flight,
# clear the timeline and fill it with up to 100 results.
searchbtn.signal_connect('clicked'){ |elm|
elm.sensitive = querybox.sensitive = false
main.clear
service.search(q: querybox.text, rpp: 100).next{ |res|
main.add(res) if res.is_a? Array
elm.sensitive = querybox.sensitive = true }.terminate }
# Persist the current query on the service and notify the saved-search
# addon via the :saved_search_regist event.
savebtn.signal_connect('clicked'){ |elm|
Gtk::Lock.synchronize{
query = querybox.text
service.search_create(query: query).next{ |saved_search|
Plugin.call(:saved_search_regist, saved_search[:id], query)
}.terminate("検索キーワード「#{query}」を保存できませんでした。あとで試してみてください") } }
querycont.closeup(Gtk::HBox.new(false, 0).pack_start(querybox).closeup(searchbtn))
querycont.closeup(Gtk::HBox.new(false, 0).closeup(savebtn))
# On boot: remember the service, build the tab, and register a link
# rule so clicking a hashtag (half- or full-width '#') fills the query
# box and triggers a search.
plugin.add_event(:boot){ |s|
service = s
container = Gtk::VBox.new(false, 0).pack_start(querycont, false).pack_start(main, true)
Plugin.call(:mui_tab_regist, container, 'Search', MUI::Skin.get("search.png"))
Message::Entity.addlinkrule(:hashtags, /(?:#|#)[a-zA-Z0-9_]+/){ |segment|
querybox.text = '#' + segment[:url].match(/^(?:#|#)?(.+)$/)[1]
searchbtn.clicked
Addon.focus('Search') } }
end
# Saved-search addon: one tab per saved search, refreshed periodically
# and kept in sync with the searches stored on the service.
Module.new do
# Tab subclass: adds a close button that also deletes the saved search
# on the service side.
@tab = Class.new(Addon.gen_tabclass){
def on_create(*args)
super
timeline.force_retrieve_in_reply_to = false
del = Gtk::Button.new.add(Gtk::WebIcon.new(MUI::Skin.get('close.png'), 16, 16))
del.signal_connect('clicked'){ |e|
@service.search_destroy(id: @options[:id]){ |event, dummy|
remove if event == :success } }
@header.closeup(del)
end
def suffix
'(Saved Search)' end
# Re-run this tab's query (optionally served from cache) and replace
# the timeline contents.
def search(use_cache=false)
@service.search(q: @options[:query], rpp: 100, cache: use_cache).next{ |res|
update(res) if res.is_a? Array
}.terminate
self end }
# Wire up plugin events: initial load on boot, periodic refresh, and
# immediate tab creation when a search is saved from the Search tab.
def self.boot
plugin = Plugin::create(:saved_search)
plugin.add_event(:boot){ |service|
@service = service
@count = 0
update(UserConfig[:use_cache_first_query]) }
plugin.add_event(:period){ |service|
@count += 1
if(@count >= UserConfig[:retrieve_interval_search])
update
@count = 0 end }
plugin.add_event(:saved_search_regist){ |id, query|
add_tab(id, query, query) } end
# Pull the saved-search list from the service; open/refresh a tab per
# entry and close tabs whose search no longer exists.
def self.update(use_cache=false)
@service.saved_searches(cache: use_cache).next{ |res|
if res
remove_unmarked{
res.each{ |record|
add_tab(record[:id], URI.decode(record[:query]), URI.decode(record[:name])) } } end }.terminate
end
# Mark-and-sweep: tabs not re-marked inside the block get closed.
def self.remove_unmarked
@tab.tabs.each{ |tab|
tab.mark = false }
yield
@tab.tabs.each{ |tab|
tab.remove if not tab.mark } end
# Find-or-create the tab for a saved search; existing tabs are marked
# as still alive and refreshed from cache (:keep is truthy).
def self.add_tab(id, query, name)
tab = @tab.tabs.find{ |tab| tab.name == name }
if tab
tab.search.mark = true
tab.search(:keep)
else
@tab.new(name, @service,
:id => id,
:query => query,
:icon => MUI::Skin.get("savedsearch.png")).search(true) end end
boot
end
Enterで検索できるようにする refs #450
git-svn-id: 2d5b2631527c13f1be6d28af81e06d7b0891ab74@727 03aab468-d3d2-4883-8b12-f661bbf03fa8
# -*- coding:utf-8 -*-
# Search addon: adds a "Search" tab with a query box, a search button
# and a save button that stores the query as a saved search.
Module.new do
plugin = Plugin::create(:search)
main = Gtk::TimeLine.new()
# Search results are standalone tweets; do not chase in_reply_to chains.
main.force_retrieve_in_reply_to = false
service = nil
querybox = Gtk::Entry.new()
querycont = Gtk::VBox.new(false, 0)
searchbtn = Gtk::Button.new('検索')
savebtn = Gtk::Button.new('保存')
# Pressing Enter in the query box triggers the search button (refs #450).
querybox.signal_connect('activate'){ |elm|
searchbtn.clicked }
# Run the search: disable the controls while the request is in flight,
# clear the timeline and fill it with up to 100 results.
searchbtn.signal_connect('clicked'){ |elm|
elm.sensitive = querybox.sensitive = false
main.clear
service.search(q: querybox.text, rpp: 100).next{ |res|
main.add(res) if res.is_a? Array
elm.sensitive = querybox.sensitive = true }.terminate }
# Persist the current query on the service and notify the saved-search
# addon via the :saved_search_regist event.
savebtn.signal_connect('clicked'){ |elm|
Gtk::Lock.synchronize{
query = querybox.text
service.search_create(query: query).next{ |saved_search|
Plugin.call(:saved_search_regist, saved_search[:id], query)
}.terminate("検索キーワード「#{query}」を保存できませんでした。あとで試してみてください") } }
querycont.closeup(Gtk::HBox.new(false, 0).pack_start(querybox).closeup(searchbtn))
querycont.closeup(Gtk::HBox.new(false, 0).closeup(savebtn))
# On boot: remember the service, build the tab, and register a link
# rule so clicking a hashtag (half- or full-width '#') fills the query
# box and triggers a search.
plugin.add_event(:boot){ |s|
service = s
container = Gtk::VBox.new(false, 0).pack_start(querycont, false).pack_start(main, true)
Plugin.call(:mui_tab_regist, container, 'Search', MUI::Skin.get("search.png"))
Message::Entity.addlinkrule(:hashtags, /(?:#|#)[a-zA-Z0-9_]+/){ |segment|
querybox.text = '#' + segment[:url].match(/^(?:#|#)?(.+)$/)[1]
searchbtn.clicked
Addon.focus('Search') } }
end
# Saved-search addon: one tab per saved search, refreshed periodically
# and kept in sync with the searches stored on the service.
Module.new do
# Tab subclass: adds a close button that also deletes the saved search
# on the service side.
@tab = Class.new(Addon.gen_tabclass){
def on_create(*args)
super
timeline.force_retrieve_in_reply_to = false
del = Gtk::Button.new.add(Gtk::WebIcon.new(MUI::Skin.get('close.png'), 16, 16))
del.signal_connect('clicked'){ |e|
@service.search_destroy(id: @options[:id]){ |event, dummy|
remove if event == :success } }
@header.closeup(del)
end
def suffix
'(Saved Search)' end
# Re-run this tab's query (optionally served from cache) and replace
# the timeline contents.
def search(use_cache=false)
@service.search(q: @options[:query], rpp: 100, cache: use_cache).next{ |res|
update(res) if res.is_a? Array
}.terminate
self end }
# Wire up plugin events: initial load on boot, periodic refresh, and
# immediate tab creation when a search is saved from the Search tab.
def self.boot
plugin = Plugin::create(:saved_search)
plugin.add_event(:boot){ |service|
@service = service
@count = 0
update(UserConfig[:use_cache_first_query]) }
plugin.add_event(:period){ |service|
@count += 1
if(@count >= UserConfig[:retrieve_interval_search])
update
@count = 0 end }
plugin.add_event(:saved_search_regist){ |id, query|
add_tab(id, query, query) } end
# Pull the saved-search list from the service; open/refresh a tab per
# entry and close tabs whose search no longer exists.
def self.update(use_cache=false)
@service.saved_searches(cache: use_cache).next{ |res|
if res
remove_unmarked{
res.each{ |record|
add_tab(record[:id], URI.decode(record[:query]), URI.decode(record[:name])) } } end }.terminate
end
# Mark-and-sweep: tabs not re-marked inside the block get closed.
def self.remove_unmarked
@tab.tabs.each{ |tab|
tab.mark = false }
yield
@tab.tabs.each{ |tab|
tab.remove if not tab.mark } end
# Find-or-create the tab for a saved search; existing tabs are marked
# as still alive and refreshed from cache (:keep is truthy).
def self.add_tab(id, query, name)
tab = @tab.tabs.find{ |tab| tab.name == name }
if tab
tab.search.mark = true
tab.search(:keep)
else
@tab.new(name, @service,
:id => id,
:query => query,
:icon => MUI::Skin.get("savedsearch.png")).search(true) end end
boot
end
|
# Preview all emails at http://localhost:3000/rails/mailers/notification_mailer
# A Notification is attached as a Comment to the relevant discussion,
# then sent by email to the user.
#
# The subject and body of a Notification can be customized by each demarche.
#
class NotificationMailer < ApplicationMailer
helper ServiceHelper
# "Dossier received" notification using the procedure's template.
def send_dossier_received(dossier)
send_notification(dossier, dossier.procedure.received_mail_template)
end
# "Dossier initiated" notification using the procedure's template.
def send_initiated_notification(dossier)
send_notification(dossier, dossier.procedure.initiated_mail_template)
end
# Final-decision notifications, with the decision justification.
# NOTE(review): send_final_notification is not defined in this file —
# presumably inherited or defined elsewhere; confirm before relying on
# these three methods.
def send_closed_notification(dossier)
send_final_notification(dossier, dossier.procedure.closed_mail_template, dossier.justificatif_motivation)
end
def send_refused_notification(dossier)
send_final_notification(dossier, dossier.procedure.refused_mail_template, dossier.justificatif_motivation)
end
def send_without_continuation_notification(dossier)
send_final_notification(dossier, dossier.procedure.without_continuation_mail_template, dossier.justificatif_motivation)
end
private
# Record the notification as a Commentaire on the dossier, then e-mail
# the user with the demarche-specific subject/body.
# NOTE(review): attachment_file is accepted but never used in the body.
def send_notification(dossier, mail_template, attachment_file = nil)
email = dossier.user.email
subject = mail_template.subject_for_dossier(dossier)
body = mail_template.body_for_dossier(dossier)
create_commentaire_for_notification(dossier, subject, body)
# Inline the procedure logo (when present) so the layout can show it.
if dossier.procedure.logo?
begin
logo_filename = dossier.procedure.logo.filename
attachments.inline[logo_filename] = dossier.procedure.logo.read
@logo_url = attachments[logo_filename].url
rescue StandardError => e
# A problem occured when reading logo, maybe the logo is missing and we should clean the procedure to remove logo reference ?
Raven.capture_exception(e)
end
end
@dossier = dossier
@service = dossier.procedure.service
mail(subject: subject, to: email) do |format|
# rubocop:disable Rails/OutputSafety
format.html { render(html: body.html_safe, layout: 'mailers/notification') }
# rubocop:enable Rails/OutputSafety
end
end
# Attach the notification text to the dossier's message thread.
def create_commentaire_for_notification(dossier, subject, body)
params = { body: ["[#{subject}]", body].join("<br><br>") }
commentaire = CommentaireService.build_with_email(CONTACT_EMAIL, dossier, params)
commentaire.save!
end
end
suppression de l'ajout en PJ
# Preview all emails at http://localhost:3000/rails/mailers/notification_mailer
# A Notification is attached as a Comment to the relevant discussion,
# then sent by email to the user.
#
# The subject and body of a Notification can be customized by each demarche.
#
class NotificationMailer < ApplicationMailer
helper ServiceHelper
# "Dossier received" notification using the procedure's template.
def send_dossier_received(dossier)
send_notification(dossier, dossier.procedure.received_mail_template)
end
# "Dossier initiated" notification using the procedure's template.
def send_initiated_notification(dossier)
send_notification(dossier, dossier.procedure.initiated_mail_template)
end
# Final-decision notifications.
# NOTE(review): send_final_notification is not defined in this file —
# presumably inherited or defined elsewhere; confirm before relying on
# these three methods.
def send_closed_notification(dossier)
send_final_notification(dossier, dossier.procedure.closed_mail_template)
end
def send_refused_notification(dossier)
send_final_notification(dossier, dossier.procedure.refused_mail_template)
end
def send_without_continuation_notification(dossier)
send_final_notification(dossier, dossier.procedure.without_continuation_mail_template)
end
private
# Record the notification as a Commentaire on the dossier, then e-mail
# the user with the demarche-specific subject/body.
def send_notification(dossier, mail_template)
email = dossier.user.email
subject = mail_template.subject_for_dossier(dossier)
body = mail_template.body_for_dossier(dossier)
create_commentaire_for_notification(dossier, subject, body)
# Inline the procedure logo (when present) so the layout can show it.
if dossier.procedure.logo?
begin
logo_filename = dossier.procedure.logo.filename
attachments.inline[logo_filename] = dossier.procedure.logo.read
@logo_url = attachments[logo_filename].url
rescue StandardError => e
# A problem occured when reading logo, maybe the logo is missing and we should clean the procedure to remove logo reference ?
Raven.capture_exception(e)
end
end
@dossier = dossier
@service = dossier.procedure.service
mail(subject: subject, to: email) do |format|
# rubocop:disable Rails/OutputSafety
format.html { render(html: body.html_safe, layout: 'mailers/notification') }
# rubocop:enable Rails/OutputSafety
end
end
# Attach the notification text to the dossier's message thread.
def create_commentaire_for_notification(dossier, subject, body)
params = { body: ["[#{subject}]", body].join("<br><br>") }
commentaire = CommentaireService.build_with_email(CONTACT_EMAIL, dossier, params)
commentaire.save!
end
end
|
# Mailer for user notifications: one-off immediate mails plus periodic
# digest ("summary") mails per Grätzl (neighbourhood) and per user.
class NotificationMailer < ApplicationMailer
  before_action :prepend_view_paths

  # Delivers a single notification right away, using the notification's own
  # template under the immediate/ views directory.
  def send_immediate(notification)
    @notification = notification
    @user = @notification.user
    headers(
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "notification-mail",
    )
    mail(
      to: @user.email,
      subject: @notification.mail_subject,
      template_name: "immediate/#{@notification.mail_template}"
    )
  end

  # Section heading (German) => notification classes shown under that
  # heading in the Grätzl summary mail.
  GRAETZL_SUMMARY_BLOCKS = {
    'Neue Locations in deinem Grätzl' => [
      Notifications::NewLocation
    ],
    'Neue Treffen' => [
      Notifications::NewMeeting
    ],
    'Neue Location Updates' => [
      Notifications::NewLocationPost
    ],
    'Neue Ideen im Grätzl' => [
      Notifications::NewUserPost, Notifications::NewAdminPost
    ],
    'Neue Gruppen' => [
      Notifications::NewGroup
    ],
    'Neue Toolteiler in deinem Grätzl' => [
      Notifications::NewToolOffer
    ],
    'Neuer Raumteiler Call' => [
      Notifications::NewRoomCall
    ],
    'Neue Räume zum Andocken' => [
      Notifications::NewRoomOffer
    ],
    'Auf der Suche nach Raum' => [
      Notifications::NewRoomDemand
    ],
  }

  # Sends the neighbourhood digest for the user's Grätzl, then marks the
  # included notifications as sent.
  def summary_graetzl(user, period)
    @user, @period = user, period
    @notifications = user.pending_notifications(@period).where(
      type: GRAETZL_SUMMARY_BLOCKS.values.flatten.map(&:to_s)
    )
    # BEGIN CHANGES MICHAEL
    # CLEAN NOTIFICATIONS FROM NOT NECESSARY DOUBLES (could be in meetings -> because of rake task set new date)
    # NOTE(review): @notifications is an ActiveRecord relation —
    # Relation#delete removes the record from the database (not merely from
    # this mail), and the relation is mutated while being iterated; confirm
    # both effects are intended.
    notifications = {}
    @notifications.each do |notification|
      next if notification.activity.key != 'meeting.create' # check only for meeting.create
      @notifications.delete(notification) if notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      #puts notification.activity.key
      #puts notification.activity.id
      #puts notification.activity.trackable.id
      notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    #puts notifications
    # END CHANGES MICHAEL
    return if @notifications.empty?
    headers(
      "X-MC-Tags" => "summary-graetzl-mail",
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "notification-mail",
    )
    mail(
      to: @user.email,
      from: "imGrätzl.at <neuigkeiten@imgraetzl.at>",
      subject: "Neues aus dem Grätzl #{@user.graetzl.name}",
    )
    @notifications.update_all(sent: true)
  end

  # Section heading (German) => notification classes for the personal digest.
  PERSONAL_SUMMARY_BLOCKS = {
    'Neuer Kommentar auf deiner Pinnwand' => [
      Notifications::NewWallComment,
    ],
    "Änderungen an einem Treffen" => [
      Notifications::MeetingCancelled, Notifications::MeetingUpdated,
    ],
    "Neuer Kommentar bei" => [
      Notifications::CommentInMeeting, Notifications::CommentInUsersMeeting,
      Notifications::CommentOnAdminPost, Notifications::CommentOnLocationPost,
      Notifications::CommentOnRoomDemand, Notifications::CommentOnRoomOffer,
      Notifications::CommentOnUserPost, Notifications::AlsoCommentedToolOffer,
    ],
    'Ebenfalls kommentiert' => [
      Notifications::AlsoCommentedAdminPost, Notifications::AlsoCommentedLocationPost,
      Notifications::AlsoCommentedMeeting, Notifications::AlsoCommentedRoomDemand,
      Notifications::AlsoCommentedRoomOffer, Notifications::AlsoCommentedUserPost,
      Notifications::AlsoCommentedToolOffer,
    ]
  }

  # Group-related notification classes included in the personal digest.
  GROUP_SUMMARY_TYPES = [
    Notifications::NewGroupMeeting,
    Notifications::NewGroupDiscussion,
    Notifications::NewGroupPost,
    Notifications::NewGroupUser,
    Notifications::CommentOnDiscussionPost,
    Notifications::AlsoCommentedDiscussionPost,
  ]

  # Sends the personal digest (attendees, personal and group notifications),
  # each bucket deduplicated by activity, then marks everything as sent.
  def summary_personal(user, period)
    @user, @period = user, period
    @notifications = {}
    # NOTE(review): key is spelled :atendees (sic); the views presumably read
    # the same key — confirm before renaming.
    @notifications[:atendees] = user.pending_notifications(@period).where(
      type: "Notifications::AttendeeInUsersMeeting"
    )
    @notifications[:personal] = user.pending_notifications(@period).where(
      type: PERSONAL_SUMMARY_BLOCKS.values.flatten.map(&:to_s)
    )
    @notifications[:groups] = user.pending_notifications(@period).where(
      type: GROUP_SUMMARY_TYPES.map(&:to_s)
    )
    # Debug output: writes to stdout on every delivery.
    puts @notifications
    # BEGIN CHANGES MICHAEL
    # CLEAN PERSONAL NOTIFICATIONS FROM NOT NECESSARY DOUBLES
    # NOTE(review): same Relation#delete / mutation-while-iterating caveat as
    # in summary_graetzl above.
    personal_notifications = {}
    @notifications[:personal].each do |notification|
      @notifications[:personal].delete(notification) if personal_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      personal_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    puts '--------- PERSONAL NOTIFICATIONS CLEANED: -------'
    puts personal_notifications
    # CLEAN GROUP NOTIFICATIONS FROM NOT NECESSARY DOUBLES
    group_notifications = {}
    @notifications[:groups].each do |notification|
      @notifications[:groups].delete(notification) if group_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      group_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    puts '--------- GROUP NOTIFICATIONS CLEANED: -------'
    puts group_notifications
    # END CHANGES MICHAEL
    puts @notifications
    if @notifications.values.all?(&:empty?)
      return
    end
    headers(
      "X-MC-Tags" => "summary-personal-mail",
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "summary-mail",
    )
    mail(
      to: @user.email,
      from: "imGrätzl.at | Updates <updates@imgraetzl.at>",
      subject: "Persönliche Neuigkeiten für #{@user.first_name} zusammengefasst",
    )
    @notifications.values.each { |n| n.update_all(sent: true) }
  end

  private

  # Resolve templates from this mailer's dedicated views directory first.
  def prepend_view_paths
    prepend_view_path 'app/views/mailers/notification_mailer'
  end
end
Insert logging statements.
# Mailer for user notifications: one-off immediate mails plus periodic
# digest ("summary") mails per Grätzl (neighbourhood) and per user.
class NotificationMailer < ApplicationMailer
  before_action :prepend_view_paths

  # Delivers a single notification right away, using the notification's own
  # template under the immediate/ views directory.
  def send_immediate(notification)
    @notification = notification
    @user = @notification.user
    headers(
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "notification-mail",
    )
    mail(
      to: @user.email,
      subject: @notification.mail_subject,
      template_name: "immediate/#{@notification.mail_template}"
    )
  end

  # Section heading (German) => notification classes shown under that
  # heading in the Grätzl summary mail.
  GRAETZL_SUMMARY_BLOCKS = {
    'Neue Locations in deinem Grätzl' => [
      Notifications::NewLocation
    ],
    'Neue Treffen' => [
      Notifications::NewMeeting
    ],
    'Neue Location Updates' => [
      Notifications::NewLocationPost
    ],
    'Neue Ideen im Grätzl' => [
      Notifications::NewUserPost, Notifications::NewAdminPost
    ],
    'Neue Gruppen' => [
      Notifications::NewGroup
    ],
    'Neue Toolteiler in deinem Grätzl' => [
      Notifications::NewToolOffer
    ],
    'Neuer Raumteiler Call' => [
      Notifications::NewRoomCall
    ],
    'Neue Räume zum Andocken' => [
      Notifications::NewRoomOffer
    ],
    'Auf der Suche nach Raum' => [
      Notifications::NewRoomDemand
    ],
  }

  # Sends the neighbourhood digest for the user's Grätzl, then marks the
  # included notifications as sent.
  def summary_graetzl(user, period)
    @user, @period = user, period
    @notifications = user.pending_notifications(@period).where(
      type: GRAETZL_SUMMARY_BLOCKS.values.flatten.map(&:to_s)
    )
    # BEGIN CHANGES MICHAEL
    # CLEAN NOTIFICATIONS FROM NOT NECESSARY DOUBLES (could be in meetings -> because of rake task set new date)
    # NOTE(review): @notifications is an ActiveRecord relation —
    # Relation#delete removes the record from the database (not merely from
    # this mail), and the relation is mutated while being iterated; confirm
    # both effects are intended.
    notifications = {}
    @notifications.each do |notification|
      next if notification.activity.key != 'meeting.create' # check only for meeting.create
      @notifications.delete(notification) if notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      #puts notification.activity.key
      #puts notification.activity.id
      #puts notification.activity.trackable.id
      notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    #puts notifications
    # END CHANGES MICHAEL
    return if @notifications.empty?
    headers(
      "X-MC-Tags" => "summary-graetzl-mail",
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "notification-mail",
    )
    mail(
      to: @user.email,
      from: "imGrätzl.at <neuigkeiten@imgraetzl.at>",
      subject: "Neues aus dem Grätzl #{@user.graetzl.name}",
    )
    @notifications.update_all(sent: true)
  end

  # Section heading (German) => notification classes for the personal digest.
  PERSONAL_SUMMARY_BLOCKS = {
    'Neuer Kommentar auf deiner Pinnwand' => [
      Notifications::NewWallComment,
    ],
    "Änderungen an einem Treffen" => [
      Notifications::MeetingCancelled, Notifications::MeetingUpdated,
    ],
    "Neuer Kommentar bei" => [
      Notifications::CommentInMeeting, Notifications::CommentInUsersMeeting,
      Notifications::CommentOnAdminPost, Notifications::CommentOnLocationPost,
      Notifications::CommentOnRoomDemand, Notifications::CommentOnRoomOffer,
      Notifications::CommentOnUserPost, Notifications::AlsoCommentedToolOffer,
    ],
    'Ebenfalls kommentiert' => [
      Notifications::AlsoCommentedAdminPost, Notifications::AlsoCommentedLocationPost,
      Notifications::AlsoCommentedMeeting, Notifications::AlsoCommentedRoomDemand,
      Notifications::AlsoCommentedRoomOffer, Notifications::AlsoCommentedUserPost,
      Notifications::AlsoCommentedToolOffer,
    ]
  }

  # Group-related notification classes included in the personal digest.
  GROUP_SUMMARY_TYPES = [
    Notifications::NewGroupMeeting,
    Notifications::NewGroupDiscussion,
    Notifications::NewGroupPost,
    Notifications::NewGroupUser,
    Notifications::CommentOnDiscussionPost,
    Notifications::AlsoCommentedDiscussionPost,
  ]

  # Sends the personal digest (attendees, personal and group notifications),
  # each bucket deduplicated by activity, then marks everything as sent.
  def summary_personal(user, period)
    @user, @period = user, period
    @notifications = {}
    # NOTE(review): key is spelled :atendees (sic); the views presumably read
    # the same key — confirm before renaming.
    @notifications[:atendees] = user.pending_notifications(@period).where(
      type: "Notifications::AttendeeInUsersMeeting"
    )
    @notifications[:personal] = user.pending_notifications(@period).where(
      type: PERSONAL_SUMMARY_BLOCKS.values.flatten.map(&:to_s)
    )
    @notifications[:groups] = user.pending_notifications(@period).where(
      type: GROUP_SUMMARY_TYPES.map(&:to_s)
    )
    # Debug output: writes to stdout on every delivery.
    puts @notifications
    # BEGIN CHANGES MICHAEL
    # CLEAN PERSONAL NOTIFICATIONS FROM NOT NECESSARY DOUBLES
    # NOTE(review): same Relation#delete / mutation-while-iterating caveat as
    # in summary_graetzl above.
    personal_notifications = {}
    @notifications[:personal].each do |notification|
      @notifications[:personal].delete(notification) if personal_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      personal_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    puts '--------- PERSONAL NOTIFICATIONS CLEANED: -------'
    puts personal_notifications
    # CLEAN GROUP NOTIFICATIONS FROM NOT NECESSARY DOUBLES
    group_notifications = {}
    puts @notifications[:groups]
    @notifications[:groups].each do |notification|
      @notifications[:groups].delete(notification) if group_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"].present?
      group_notifications["#{notification.activity.key}.#{notification.activity.id}.#{notification.activity.trackable.id}"] = true
    end
    puts '--------- GROUP NOTIFICATIONS CLEANED: -------'
    puts group_notifications
    # END CHANGES MICHAEL
    puts @notifications[:groups]
    puts @notifications
    if @notifications.values.all?(&:empty?)
      return
    end
    headers(
      "X-MC-Tags" => "summary-personal-mail",
      "X-MC-GoogleAnalytics" => 'staging.imgraetzl.at, www.imgraetzl.at',
      "X-MC-GoogleAnalyticsCampaign" => "summary-mail",
    )
    mail(
      to: @user.email,
      from: "imGrätzl.at | Updates <updates@imgraetzl.at>",
      subject: "Persönliche Neuigkeiten für #{@user.first_name} zusammengefasst",
    )
    @notifications.values.each { |n| n.update_all(sent: true) }
  end

  private

  # Resolve templates from this mailer's dedicated views directory first.
  def prepend_view_paths
    prepend_view_path 'app/views/mailers/notification_mailer'
  end
end
|
Fixed forced HTTP port.
git-svn-id: 36b044a1d4e5f74e123a9788437846fc1f2f0dec@318 fd6d9157-2c40-0410-9642-e7077f30ef04
|
require 'date'
module Agents
  # Polls Wunderground on a schedule and creates one event containing
  # tomorrow's forecast for the configured zipcode.
  class WeatherAgent < Agent
    cannot_receive_events!

    description <<-MD
      The WeatherAgent creates an event for the following day's weather at `zipcode`.

      You must setup an API key for Wunderground in order to use this Agent.
    MD

    event_description <<-MD
      Events look like this:

          {
            :zipcode => 12345,
            :date => { :epoch=>"1357959600", :pretty=>"10:00 PM EST on January 11, 2013" },
            :high => { :fahrenheit=>"64", :celsius=>"18" },
            :low => { :fahrenheit=>"52", :celsius=>"11" },
            :conditions => "Rain Showers",
            :icon=>"rain",
            :icon_url => "http://icons-ak.wxug.com/i/c/k/rain.gif",
            :skyicon => "mostlycloudy",
            :pop => 80,
            :qpf_allday => { :in=>0.24, :mm=>6.1 },
            :qpf_day => { :in=>0.13, :mm=>3.3 },
            :qpf_night => { :in=>0.03, :mm=>0.8 },
            :snow_allday => { :in=>0, :cm=>0 },
            :snow_day => { :in=>0, :cm=>0 },
            :snow_night => { :in=>0, :cm=>0 },
            :maxwind => { :mph=>15, :kph=>24, :dir=>"SSE", :degrees=>160 },
            :avewind => { :mph=>9, :kph=>14, :dir=>"SSW", :degrees=>194 },
            :avehumidity => 85,
            :maxhumidity => 93,
            :minhumidity => 63
          }
    MD

    default_schedule "midnight"

    # Healthy when an event was created within the last 2 days and carries a
    # payload.
    def working?
      (event = event_created_within(2.days)) && event.payload.present?
    end

    # Wunderground API client.
    # NOTE(review): the API key is a hardcoded placeholder string — it
    # presumably should come from the agent's options/configuration; confirm.
    def wunderground
      Wunderground.new("your-api-key")
    end

    def default_options
      { :zipcode => "94103" }
    end

    def validate_options
      errors.add(:base, "zipcode is required") unless options[:zipcode].present?
    end

    # Fetches the simple forecast and emits an event for tomorrow's entry.
    def check
      wunderground.forecast_for(options[:zipcode])["forecast"]["simpleforecast"]["forecastday"].each do |day|
        if is_tomorrow?(day)
          create_event :payload => day.merge(:zipcode => options[:zipcode])
        end
      end
    end

    # True when the forecast entry's epoch falls on tomorrow's date in the
    # application time zone.
    def is_tomorrow?(day)
      Time.zone.at(day["date"]["epoch"].to_i).to_date == Time.zone.now.tomorrow.to_date
    end
  end
end
Needs to run earlier in the day.
require 'date'
module Agents
# Polls Wunderground on a schedule and creates one event containing
# tomorrow's forecast for the configured zipcode.
class WeatherAgent < Agent
  cannot_receive_events!

  description <<-MD
    The WeatherAgent creates an event for the following day's weather at `zipcode`.

    You must setup an API key for Wunderground in order to use this Agent.
  MD

  event_description <<-MD
    Events look like this:

        {
          :zipcode => 12345,
          :date => { :epoch=>"1357959600", :pretty=>"10:00 PM EST on January 11, 2013" },
          :high => { :fahrenheit=>"64", :celsius=>"18" },
          :low => { :fahrenheit=>"52", :celsius=>"11" },
          :conditions => "Rain Showers",
          :icon=>"rain",
          :icon_url => "http://icons-ak.wxug.com/i/c/k/rain.gif",
          :skyicon => "mostlycloudy",
          :pop => 80,
          :qpf_allday => { :in=>0.24, :mm=>6.1 },
          :qpf_day => { :in=>0.13, :mm=>3.3 },
          :qpf_night => { :in=>0.03, :mm=>0.8 },
          :snow_allday => { :in=>0, :cm=>0 },
          :snow_day => { :in=>0, :cm=>0 },
          :snow_night => { :in=>0, :cm=>0 },
          :maxwind => { :mph=>15, :kph=>24, :dir=>"SSE", :degrees=>160 },
          :avewind => { :mph=>9, :kph=>14, :dir=>"SSW", :degrees=>194 },
          :avehumidity => 85,
          :maxhumidity => 93,
          :minhumidity => 63
        }
  MD

  # Runs at 8pm so the "tomorrow" forecast is fetched the evening before.
  default_schedule "8pm"

  # Healthy when an event was created within the last 2 days and carries a
  # payload.
  def working?
    (event = event_created_within(2.days)) && event.payload.present?
  end

  # Wunderground API client.
  # NOTE(review): the API key is a hardcoded placeholder string — it
  # presumably should come from the agent's options/configuration; confirm.
  def wunderground
    Wunderground.new("your-api-key")
  end

  def default_options
    { :zipcode => "94103" }
  end

  def validate_options
    errors.add(:base, "zipcode is required") unless options[:zipcode].present?
  end

  # Fetches the simple forecast and emits an event for tomorrow's entry.
  def check
    wunderground.forecast_for(options[:zipcode])["forecast"]["simpleforecast"]["forecastday"].each do |day|
      if is_tomorrow?(day)
        create_event :payload => day.merge(:zipcode => options[:zipcode])
      end
    end
  end

  # True when the forecast entry's epoch falls on tomorrow's date in the
  # application time zone.
  def is_tomorrow?(day)
    Time.zone.at(day["date"]["epoch"].to_i).to_date == Time.zone.now.tomorrow.to_date
  end
end
end |
require 'nokogiri'
require 'date'
module Agents
class WebsiteAgent < Agent
include WebRequestConcern
can_dry_run!
default_schedule "every_12h"
UNIQUENESS_LOOK_BACK = 200
UNIQUENESS_FACTOR = 3
description <<-MD
The WebsiteAgent scrapes a website, XML document, or JSON feed and creates Events based on the results.
Specify a `url` and select a `mode` for when to create Events based on the scraped data, either `all` or `on_change`.
`url` can be a single url, or an array of urls (for example, for multiple pages with the exact same structure but different content to scrape)
The `type` value can be `xml`, `html`, `json`, or `text`.
To tell the Agent how to parse the content, specify `extract` as a hash with keys naming the extractions and values of hashes.
When parsing HTML or XML, these sub-hashes specify how each extraction should be done. The Agent first selects a node set from the document for each extraction key by evaluating either a CSS selector in `css` or an XPath expression in `xpath`. It then evaluates an XPath expression in `value` on each node in the node set, converting the result into string. Here's an example:
"extract": {
"url": { "css": "#comic img", "value": "@src" },
"title": { "css": "#comic img", "value": "@title" },
"body_text": { "css": "div.main", "value": ".//text()" }
}
"@_attr_" is the XPath expression to extract the value of an attribute named _attr_ from a node, and ".//text()" is to extract all the enclosed texts. You can also use [XPath functions](http://www.w3.org/TR/xpath/#section-String-Functions) like `normalize-space` to strip and squeeze whitespace, `substring-after` to extract part of a text, and `translate` to remove comma from a formatted number, etc. Note that these functions take a string, not a node set, so what you may think would be written as `normalize-space(.//text())` should actually be `normalize-space(.)`.
When parsing JSON, these sub-hashes specify [JSONPaths](http://goessner.net/articles/JsonPath/) to the values that you care about. For example:
"extract": {
"title": { "path": "results.data[*].title" },
"description": { "path": "results.data[*].description" }
}
When parsing text, each sub-hash should contain a `regexp` and `index`. Output text is matched against the regular expression repeatedly from the beginning through to the end, collecting a captured group specified by `index` in each match. Each index should be either an integer or a string name which corresponds to <code>(?<<em>name</em>>...)</code>. For example, to parse lines of <code><em>word</em>: <em>definition</em></code>, the following should work:
"extract": {
"word": { "regexp": "^(.+?): (.+)$", index: 1 },
"definition": { "regexp": "^(.+?): (.+)$", index: 2 }
}
Or if you prefer names to numbers for index:
"extract": {
"word": { "regexp": "^(?<word>.+?): (?<definition>.+)$", index: 'word' },
"definition": { "regexp": "^(?<word>.+?): (?<definition>.+)$", index: 'definition' }
}
To extract the whole content as one event:
"extract": {
"content": { "regexp": "\A(?m:.)*\z", index: 0 }
}
Beware that `.` does not match the newline character (LF) unless the `m` flag is in effect, and `^`/`$` basically match every line beginning/end. See [this document](http://ruby-doc.org/core-#{RUBY_VERSION}/doc/regexp_rdoc.html) to learn the regular expression variant used in this service.
Note that for all of the formats, whatever you extract MUST have the same number of matches for each extractor. E.g., if you're extracting rows, all extractors must match all rows. For generating CSS selectors, something like [SelectorGadget](http://selectorgadget.com) may be helpful.
Can be configured to use HTTP basic auth by including the `basic_auth` parameter with `"username:password"`, or `["username", "password"]`.
Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent. This is only used to set the "working" status.
Set `uniqueness_look_back` to limit the number of events checked for uniqueness (typically for performance). This defaults to the larger of #{UNIQUENESS_LOOK_BACK} or #{UNIQUENESS_FACTOR}x the number of detected received results.
Set `force_encoding` to an encoding name if the website does not return a Content-Type header with a proper charset.
Set `user_agent` to a custom User-Agent name if the website does not like the default value (`#{default_user_agent}`).
The `headers` field is optional. When present, it should be a hash of headers to send with the request.
Set `disable_ssl_verification` to `true` to disable ssl verification.
Set `unzip` to `gzip` to inflate the resource using gzip.
The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload. If you specify `merge` as the mode, it will retain the old payload and update it with the new values.
In Liquid templating, the following variable is available:
* `_response_`: A response object with the following keys:
* `status`: HTTP status as integer. (Almost always 200)
* `headers`: Response headers; for example, `{{ _response_.headers.Content-Type }}` expands to the value of the Content-Type header. Keys are insensitive to cases and -/_.
MD
event_description do
"Events will have the following fields:\n\n %s" % [
Utils.pretty_print(Hash[options['extract'].keys.map { |key|
[key, "..."]
}])
]
end
def working?
event_created_within?(interpolated['expected_update_period_in_days']) && !recent_error_logs?
end
def default_options
{
'expected_update_period_in_days' => "2",
'url' => "http://xkcd.com",
'type' => "html",
'mode' => "on_change",
'extract' => {
'url' => { 'css' => "#comic img", 'value' => "@src" },
'title' => { 'css' => "#comic img", 'value' => "@alt" },
'hovertext' => { 'css' => "#comic img", 'value' => "@title" }
}
}
end
def validate_options
# Check for required fields
errors.add(:base, "url and expected_update_period_in_days are required") unless options['expected_update_period_in_days'].present? && options['url'].present?
if !options['extract'].present? && extraction_type != "json"
errors.add(:base, "extract is required for all types except json")
end
# Check for optional fields
if options['mode'].present?
errors.add(:base, "mode must be set to on_change, all or merge") unless %w[on_change all merge].include?(options['mode'])
end
if options['expected_update_period_in_days'].present?
errors.add(:base, "Invalid expected_update_period_in_days format") unless is_positive_integer?(options['expected_update_period_in_days'])
end
if options['uniqueness_look_back'].present?
errors.add(:base, "Invalid uniqueness_look_back format") unless is_positive_integer?(options['uniqueness_look_back'])
end
if (encoding = options['force_encoding']).present?
case encoding
when String
begin
Encoding.find(encoding)
rescue ArgumentError
errors.add(:base, "Unknown encoding: #{encoding.inspect}")
end
else
errors.add(:base, "force_encoding must be a string")
end
end
validate_web_request_options!
end
def check
check_urls(interpolated['url'])
end
def check_urls(in_url)
return unless in_url.present?
Array(in_url).each do |url|
check_url(url)
end
end
def check_url(url, payload = {})
log "Fetching #{url}"
response = faraday.get(url)
raise "Failed: #{response.inspect}" unless response.success?
interpolation_context.stack {
interpolation_context['_response_'] = ResponseDrop.new(response)
body = response.body
if (encoding = interpolated['force_encoding']).present?
body = body.encode(Encoding::UTF_8, encoding)
end
if interpolated['unzip'] == "gzip"
body = ActiveSupport::Gzip.decompress(body)
end
doc = parse(body)
if extract_full_json?
if store_payload!(previous_payloads(1), doc)
log "Storing new result for '#{name}': #{doc.inspect}"
create_event payload: payload.merge(doc)
end
return
end
output =
case extraction_type
when 'json'
extract_json(doc)
when 'text'
extract_text(doc)
else
extract_xml(doc)
end
num_unique_lengths = interpolated['extract'].keys.map { |name| output[name].length }.uniq
if num_unique_lengths.length != 1
raise "Got an uneven number of matches for #{interpolated['name']}: #{interpolated['extract'].inspect}"
end
old_events = previous_payloads num_unique_lengths.first
num_unique_lengths.first.times do |index|
result = {}
interpolated['extract'].keys.each do |name|
result[name] = output[name][index]
if name.to_s == 'url'
result[name] = (response.env[:url] + result[name]).to_s
end
end
if store_payload!(old_events, result)
log "Storing new parsed result for '#{name}': #{result.inspect}"
create_event payload: payload.merge(result)
end
end
}
rescue => e
error "Error when fetching url: #{e.message}\n#{e.backtrace.join("\n")}"
end
def receive(incoming_events)
incoming_events.each do |event|
interpolate_with(event) do
url_to_scrape = event.payload['url']
next unless url_to_scrape =~ /^https?:\/\//i
check_url(url_to_scrape,
interpolated['mode'].to_s == "merge" ? event.payload : {})
end
end
end
private
# This method returns true if the result should be stored as a new event.
# If mode is set to 'on_change', this method may return false and update an existing
# event to expire further in the future.
def store_payload!(old_events, result)
case interpolated['mode'].presence
when 'on_change'
result_json = result.to_json
old_events.each do |old_event|
if old_event.payload.to_json == result_json
old_event.expires_at = new_event_expiration_date
old_event.save!
return false
end
end
true
when 'all', 'merge', ''
true
else
raise "Illegal options[mode]: #{interpolated['mode']}"
end
end
def previous_payloads(num_events)
if interpolated['uniqueness_look_back'].present?
look_back = interpolated['uniqueness_look_back'].to_i
else
# Larger of UNIQUENESS_FACTOR * num_events and UNIQUENESS_LOOK_BACK
look_back = UNIQUENESS_FACTOR * num_events
if look_back < UNIQUENESS_LOOK_BACK
look_back = UNIQUENESS_LOOK_BACK
end
end
events.order("id desc").limit(look_back) if interpolated['mode'] == "on_change"
end
def extract_full_json?
!interpolated['extract'].present? && extraction_type == "json"
end
def extraction_type
(interpolated['type'] || begin
case interpolated['url']
when /\.(rss|xml)$/i
"xml"
when /\.json$/i
"json"
when /\.(txt|text)$/i
"text"
else
"html"
end
end).to_s
end
def extract_each(doc, &block)
interpolated['extract'].each_with_object({}) { |(name, extraction_details), output|
output[name] = block.call(extraction_details)
}
end
def extract_json(doc)
extract_each(doc) { |extraction_details|
result = Utils.values_at(doc, extraction_details['path'])
log "Extracting #{extraction_type} at #{extraction_details['path']}: #{result}"
result
}
end
def extract_text(doc)
extract_each(doc) { |extraction_details|
regexp = Regexp.new(extraction_details['regexp'])
result = []
doc.scan(regexp) {
result << Regexp.last_match[extraction_details['index']]
}
log "Extracting #{extraction_type} at #{regexp}: #{result}"
result
}
end
def extract_xml(doc)
extract_each(doc) { |extraction_details|
case
when css = extraction_details['css']
nodes = doc.css(css)
when xpath = extraction_details['xpath']
doc.remove_namespaces! # ignore xmlns, useful when parsing atom feeds
nodes = doc.xpath(xpath)
else
raise '"css" or "xpath" is required for HTML or XML extraction'
end
case nodes
when Nokogiri::XML::NodeSet
result = nodes.map { |node|
case value = node.xpath(extraction_details['value'])
when Float
# Node#xpath() returns any numeric value as float;
# convert it to integer as appropriate.
value = value.to_i if value.to_i == value
end
value.to_s
}
else
raise "The result of HTML/XML extraction was not a NodeSet"
end
log "Extracting #{extraction_type} at #{xpath || css}: #{result}"
result
}
end
def parse(data)
case extraction_type
when "xml"
Nokogiri::XML(data)
when "json"
JSON.parse(data)
when "html"
Nokogiri::HTML(data)
when "text"
data
else
raise "Unknown extraction type #{extraction_type}"
end
end
def is_positive_integer?(value)
Integer(value) >= 0
rescue
false
end
# Wraps Faraday::Response
class ResponseDrop < LiquidDroppable::Drop
def headers
HeaderDrop.new(@object.headers)
end
# Integer value of HTTP status
def status
@object.status
end
end
# Wraps Faraday::Utilsa::Headers
class HeaderDrop < LiquidDroppable::Drop
def before_method(name)
@object[name.tr('_', '-')]
end
end
end
end
Refactor WebsiteAgent::store_payload!
This is to avoid a jump (return) from within a block where possible.
require 'nokogiri'
require 'date'
module Agents
class WebsiteAgent < Agent
include WebRequestConcern
can_dry_run!
default_schedule "every_12h"
UNIQUENESS_LOOK_BACK = 200
UNIQUENESS_FACTOR = 3
description <<-MD
The WebsiteAgent scrapes a website, XML document, or JSON feed and creates Events based on the results.
Specify a `url` and select a `mode` for when to create Events based on the scraped data, either `all` or `on_change`.
`url` can be a single url, or an array of urls (for example, for multiple pages with the exact same structure but different content to scrape)
The `type` value can be `xml`, `html`, `json`, or `text`.
To tell the Agent how to parse the content, specify `extract` as a hash with keys naming the extractions and values of hashes.
When parsing HTML or XML, these sub-hashes specify how each extraction should be done. The Agent first selects a node set from the document for each extraction key by evaluating either a CSS selector in `css` or an XPath expression in `xpath`. It then evaluates an XPath expression in `value` on each node in the node set, converting the result into string. Here's an example:
"extract": {
"url": { "css": "#comic img", "value": "@src" },
"title": { "css": "#comic img", "value": "@title" },
"body_text": { "css": "div.main", "value": ".//text()" }
}
"@_attr_" is the XPath expression to extract the value of an attribute named _attr_ from a node, and ".//text()" is to extract all the enclosed texts. You can also use [XPath functions](http://www.w3.org/TR/xpath/#section-String-Functions) like `normalize-space` to strip and squeeze whitespace, `substring-after` to extract part of a text, and `translate` to remove comma from a formatted number, etc. Note that these functions take a string, not a node set, so what you may think would be written as `normalize-space(.//text())` should actually be `normalize-space(.)`.
When parsing JSON, these sub-hashes specify [JSONPaths](http://goessner.net/articles/JsonPath/) to the values that you care about. For example:
"extract": {
"title": { "path": "results.data[*].title" },
"description": { "path": "results.data[*].description" }
}
When parsing text, each sub-hash should contain a `regexp` and `index`. Output text is matched against the regular expression repeatedly from the beginning through to the end, collecting a captured group specified by `index` in each match. Each index should be either an integer or a string name which corresponds to <code>(?<<em>name</em>>...)</code>. For example, to parse lines of <code><em>word</em>: <em>definition</em></code>, the following should work:
"extract": {
"word": { "regexp": "^(.+?): (.+)$", index: 1 },
"definition": { "regexp": "^(.+?): (.+)$", index: 2 }
}
Or if you prefer names to numbers for index:
"extract": {
"word": { "regexp": "^(?<word>.+?): (?<definition>.+)$", index: 'word' },
"definition": { "regexp": "^(?<word>.+?): (?<definition>.+)$", index: 'definition' }
}
To extract the whole content as one event:
"extract": {
"content": { "regexp": "\A(?m:.)*\z", index: 0 }
}
Beware that `.` does not match the newline character (LF) unless the `m` flag is in effect, and `^`/`$` basically match every line beginning/end. See [this document](http://ruby-doc.org/core-#{RUBY_VERSION}/doc/regexp_rdoc.html) to learn the regular expression variant used in this service.
Note that for all of the formats, whatever you extract MUST have the same number of matches for each extractor. E.g., if you're extracting rows, all extractors must match all rows. For generating CSS selectors, something like [SelectorGadget](http://selectorgadget.com) may be helpful.
Can be configured to use HTTP basic auth by including the `basic_auth` parameter with `"username:password"`, or `["username", "password"]`.
Set `expected_update_period_in_days` to the maximum amount of time that you'd expect to pass between Events being created by this Agent. This is only used to set the "working" status.
Set `uniqueness_look_back` to limit the number of events checked for uniqueness (typically for performance). This defaults to the larger of #{UNIQUENESS_LOOK_BACK} or #{UNIQUENESS_FACTOR}x the number of detected received results.
Set `force_encoding` to an encoding name if the website does not return a Content-Type header with a proper charset.
Set `user_agent` to a custom User-Agent name if the website does not like the default value (`#{default_user_agent}`).
The `headers` field is optional. When present, it should be a hash of headers to send with the request.
Set `disable_ssl_verification` to `true` to disable ssl verification.
Set `unzip` to `gzip` to inflate the resource using gzip.
The WebsiteAgent can also scrape based on incoming events. It will scrape the url contained in the `url` key of the incoming event payload. If you specify `merge` as the mode, it will retain the old payload and update it with the new values.
In Liquid templating, the following variable is available:
* `_response_`: A response object with the following keys:
* `status`: HTTP status as integer. (Almost always 200)
* `headers`: Response headers; for example, `{{ _response_.headers.Content-Type }}` expands to the value of the Content-Type header. Keys are insensitive to cases and -/_.
MD
event_description do
"Events will have the following fields:\n\n %s" % [
Utils.pretty_print(Hash[options['extract'].keys.map { |key|
[key, "..."]
}])
]
end
# The agent is "working" when an event was created within the expected
# update period and there are no recent error logs.
def working?
  return false unless event_created_within?(interpolated['expected_update_period_in_days'])
  !recent_error_logs?
end
# Default configuration: scrape xkcd.com's front page HTML and emit
# url/title/hovertext taken from the #comic image's attributes, storing
# results only when they change.
def default_options
  extract = {}
  { 'url' => '@src', 'title' => '@alt', 'hovertext' => '@title' }.each do |key, attribute|
    extract[key] = { 'css' => '#comic img', 'value' => attribute }
  end
  {
    'expected_update_period_in_days' => '2',
    'url' => 'http://xkcd.com',
    'type' => 'html',
    'mode' => 'on_change',
    'extract' => extract
  }
end
# Validates the agent's options hash, accumulating messages on +errors+.
# Required: url and expected_update_period_in_days; extract is required
# for every extraction type except full-JSON capture.
def validate_options
  # Check for required fields
  errors.add(:base, "url and expected_update_period_in_days are required") unless options['expected_update_period_in_days'].present? && options['url'].present?
  if !options['extract'].present? && extraction_type != "json"
    errors.add(:base, "extract is required for all types except json")
  end
  # Check for optional fields
  if options['mode'].present?
    errors.add(:base, "mode must be set to on_change, all or merge") unless %w[on_change all merge].include?(options['mode'])
  end
  if options['expected_update_period_in_days'].present?
    errors.add(:base, "Invalid expected_update_period_in_days format") unless is_positive_integer?(options['expected_update_period_in_days'])
  end
  if options['uniqueness_look_back'].present?
    errors.add(:base, "Invalid uniqueness_look_back format") unless is_positive_integer?(options['uniqueness_look_back'])
  end
  if (encoding = options['force_encoding']).present?
    case encoding
    when String
      begin
        # Verify the encoding name is known to Ruby before it is used at fetch time
        Encoding.find(encoding)
      rescue ArgumentError
        errors.add(:base, "Unknown encoding: #{encoding.inspect}")
      end
    else
      errors.add(:base, "force_encoding must be a string")
    end
  end
  validate_web_request_options!
end
# Scheduled entry point: fetch each configured URL.
def check
  check_urls(interpolated['url'])
end
# Fetches one or many URLs; accepts either a single URL string or an
# array of URL strings. Does nothing for a blank value.
def check_urls(urls)
  return unless urls.present?
  Array(urls).each { |url| check_url(url) }
end
# Fetches +url+, extracts data per the configured extractors, and creates
# events for new results. +payload+ is merged into each created event
# (used for 'merge' mode on received events). Errors are logged, not raised.
def check_url(url, payload = {})
  log "Fetching #{url}"
  response = faraday.get(url)
  raise "Failed: #{response.inspect}" unless response.success?
  interpolation_context.stack {
    # Expose the response to Liquid templates as _response_
    interpolation_context['_response_'] = ResponseDrop.new(response)
    body = response.body
    if (encoding = interpolated['force_encoding']).present?
      # Server sent no/incorrect charset; transcode from the configured encoding
      body = body.encode(Encoding::UTF_8, encoding)
    end
    if interpolated['unzip'] == "gzip"
      body = ActiveSupport::Gzip.decompress(body)
    end
    doc = parse(body)
    # Full-JSON capture: no extractors configured, store the whole document
    if extract_full_json?
      if store_payload!(previous_payloads(1), doc)
        log "Storing new result for '#{name}': #{doc.inspect}"
        create_event payload: payload.merge(doc)
      end
      return
    end
    output =
      case extraction_type
      when 'json'
        extract_json(doc)
      when 'text'
        extract_text(doc)
      else
        extract_xml(doc)
      end
    # Every extractor must yield the same number of matches (one per "row")
    num_unique_lengths = interpolated['extract'].keys.map { |name| output[name].length }.uniq
    if num_unique_lengths.length != 1
      raise "Got an uneven number of matches for #{interpolated['name']}: #{interpolated['extract'].inspect}"
    end
    old_events = previous_payloads num_unique_lengths.first
    num_unique_lengths.first.times do |index|
      result = {}
      interpolated['extract'].keys.each do |name|
        result[name] = output[name][index]
        if name.to_s == 'url'
          # Resolve relative URLs against the final request URL
          result[name] = (response.env[:url] + result[name]).to_s
        end
      end
      if store_payload!(old_events, result)
        log "Storing new parsed result for '#{name}': #{result.inspect}"
        create_event payload: payload.merge(result)
      end
    end
  }
rescue => e
  error "Error when fetching url: #{e.message}\n#{e.backtrace.join("\n")}"
end
# Handles incoming events by scraping the URL found in each event's 'url'
# payload key. Only http(s) URLs are accepted; in 'merge' mode the old
# payload is carried into the newly created events.
def receive(incoming_events)
  incoming_events.each do |event|
    interpolate_with(event) do
      url_to_scrape = event.payload['url']
      next unless url_to_scrape =~ /^https?:\/\//i
      check_url(url_to_scrape,
                interpolated['mode'].to_s == "merge" ? event.payload : {})
    end
  end
end
private
# Returns true if the result should be stored as a new event.
# In 'on_change' mode a result identical to a recent event is not
# re-stored; instead the existing event's expiry is pushed out and
# false is returned.
def store_payload!(old_events, result)
  case interpolated['mode'].presence
  when 'all', 'merge', ''
    true
  when 'on_change'
    serialized = result.to_json
    duplicate = old_events.find { |event| event.payload.to_json == serialized }
    if duplicate
      duplicate.update!(expires_at: new_event_expiration_date)
      false
    else
      true
    end
  else
    raise "Illegal options[mode]: #{interpolated['mode']}"
  end
end
# Recent events to scan for duplicates in 'on_change' mode; nil otherwise.
# The look-back window is the explicit uniqueness_look_back option, or the
# larger of UNIQUENESS_FACTOR * num_events and UNIQUENESS_LOOK_BACK.
def previous_payloads(num_events)
  look_back =
    if interpolated['uniqueness_look_back'].present?
      interpolated['uniqueness_look_back'].to_i
    else
      [UNIQUENESS_FACTOR * num_events, UNIQUENESS_LOOK_BACK].max
    end
  events.order("id desc").limit(look_back) if interpolated['mode'] == "on_change"
end
# True when the whole parsed JSON document should be captured because no
# extractors are configured for a json-type agent.
def extract_full_json?
  extraction_type == "json" && !interpolated['extract'].present?
end
# The effective extraction type: an explicit 'type' option wins; otherwise
# it is inferred from the URL's file extension, defaulting to "html".
def extraction_type
  explicit = interpolated['type']
  return explicit.to_s if explicit
  case interpolated['url']
  when /\.(rss|xml)$/i  then "xml"
  when /\.json$/i       then "json"
  when /\.(txt|text)$/i then "text"
  else                       "html"
  end
end
# Runs the given block once per configured extractor, collecting the
# per-extractor results into a hash keyed by extractor name.
def extract_each(doc, &block)
  output = {}
  interpolated['extract'].each do |name, extraction_details|
    output[name] = block.call(extraction_details)
  end
  output
end
# Extracts values for each configured extractor by evaluating its 'path'
# expression against the parsed JSON document.
def extract_json(doc)
  extract_each(doc) { |extraction_details|
    result = Utils.values_at(doc, extraction_details['path'])
    log "Extracting #{extraction_type} at #{extraction_details['path']}: #{result}"
    result
  }
end
# Extracts values for each configured extractor by scanning the raw text
# with the extractor's 'regexp' and collecting capture group 'index' from
# every match.
def extract_text(doc)
  extract_each(doc) do |details|
    pattern = Regexp.new(details['regexp'])
    matches = []
    doc.scan(pattern) { matches << Regexp.last_match[details['index']] }
    log "Extracting #{extraction_type} at #{pattern}: #{matches}"
    matches
  end
end
# Extracts values for each configured extractor from an HTML/XML document:
# selects nodes via 'css' or 'xpath', then evaluates the 'value' XPath
# against each node and stringifies the result.
def extract_xml(doc)
  extract_each(doc) { |extraction_details|
    case
    when css = extraction_details['css']
      nodes = doc.css(css)
    when xpath = extraction_details['xpath']
      doc.remove_namespaces! # ignore xmlns, useful when parsing atom feeds
      nodes = doc.xpath(xpath)
    else
      raise '"css" or "xpath" is required for HTML or XML extraction'
    end
    case nodes
    when Nokogiri::XML::NodeSet
      result = nodes.map { |node|
        case value = node.xpath(extraction_details['value'])
        when Float
          # Node#xpath() returns any numeric value as float;
          # convert it to integer as appropriate.
          value = value.to_i if value.to_i == value
        end
        value.to_s
      }
    else
      raise "The result of HTML/XML extraction was not a NodeSet"
    end
    log "Extracting #{extraction_type} at #{xpath || css}: #{result}"
    result
  }
end
# Parses the response body according to the effective extraction type.
# "text" returns the raw string unchanged.
def parse(data)
  type = extraction_type
  case type
  when "json" then JSON.parse(data)
  when "xml"  then Nokogiri::XML(data)
  when "html" then Nokogiri::HTML(data)
  when "text" then data
  else raise "Unknown extraction type #{type}"
  end
end
# True when +value+ parses as a non-negative integer (note: despite the
# name, zero is accepted); false on any parse failure.
def is_positive_integer?(value)
  number = Integer(value)
  number >= 0
rescue StandardError
  false
end
# Wraps Faraday::Response for Liquid as the _response_ variable.
class ResponseDrop < LiquidDroppable::Drop
  # Response headers, wrapped for Liquid lookup
  def headers
    HeaderDrop.new(@object.headers)
  end
  # Integer value of HTTP status
  def status
    @object.status
  end
end
# Wraps Faraday::Utils::Headers
class HeaderDrop < LiquidDroppable::Drop
  # Liquid method lookup: '_' in the requested name is translated to '-'
  # so e.g. {{ _response_.headers.Content_Type }} reads the Content-Type
  # header.
  def before_method(name)
    @object[name.tr('_', '-')]
  end
end
end
end
|
##
# = AuthorizationServer model
#
# This class manages the interface to the Authorization Server.
class AuthorizationServer
  attr_accessor :uri, :connection, :configuration

  #-------------------------------------------------------------------------------
  ##
  # This method initializes a new instance of the AuthorizationServer class and
  # sets up the necessary configurations for communicating with the server.
  #
  # Params:
  # +auth_server_uri+:: URI of the authorization server
  # +rsrc_server_uri+:: URI of protected resource server
  #
  # Attributes set:
  # +@connection+:: Connection object to be used for further communication
  # +@configuration+:: Hash of server capabilities and endpoints
  def initialize(auth_server_uri, rsrc_server_uri)
    @auth_server_uri = auth_server_uri
    @rsrc_server_uri = rsrc_server_uri
    Rails.logger.debug "========== @auth_server_uri = " + @auth_server_uri + "=========="
    Rails.logger.debug "========== @rsrc_server_uri = " + @rsrc_server_uri + "=========="

    # Establish a connection object that will be reused during communication
    # with the authorization server.
    # FIX: previously passed @auth_uri, an instance variable that is never
    # assigned (nil base URL); the intended variable is @auth_server_uri.
    @connection = Faraday.new(@auth_server_uri, :ssl => {:verify => false}) do |builder|
      builder.request :url_encoded  # Encode request parameters as "www-form-urlencoded"
      builder.response :logger      # Log request and response to STDOUT
      builder.adapter :net_http     # Perform requests with Net::HTTP
    end

    # Get authorization server endpoints and configuration settings
    response = @connection.get("#{@auth_server_uri}/.well-known/openid-configuration")
    @configuration = JSON.parse(response.body)
  end

  #-------------------------------------------------------------------------------
  ##
  # This method retrieves the public key for the authorization server from the
  # authorization server's jwks_uri endpoint.
  #
  # Returns:
  # +String+:: Public key ("n" member of the first JWK) for Authorization Server
  def public_key
    # The public key is provided as a JSON Web Key Set (JWKS) by the jwks_uri
    # endpoint of the Authorization Server.
    response = @connection.get(@configuration["jwks_uri"])
    jwks = JSON.parse(response.body)

    # Use only first key returned and retrieve the "n" field of that key
    jwks["keys"].first["n"]
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the access token provided by the client is valid.
  # To validate the token, the introspection endpoint of the authorization server
  # is called to retrieve the claims for the access token. Once we have the claims,
  # we can validate whether the data request falls within those claims.
  #
  # Params:
  # +client_request+:: Request hash from the client requesting information
  # +test+:: When +true+, returns the raw introspection response (for testing)
  #
  # Returns:
  # +boolean+:: +true+ if access allowed, otherwise +false+
  def authorize_request(client_request, test = false)
    #access_token = Application.test_access_token
    # FIXME(review): hardcoded access token left over from testing; the
    # commented-out code below should extract the bearer token from the
    # Authorization header instead.
    access_token = "eyJhbGciOiJSUzI1NiJ9.eyJleHAiOjE0MTY1MTc3MTcsImF1ZCI6WyJlOTg0OWNmMS0wZGUyLTRiMzYtYWMzNy1hYjk1M2Q3OGM3Y2YiXSwiaXNzIjoiaHR0cHM6XC9cL2FzLXZhLm1pdHJlLm9yZ1wvIiwianRpIjoiZTVlZWQzMjYtNjljZC00NjFhLTk4MzYtYzJiMjhkN2I5NTkwIiwiaWF0IjoxNDE2NTE0MTE3fQ.VDhPLYosO_CovfBWBXHVuZk9kX0_znfh1v5ZuHaKCHnzhwM2qfLext400Ac9pIpnbNwYNGT0FnGKoqB1mrRFrfuzp5kbtc__o1N1VNEHm-EM03eGjiNGRnBA0hf2TPidQKi7H0oCr1G3jxZmZ373eIz838_gjew2Ia5KpdwMov5PaejYvyvaEGkjOhcb63phsfQPyDKRmGZkTLYRdHMCZL3j6UesFeZrSJaS5NQk0mzzR-fFPKIEyXOKhCi5zogEFiWNq08wSiiSXh_JB58sY_fdbNrItwSml80pR1CQprQftIurubcYVz6TwNKxhcTf7etOWSFPe0ewGwr7riCfdA"
    # Get access token from client request
    # authorization = client_request.env["HTTP_AUTHORIZATION"]
    # Rails.logger.debug "--------- authorization = #{authorization} ----------"
    # if authorization
    #   authorization = authorization.split(' ')
    #   if authorization.first == 'Bearer'
    #     access_token = authorization.last
    #   end
    Rails.logger.debug "********** Request = #{client_request.inspect} **********"
    Rails.logger.debug "////////// Access token = #{access_token.inspect} //////////"

    # Call authorization server to perform introspection on access token
    auth_response = @connection.post @configuration["introspection_endpoint"] do |request|
      # Pass access token as form data, authenticating ourselves with a signed JWT
      request.body = {
        "client_id" => Application.client_id,
        "client_assertion_type" => "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
        "client_assertion" => jwt_token,
        "token" => access_token
      }.to_param
      Rails.logger.debug "--------- request.headers = " + request.headers.inspect + " ----------"
      Rails.logger.debug "--------- request.body = " + request.body.inspect + " ---------"
    end
    Rails.logger.debug "--------- auth_response = " + auth_response.inspect + " ----------"
    #Rails.logger.debug "--------- auth_response['valid'] = " + auth_response["valid"] + " ----------"
    Rails.logger.debug "--------- auth_response.body = " + auth_response.body + " ----------"
    if test
      return auth_response
    else
      # Use introspection info to determine validity of access token for request
      valid_access_token?(client_request, auth_response)
    end
    # else
    #   # No access token
    #   false
    # end
  end

  #-------------------------------------------------------------------------------
  private
  #-------------------------------------------------------------------------------

  ##
  # This method creates a JSON Web Token (JWT) so that we can authenticate with
  # the authorization server.
  #
  # Returns:
  # +String+:: Signed JSON Web Token
  def jwt_token
    # Sign our claims with our private key. The authorization server will
    # contact our jwks_uri endpoint to get our public key to decode the JWT.
    JWT.encode(jwt_claims, Application.private_key, 'RS256')
  end

  #-------------------------------------------------------------------------------
  CLAIM_EXPIRATION = 3600 # Expiration in seconds

  ##
  # This method defines the claims for the JSON Web Token (JWT) we use to
  # authenticate with the authorization server.
  #
  # Returns:
  # +Hash+:: Set of claims for JSON Web Token
  def jwt_claims
    now = Time.now.to_i
    {
      iss: Application.client_id,            # Issuer (Resource Server)
      sub: Application.client_id,            # Subject of request (Resource Server)
      aud: "https://as-va.mitre.org/token",  # Intended audience (Authorization Server)
      iat: now,                              # Time of issue
      exp: now + CLAIM_EXPIRATION,           # Expiration time
      jti: "#{now}/#{SecureRandom.hex(18)}", # Unique ID for request
    }
  end

  #-------------------------------------------------------------------------------
  ##
  # This method validates the access token passed to us by the client by checking
  # the type of information allowed by the access token and verifying that the
  # request is consistent with those claims.
  # (FIX: removed leftover byebug debugger call.)
  #
  # Params:
  # +client_request+:: Original request from the client seeking access
  # +auth_response+:: Response from the Authorization Server introspection
  #
  # Returns:
  # +boolean+:: +true+ if access allowed, otherwise +false+
  def valid_access_token?(client_request, auth_response)
    if result = (auth_response.status == 200)
      token_claims = JSON.parse(auth_response.body)

      # Authorize request based on claims of access token
      result &&= token_claims["active"]
      result &&= validate_expiration(token_claims) if result
      result &&= validate_scope(client_request, token_claims) if result
    end
    Rails.logger.debug "----- valid_access_token? = #{result.to_s} -----"
    result
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the access token has expired.
  # (FIX: removed leftover byebug; debug line previously inspected the
  # nonexistent "expires_at" key; RFC 7662 delivers "exp" as an integer
  # epoch timestamp, which Time.parse cannot handle.)
  #
  # Params:
  # +token_claims+:: Claims from access token introspection
  #
  # Returns:
  # +boolean+:: +true+ if token has not expired, otherwise +false+
  def validate_expiration(token_claims)
    if token_claims["exp"].blank?
      Rails.logger.debug "----- no expiration time provided in access token -----"
      # No expiration time provided
      true
    else
      Rails.logger.debug "----- token_claims['exp'] = #{token_claims["exp"].inspect} -----"
      exp = token_claims["exp"]
      # Accept the standard integer epoch as well as a parseable date string
      expiration_time = exp.is_a?(Numeric) ? Time.at(exp) : Time.parse(exp.to_s)
      (expiration_time >= Time.now)
    end
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the request falls within the scope of the
  # access token. (FIX: removed leftover byebug debugger call.)
  #
  # Params:
  # +client_request+:: Original request from the client seeking access
  # +token_claims+:: Claims from access token introspection
  #
  # Returns:
  # +boolean+:: +true+ if request within token scope, otherwise +false+
  def validate_scope(client_request, token_claims)
    claims = token_claims["scope"].split(' ')
    Rails.logger.debug "----- claims = #{claims.inspect} -----"
    uri = URI(client_request.uri)
    # Remove initial '/' from path to get resource name
    resource = uri.path.from(1)
    Rails.logger.debug "----- resource = #{resource.inspect} -----"
    claims.include?(resource)
  end
end
Removed debugger statements from authorization_server.rb.
##
# = AuthorizationServer model
#
# This class manages the interface to the Authorization Server.
class AuthorizationServer
  attr_accessor :uri, :connection, :configuration

  #-------------------------------------------------------------------------------
  ##
  # This method initializes a new instance of the AuthorizationServer class and
  # sets up the necessary configurations for communicating with the server.
  #
  # Params:
  # +auth_server_uri+:: URI of the authorization server
  # +rsrc_server_uri+:: URI of protected resource server
  #
  # Attributes set:
  # +@connection+:: Connection object to be used for further communication
  # +@configuration+:: Hash of server capabilities and endpoints
  def initialize(auth_server_uri, rsrc_server_uri)
    @auth_server_uri = auth_server_uri
    @rsrc_server_uri = rsrc_server_uri
    Rails.logger.debug "========== @auth_server_uri = " + @auth_server_uri + "=========="
    Rails.logger.debug "========== @rsrc_server_uri = " + @rsrc_server_uri + "=========="

    # Establish a connection object that will be reused during communication
    # with the authorization server.
    # FIX: previously passed @auth_uri, an instance variable that is never
    # assigned (nil base URL); the intended variable is @auth_server_uri.
    @connection = Faraday.new(@auth_server_uri, :ssl => {:verify => false}) do |builder|
      builder.request :url_encoded  # Encode request parameters as "www-form-urlencoded"
      builder.response :logger      # Log request and response to STDOUT
      builder.adapter :net_http     # Perform requests with Net::HTTP
    end

    # Get authorization server endpoints and configuration settings
    response = @connection.get("#{@auth_server_uri}/.well-known/openid-configuration")
    @configuration = JSON.parse(response.body)
  end

  #-------------------------------------------------------------------------------
  ##
  # This method retrieves the public key for the authorization server from the
  # authorization server's jwks_uri endpoint.
  #
  # Returns:
  # +String+:: Public key ("n" member of the first JWK) for Authorization Server
  def public_key
    # The public key is provided as a JSON Web Key Set (JWKS) by the jwks_uri
    # endpoint of the Authorization Server.
    response = @connection.get(@configuration["jwks_uri"])
    jwks = JSON.parse(response.body)

    # Use only first key returned and retrieve the "n" field of that key
    jwks["keys"].first["n"]
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the access token provided by the client is valid.
  # To validate the token, the introspection endpoint of the authorization server
  # is called to retrieve the claims for the access token. Once we have the claims,
  # we can validate whether the data request falls within those claims.
  #
  # Params:
  # +client_request+:: Request hash from the client requesting information
  # +test+:: When +true+, returns the raw introspection response (for testing)
  #
  # Returns:
  # +boolean+:: +true+ if access allowed, otherwise +false+
  def authorize_request(client_request, test = false)
    #access_token = Application.test_access_token
    # FIXME(review): hardcoded access token left over from testing; the
    # commented-out code below should extract the bearer token from the
    # Authorization header instead.
    access_token = "eyJhbGciOiJSUzI1NiJ9.eyJleHAiOjE0MTY1MTc3MTcsImF1ZCI6WyJlOTg0OWNmMS0wZGUyLTRiMzYtYWMzNy1hYjk1M2Q3OGM3Y2YiXSwiaXNzIjoiaHR0cHM6XC9cL2FzLXZhLm1pdHJlLm9yZ1wvIiwianRpIjoiZTVlZWQzMjYtNjljZC00NjFhLTk4MzYtYzJiMjhkN2I5NTkwIiwiaWF0IjoxNDE2NTE0MTE3fQ.VDhPLYosO_CovfBWBXHVuZk9kX0_znfh1v5ZuHaKCHnzhwM2qfLext400Ac9pIpnbNwYNGT0FnGKoqB1mrRFrfuzp5kbtc__o1N1VNEHm-EM03eGjiNGRnBA0hf2TPidQKi7H0oCr1G3jxZmZ373eIz838_gjew2Ia5KpdwMov5PaejYvyvaEGkjOhcb63phsfQPyDKRmGZkTLYRdHMCZL3j6UesFeZrSJaS5NQk0mzzR-fFPKIEyXOKhCi5zogEFiWNq08wSiiSXh_JB58sY_fdbNrItwSml80pR1CQprQftIurubcYVz6TwNKxhcTf7etOWSFPe0ewGwr7riCfdA"
    # Get access token from client request
    # authorization = client_request.env["HTTP_AUTHORIZATION"]
    # Rails.logger.debug "--------- authorization = #{authorization} ----------"
    # if authorization
    #   authorization = authorization.split(' ')
    #   if authorization.first == 'Bearer'
    #     access_token = authorization.last
    #   end
    Rails.logger.debug "********** Request = #{client_request.inspect} **********"
    Rails.logger.debug "////////// Access token = #{access_token.inspect} //////////"

    # Call authorization server to perform introspection on access token
    auth_response = @connection.post @configuration["introspection_endpoint"] do |request|
      # Pass access token as form data, authenticating ourselves with a signed JWT
      request.body = {
        "client_id" => Application.client_id,
        "client_assertion_type" => "urn:ietf:params:oauth:client-assertion-type:jwt-bearer",
        "client_assertion" => jwt_token,
        "token" => access_token
      }.to_param
      Rails.logger.debug "--------- request.headers = " + request.headers.inspect + " ----------"
      Rails.logger.debug "--------- request.body = " + request.body.inspect + " ---------"
    end
    Rails.logger.debug "--------- auth_response = " + auth_response.inspect + " ----------"
    #Rails.logger.debug "--------- auth_response['valid'] = " + auth_response["valid"] + " ----------"
    Rails.logger.debug "--------- auth_response.body = " + auth_response.body + " ----------"
    if test
      return auth_response
    else
      # Use introspection info to determine validity of access token for request
      valid_access_token?(client_request, auth_response)
    end
    # else
    #   # No access token
    #   false
    # end
  end

  #-------------------------------------------------------------------------------
  private
  #-------------------------------------------------------------------------------

  ##
  # This method creates a JSON Web Token (JWT) so that we can authenticate with
  # the authorization server.
  #
  # Returns:
  # +String+:: Signed JSON Web Token
  def jwt_token
    # Sign our claims with our private key. The authorization server will
    # contact our jwks_uri endpoint to get our public key to decode the JWT.
    JWT.encode(jwt_claims, Application.private_key, 'RS256')
  end

  #-------------------------------------------------------------------------------
  CLAIM_EXPIRATION = 3600 # Expiration in seconds

  ##
  # This method defines the claims for the JSON Web Token (JWT) we use to
  # authenticate with the authorization server.
  #
  # Returns:
  # +Hash+:: Set of claims for JSON Web Token
  def jwt_claims
    now = Time.now.to_i
    {
      iss: Application.client_id,            # Issuer (Resource Server)
      sub: Application.client_id,            # Subject of request (Resource Server)
      aud: "https://as-va.mitre.org/token",  # Intended audience (Authorization Server)
      iat: now,                              # Time of issue
      exp: now + CLAIM_EXPIRATION,           # Expiration time
      jti: "#{now}/#{SecureRandom.hex(18)}", # Unique ID for request
    }
  end

  #-------------------------------------------------------------------------------
  ##
  # This method validates the access token passed to us by the client by checking
  # the type of information allowed by the access token and verifying that the
  # request is consistent with those claims.
  #
  # Params:
  # +client_request+:: Original request from the client seeking access
  # +auth_response+:: Response from the Authorization Server introspection
  #
  # Returns:
  # +boolean+:: +true+ if access allowed, otherwise +false+
  def valid_access_token?(client_request, auth_response)
    if result = (auth_response.status == 200)
      token_claims = JSON.parse(auth_response.body)

      # Authorize request based on claims of access token
      result &&= token_claims["active"]
      result &&= validate_expiration(token_claims) if result
      result &&= validate_scope(client_request, token_claims) if result
    end
    Rails.logger.debug "----- valid_access_token? = #{result.to_s} -----"
    result
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the access token has expired.
  # (FIX: the debug line previously inspected the nonexistent "expires_at"
  # key; RFC 7662 delivers "exp" as an integer epoch timestamp, which
  # Time.parse cannot handle.)
  #
  # Params:
  # +token_claims+:: Claims from access token introspection
  #
  # Returns:
  # +boolean+:: +true+ if token has not expired, otherwise +false+
  def validate_expiration(token_claims)
    if token_claims["exp"].blank?
      Rails.logger.debug "----- no expiration time provided in access token -----"
      # No expiration time provided
      true
    else
      Rails.logger.debug "----- token_claims['exp'] = #{token_claims["exp"].inspect} -----"
      exp = token_claims["exp"]
      # Accept the standard integer epoch as well as a parseable date string
      expiration_time = exp.is_a?(Numeric) ? Time.at(exp) : Time.parse(exp.to_s)
      (expiration_time >= Time.now)
    end
  end

  #-------------------------------------------------------------------------------
  ##
  # This method determines whether the request falls within the scope of the
  # access token.
  #
  # Params:
  # +client_request+:: Original request from the client seeking access
  # +token_claims+:: Claims from access token introspection
  #
  # Returns:
  # +boolean+:: +true+ if request within token scope, otherwise +false+
  def validate_scope(client_request, token_claims)
    claims = token_claims["scope"].split(' ')
    Rails.logger.debug "----- claims = #{claims.inspect} -----"
    uri = URI(client_request.uri)
    # Remove initial '/' from path to get resource name
    resource = uri.path.from(1)
    Rails.logger.debug "----- resource = #{resource.inspect} -----"
    claims.include?(resource)
  end
end
|
# Form object for registering a contiguous range of 9-digit barcodes.
class BarcodeRegistration
  include ActiveModel::Validations
  include ActiveModel::Conversion
  extend ActiveModel::Naming

  # Inclusive bounds of the barcode range, as numeric strings.
  attr_accessor :first_number, :last_number

  # FIX: anchor with \A and \z — ^ and $ match per-line in Ruby, so the
  # previous /^\d+$/ accepted multi-line input such as "1\njunk".
  validates :first_number,
            :presence => true,
            :length => { maximum: 9 },
            :format => { with: /\A\d+\z/ }
  validates :last_number,
            :presence => true,
            :length => { maximum: 9 },
            :format => { with: /\A\d+\z/ }
  validate :comparison_in_size

  # Adds an error when the range is inverted (first > last).
  def comparison_in_size
    errors.add(:first_number, I18n.t('barcode_registration.more_than_last_number')) if first_number.to_i > last_number.to_i
  end

  # Builds CSV-style data of zero-padded 9-digit barcodes from first_number
  # to last_number (inclusive), prefixed with a UTF-8 BOM.
  def self.set_data(first_number, last_number)
    data = String.new
    data << "\xEF\xBB\xBF".force_encoding("UTF-8")
    row = []
    first_number.upto(last_number) { |num| row << "%09d" % num }
    data << '"'+row.join("\",\n\"")+"\"\n"
  end

  # Mass-assigns the given attributes via their writers.
  def initialize(attributes = {})
    attributes.each { |name, value| send("#{name}=", value) }
  end

  # ActiveModel compatibility: this form object is never persisted.
  def persisted?
    false
  end
end
Modified BarcodeRegistration: disabled the presence validation on first_number and last_number.
# Form object for registering a contiguous range of 9-digit barcodes.
# Presence validation is intentionally disabled (presence: false skips
# the presence validator); length and format still apply to given values.
class BarcodeRegistration
  include ActiveModel::Validations
  include ActiveModel::Conversion
  extend ActiveModel::Naming

  # Inclusive bounds of the barcode range, as numeric strings.
  attr_accessor :first_number, :last_number

  # FIX: anchor with \A and \z — ^ and $ match per-line in Ruby, so the
  # previous /^\d+$/ accepted multi-line input such as "1\njunk".
  validates :first_number,
            :presence => false,
            :length => { maximum: 9 },
            :format => { with: /\A\d+\z/ }
  validates :last_number,
            :presence => false,
            :length => { maximum: 9 },
            :format => { with: /\A\d+\z/ }
  validate :comparison_in_size

  # Adds an error when the range is inverted (first > last).
  def comparison_in_size
    errors.add(:first_number, I18n.t('barcode_registration.more_than_last_number')) if first_number.to_i > last_number.to_i
  end

  # Builds CSV-style data of zero-padded 9-digit barcodes from first_number
  # to last_number (inclusive), prefixed with a UTF-8 BOM.
  def self.set_data(first_number, last_number)
    data = String.new
    data << "\xEF\xBB\xBF".force_encoding("UTF-8")
    row = []
    first_number.upto(last_number) { |num| row << "%09d" % num }
    data << '"'+row.join("\",\n\"")+"\"\n"
  end

  # Mass-assigns the given attributes via their writers.
  def initialize(attributes = {})
    attributes.each { |name, value| send("#{name}=", value) }
  end

  # ActiveModel compatibility: this form object is never persisted.
  def persisted?
    false
  end
end
|
Use symbolized hash
|
# frozen_string_literal: true
#
# == Publishable module
#
# Concern for models that can be scheduled for publication/expiry through
# an associated PublicationDate record.
module Publishable
  extend ActiveSupport::Concern

  included do
    # Model relations
    has_one :publication_date, as: :publishable, dependent: :destroy
    accepts_nested_attributes_for :publication_date, reject_if: :all_blank, allow_destroy: true

    # Delegates (allow_nil: records without a publication_date return nil)
    delegate :published_later, :published_at,
             :expired_prematurely, :expired_at,
             :published_later?, :expired_prematurely?,
             to: :publication_date, prefix: false, allow_nil: true
  end
end
Publishable :: Add comments to separate model relations from delegates
# frozen_string_literal: true
#
# == Publishable module
#
# Concern for models that can be scheduled for publication/expiry through
# an associated PublicationDate record.
module Publishable
  extend ActiveSupport::Concern

  included do
    # Model relations
    has_one :publication_date, as: :publishable, dependent: :destroy
    accepts_nested_attributes_for :publication_date, reject_if: :all_blank, allow_destroy: true

    # Delegates (allow_nil: records without a publication_date return nil)
    delegate :published_later, :published_at,
             :expired_prematurely, :expired_at,
             :published_later?, :expired_prematurely?,
             to: :publication_date, prefix: false, allow_nil: true
  end
end
|
class ContainerDefinition < ApplicationRecord
  include ArchivedMixin
  include_concern 'Purging'
  # :name, :image, :image_pull_policy, :memory, :cpu

  belongs_to :container_group
  belongs_to :ext_management_system, :foreign_key => :ems_id
  has_many :container_port_configs, :dependent => :destroy
  has_many :container_env_vars, :dependent => :destroy
  has_one :container, :dependent => :destroy
  has_one :security_context, :as => :resource, :dependent => :destroy
  has_one :container_image, :through => :container

  # Disconnects this definition (and its container) from its EMS, recording
  # the deletion time and previous EMS id.
  # FIX: made idempotent — a definition already disconnected (ems_id nil)
  # returns early so a second call does not clobber deleted_on/old_ems_id.
  def disconnect_inv
    return if ems_id.nil?
    _log.info "Disconnecting Container definition [#{name}] id [#{id}]"
    self.container.try(:disconnect_inv)
    self.deleted_on = Time.now.utc
    self.old_ems_id = self.ems_id
    self.ems_id = nil
    save
  end
end
Do not disconnect ContainerDefinition twice
class ContainerDefinition < ApplicationRecord
  include ArchivedMixin
  include_concern 'Purging'
  # Columns of interest: :name, :image, :image_pull_policy, :memory, :cpu

  belongs_to :container_group
  belongs_to :ext_management_system, :foreign_key => :ems_id
  has_many :container_port_configs, :dependent => :destroy
  has_many :container_env_vars, :dependent => :destroy
  has_one :container, :dependent => :destroy
  has_one :security_context, :as => :resource, :dependent => :destroy
  has_one :container_image, :through => :container

  # Disconnects this definition (and its container) from its EMS, recording
  # the deletion time and previous EMS id. Idempotent: returns early when
  # already disconnected so deleted_on/old_ems_id are not overwritten.
  def disconnect_inv
    return if ems_id.nil?
    _log.info "Disconnecting Container definition [#{name}] id [#{id}]"
    self.container.try(:disconnect_inv)
    self.deleted_on = Time.now.utc
    self.old_ems_id = self.ems_id
    self.ems_id = nil
    save
  end
end
|
add ministry_involvement
# ActiveRecord model for a person's involvement in a ministry; shared
# behavior lives in the Common::Core::MinistryInvolvement concern.
class MinistryInvolvement < ActiveRecord::Base
  load_mappings # NOTE(review): presumably maps legacy column names — confirm in load_mappings
  include Common::Core::MinistryInvolvement
end
|
class MiqEventDefinition < ActiveRecord::Base
  default_scope { where self.conditions_for_my_region_default_scope }

  include UuidMixin

  # The event name is its identifier: alphanumerics, '_' and '-' only
  validates_presence_of :name
  validates_uniqueness_of :name
  validates_format_of :name, :with => %r{\A[a-z0-9_\-]+\z}i,
    :allow_nil => true, :message => "must only contain alpha-numeric, underscore and hyphen characters without spaces"
  validates_presence_of :description

  acts_as_miq_set_member
  include ReportableMixin
  acts_as_miq_taggable

  has_many :miq_policy_contents
  has_many :policy_events

  serialize :definition

  # Transient flag; not persisted
  attr_accessor :reserved

  FIXTURE_DIR = File.join(Rails.root, "db/fixtures")

  # For company-tag events on a container-style target, also raise the event
  # for the VMs/templates reachable through these associations.
  CHILD_EVENTS = {
    :assigned_company_tag => {
      :Host => [:vms_and_templates],
      :EmsCluster => [:all_vms_and_templates],
      :Storage => [:vms_and_templates],
      :ResourcePool => [:vms_and_templates]
    },
    :unassigned_company_tag => {
      :Host => [:vms_and_templates],
      :EmsCluster => [:all_vms_and_templates],
      :Storage => [:vms_and_templates],
      :ResourcePool => [:vms_and_templates]
    }
  }

  # Target base classes that participate in policy enforcement and alerting
  SUPPORTED_POLICY_AND_ALERT_CLASSES = [Host, VmOrTemplate, Storage, EmsCluster, ResourcePool, MiqServer]
def self.raise_evm_event(target, raw_event, inputs={})
# Target may have been deleted if it's a worker
# Target, in that case will be the worker's server.
# The generic raw_event remains, but client can pass the :type of the worker spawning the event:
# ex: MiqEventDefinition.raise_evm_event(w.miq_server, "evm_worker_not_responding", :type => "MiqGenericWorker", :event_details => "MiqGenericWorker with pid 1234 killed due to not responding")
# Policy, automate, and alerting could then consume this type field along with the details
if target.kind_of?(Array)
klass, id = target
klass = Object.const_get(klass)
target = klass.find_by_id(id)
raise "Unable to find object with class: [#{klass}], Id: [#{id}]" unless target
end
inputs[:type] ||= target.class.name
# TODO: Need to be able to pick an event without an expression in the UI
event = normalize_event(raw_event.to_s)
# Determine what actions to perform for this event
actions = event_to_actions(target, raw_event, event)
results = {}
if actions[:enforce_policy]
_log.info("Event Raised [#{event}]")
results[:policy] = MiqPolicy.enforce_policy(target, event, inputs)
end
if actions[:raise_to_automate]
_log.info("Event [#{raw_event}] raised to automation")
results[:automate] = MiqAeEvent.raise_evm_event(raw_event, target, inputs)
end
if actions[:evaluate_alert]
_log.info("Alert for Event [#{raw_event}]")
results[:alert] = MiqAlert.evaluate_alerts(target, event, inputs)
end
if actions[:raise_children_events]
results[:children_events] = raise_event_for_children(target, raw_event, inputs)
end
results
end
def self.event_to_actions(target, raw_event, event)
# Old logic:
#
# For Host, VmOrTemplate, Storage, EmsCluster, ResourcePool targets:
# if it's a known event, we enforce policy and evaluate alerts
# if not known but alertable???, we only evaluate alerts
# For any of these targets, we then raise an event for the children of the target
# For any other targets, we raise an raise an event to automate
# New logic:
# Known events:
# send to policy (policy can then raise to automate)
# evaluate alerts
# raise for children
# Unknown events:
# Alert for ones we care about
# raise for children
# Not Host, VmOrTemplate, Storage, EmsCluster, ResourcePool events:
# Alert if event is alertable
# raise to automate (since policy doesn't support these types)
# TODO: Need to add to automate_expressions in MiqAlert line 345 for alertable events
actions = Hash.new(false)
if target.class.base_class.in?(SUPPORTED_POLICY_AND_ALERT_CLASSES)
actions[:raise_children_events] = true
if event != "unknown"
actions[:enforce_policy] = true
actions[:evaluate_alert] = true
elsif MiqAlert.event_alertable?(raw_event)
actions[:evaluate_alert] = true
else
_log.debug("Event [#{raw_event}] does not participate in policy enforcement")
end
else
actions[:raise_to_automate] = true
actions[:evaluate_alert] = true if MiqAlert.event_alertable?(raw_event)
end
actions
end
def self.raise_evm_event_queue_in_region(target, raw_event, inputs = {})
  # Queue 'raise_evm_event' for processing anywhere in the region;
  # :zone => nil means the message is not pinned to a single zone.
  queue_options = {
    :zone        => nil,
    :class_name  => name,
    :method_name => 'raise_evm_event',
    :args        => [[target.class.name, target.id], raw_event, inputs]
  }
  MiqQueue.put(queue_options)
end
def self.raise_evm_event_queue(target, raw_event, inputs = {})
  # Queue 'raise_evm_event' in the current zone (MiqQueue's default).
  MiqQueue.put(
    :class_name  => name,
    :method_name => 'raise_evm_event',
    :args        => [[target.class.name, target.id], raw_event, inputs]
  )
end
def self.raise_evm_alert_event_queue(target, raw_event, inputs = {})
  # Queue alert evaluation for +target+, but only when at least one alert
  # is configured for this alarm. A guard clause replaces the original
  # hard-to-spot trailing `if` modifier hanging off the multi-line call.
  return unless MiqAlert.alarm_has_alerts?(raw_event)
  # put_unless_exists avoids queuing duplicate evaluations for the same event.
  MiqQueue.put_unless_exists(
    :class_name  => "MiqAlert",
    :method_name => 'evaluate_alerts',
    :args        => [[target.class.name, target.id], raw_event, inputs]
  )
end
def self.raise_evm_job_event(target, options = {}, inputs={})
# Raise a job lifecycle event named "<prefix>_<base_model>_<type>_<suffix>";
# nil parts are dropped by compact.
# E.g. options = {:type => "scan", :prefix => "request", :suffix => "abort"}
# for a Vm target raises "request_vm_scan_abort".
options.reverse_merge!(
:type => "scan",
:prefix => nil,
:suffix => nil
)
base_event = [target.class.base_model.name.downcase, options[:type]].join("_")
evm_event = [options[:prefix], base_event, options[:suffix]].compact.join("_")
self.raise_evm_event(target, evm_event, inputs)
end
def self.raise_event_for_children(target, raw_event, inputs = {})
  # Propagate +raw_event+ to the children of +target+: queue a derived
  # "<raw_event>_parent_<parent_model>" event for every child reachable
  # through the associations configured in CHILD_EVENTS for this event
  # and target base class. No-op when nothing is configured.
  associations = CHILD_EVENTS.fetch_path(raw_event.to_sym, target.class.base_class.name.to_sym)
  return if associations.blank?

  child_event = "#{raw_event}_parent_#{target.class.base_model.name.underscore}"
  associations.each do |association|
    next unless target.respond_to?(association)
    target.send(association).each do |child|
      _log.info("Raising Event [#{child_event}] for Child [(#{child.class}) #{child.name}] of Parent [(#{target.class}) #{target.name}]")
      raise_evm_event_queue(child, child_event, inputs)
    end
  end
end
def self.normalize_event(event)
  # Map unrecognised event names onto the catch-all "unknown" event.
  find_by_name(event) ? event : "unknown"
end
def self.all_events
# All seeded "Default" policy events (as opposed to custom definitions).
where(:event_type => "Default")
end
def self.event_name_for_target(target, event_suffix)
  # Prefix the suffix with the target's underscored base-model name,
  # e.g. a Vm target with "started" yields "vm_started".
  [target.class.base_model.name.underscore, event_suffix].join("_")
end
def miq_policies
  # Policies whose contents reference this event definition.
  policy_ids = MiqPolicyContent.where(:miq_event_definition_id => self.id).uniq.pluck(:miq_policy_id)
  MiqPolicy.where(:id => policy_ids).to_a
end
def export_to_array
  # Serialize this record for export as [{ "ClassName" => attributes }],
  # dropping DB-managed bookkeeping columns.
  h = self.attributes
  %w(id created_on updated_on).each { |key| h.delete(key) }
  [self.class.to_s => h]
end
def self.import_from_hash(event, options = {})
  # Create or update an event definition from an exported hash.
  #
  # Returns [record, status] where status[:status] is :add, :update or
  # :conflict (with validation messages). When options[:preview] is true
  # nothing is persisted, only logged with a "[PREVIEW]" marker.
  status = {:class => name, :description => event["description"]}
  e = MiqEventDefinition.find_by_name(event["name"])
  msg_pfx = "Importing Event: name=[#{event["name"]}]"

  if e.nil?
    e = MiqEventDefinition.new(event)
    status[:status] = :add
  else
    e.attributes = event
    status[:status] = :update
  end

  unless e.valid?
    status[:status] = :conflict
    status[:messages] = e.errors.full_messages
  end

  msg = "#{msg_pfx}, Status: #{status[:status]}"
  msg << ", Messages: #{status[:messages].join(",")}" if status[:messages]

  # Positive `if` replaces the original `unless ... else` (an idiom the
  # style guide flags as hard to read).
  if options[:preview] == true
    MiqPolicy.logger.info("[PREVIEW] #{msg}")
  else
    MiqPolicy.logger.info(msg)
    e.save!
  end

  return e, status
end
def etype
  # The event's type: its single parent MiqEventDefinitionSet. Every
  # definition must belong to a set, so a missing parent is a hard error.
  memberof.first.tap do |set|
    raise "unexpected error, no type found for event #{self.name}" if set.nil?
  end
end
def self.etypes
# Available event types, i.e. all MiqEventDefinition sets.
MiqEventDefinition.sets
end
def self.add_elements(vm, xmlNode)
  # Import VM operational/configuration events from a "vmevents" XML
  # document, creating EmsEvent rows for records not already present.
  # +vm+ is unused but kept for call-site compatibility.
  #
  # Best effort: failures are logged but never raised to the caller
  # (previously a bare `rescue` swallowed every error silently, hiding
  # even programming mistakes).
  if xmlNode.root.name == "vmevents"
    xmlNode.find_each("//vmevents/view/rows/row") do |row|
      # Get the record's parts
      eventType = row.attributes["event_type"]
      timestamp = Time.at(row.attributes["timestamp"].to_i)
      # SECURITY: YAML.load on data originating outside the appliance can
      # instantiate arbitrary objects; consider YAML.safe_load.
      eventData = YAML.load(row.attributes["event_data"])
      eventData.delete("id")
      # Remove elements that do not belong in the event table
      %w(src_vm_guid dest_vm_guid vm_guid).each { |field| eventData.delete(field) }
      # Write the data to the table, skipping rows already recorded
      unless EmsEvent.exists?(:event_type => eventType,
                              :timestamp  => timestamp,
                              :ems_id     => eventData['ems_id'],
                              :chain_id   => eventData['chain_id'])
        EmsEvent.create(eventData)
      end
    end
  end
  # Debug artifact: dumps the processed XML into the process working dir.
  File.open("./xfer_#{xmlNode.root.name}.xml", "w") { |f| xmlNode.write(f, 0) }
rescue => err
  _log.error("MiqEventDefinition.add_elements failed: #{err.class}: #{err.message}")
end
def self.seed
  # Seed the definition sets first, then the events and definitions inside
  # a region-wide lock so concurrent appliances do not interleave writes.
  MiqEventDefinitionSet.seed
  MiqRegion.my_region.lock do
    seed_default_events
    seed_default_definitions
  end
end
def self.seed_default_events
# Seed event rows from db/fixtures/<table>.csv. The first CSV line holds
# the column names; each following line is one event; a leading '#'
# comments a line out. The :set_type column is not stored on the record
# but names the MiqEventDefinitionSet the event belongs to.
fname = File.join(FIXTURE_DIR, "#{self.to_s.pluralize.underscore}.csv")
data = File.read(fname).split("\n")
cols = data.shift.split(",")
data.each do |e|
next if e =~ /^#.*$/ # skip commented lines
# NOTE(review): naive split(",") — assumes the fixture has no quoted or
# embedded commas; CSV.parse would be safer. Confirm fixture format.
arr = e.split(",")
event = {}
cols.each_index {|i| event[cols[i].to_sym] = arr[i]}
set_type = event.delete(:set_type)
next if event[:name].blank?
rec = self.find_by_name(event[:name])
if rec.nil?
_log.info("Creating [#{event[:name]}]")
rec = self.create(event)
else
# Update existing rows in place, saving only when something changed.
rec.attributes = event
if rec.changed?
_log.info("Updating [#{event[:name]}]")
rec.save
end
end
es = MiqEventDefinitionSet.find_by_name(set_type)
rec.memberof.each {|old_set| rec.make_not_memberof(old_set) unless old_set == es} # handle changes in set membership
es.add_member(rec) if es && !es.members.include?(rec)
end
end
def self.seed_default_definitions
# Seed per-event-type definitions from db/fixtures/miq_event_definitions.yml
# (a hash of event_type => {:events => [...]}), creating missing rows and
# updating changed ones.
# NOTE(review): stats tallies adds (:a) and updates (:u) but is never
# logged or returned — the method's value is the #each result.
stats = {:a => 0, :u => 0}
fname = File.join(FIXTURE_DIR, "miq_event_definitions.yml")
defns = YAML.load_file(fname)
defns.each do |event_type, events|
events[:events].each do |e|
event = self.find_by_name_and_event_type(e[:name], event_type.to_s)
if event.nil?
_log.info("Creating [#{e[:name]}]")
event = self.create(e.merge(:event_type => event_type.to_s, :default => true, :enabled => true))
stats[:a] += 1
else
event.attributes = e
if event.changed?
_log.info("Updating [#{e[:name]}]")
event.save
stats[:u] += 1
end
end
end
end
end
end # class MiqEventDefinition
Separate MiqEvent functions into its own file.
# MiqEventDefinition represents a named event that MiqPolicy and MiqAlert
# can react to. Definitions are grouped into MiqEventDefinitionSets (their
# "type") and are seeded from CSV/YAML fixtures at appliance startup.
class MiqEventDefinition < ActiveRecord::Base
default_scope { where self.conditions_for_my_region_default_scope }
include UuidMixin
validates_presence_of :name
validates_uniqueness_of :name
validates_format_of :name, :with => %r{\A[a-z0-9_\-]+\z}i,
:allow_nil => true, :message => "must only contain alpha-numeric, underscore and hyphen characters without spaces"
validates_presence_of :description
acts_as_miq_set_member
include ReportableMixin
acts_as_miq_taggable
has_many :miq_policy_contents
has_many :policy_events
serialize :definition
# Not a DB column; transient flag used by import/export tooling.
attr_accessor :reserved
FIXTURE_DIR = File.join(Rails.root, "db/fixtures")
# All seeded "Default" policy events (as opposed to custom definitions).
def self.all_events
where(:event_type => "Default")
end
# Policies whose contents reference this event definition.
def miq_policies
p_ids = MiqPolicyContent.where(:miq_event_definition_id => self.id).uniq.pluck(:miq_policy_id)
MiqPolicy.where(:id => p_ids).to_a
end
# Serialize this record for export as [{ "ClassName" => attributes }],
# dropping DB-managed bookkeeping columns.
def export_to_array
h = self.attributes
["id", "created_on", "updated_on"].each { |k| h.delete(k) }
return [ self.class.to_s => h ]
end
# Create or update an event definition from an exported hash.
# Returns [record, status]; status[:status] is :add, :update or :conflict
# (with validation messages). Nothing is saved when options[:preview].
def self.import_from_hash(event, options={})
status = {:class => self.name, :description => event["description"]}
e = MiqEventDefinition.find_by_name(event["name"])
msg_pfx = "Importing Event: name=[#{event["name"]}]"
if e.nil?
e = MiqEventDefinition.new(event)
status[:status] = :add
else
e.attributes = event
status[:status] = :update
end
unless e.valid?
status[:status] = :conflict
status[:messages] = e.errors.full_messages
end
msg = "#{msg_pfx}, Status: #{status[:status]}"
msg += ", Messages: #{status[:messages].join(",")}" if status[:messages]
unless options[:preview] == true
MiqPolicy.logger.info(msg)
e.save!
else
MiqPolicy.logger.info("[PREVIEW] #{msg}")
end
return e, status
end
# The event's type: its single parent MiqEventDefinitionSet.
def etype
set = self.memberof.first
raise "unexpected error, no type found for event #{self.name}" if set.nil?
set
end
# Available event types, i.e. all MiqEventDefinition sets.
def self.etypes
MiqEventDefinition.sets
end
# Import VM events from a "vmevents" XML document into EmsEvent rows,
# skipping duplicates. +vm+ is unused but kept for call-site compatibility.
# NOTE(review): the bare rescue below swallows every error silently, and
# YAML.load on externally supplied data can instantiate arbitrary objects
# (consider YAML.safe_load).
def self.add_elements(vm, xmlNode)
begin
# Record vm operational and configuration events
if xmlNode.root.name == "vmevents"
xmlNode.find_each("//vmevents/view/rows/row") do |row|
# Get the record's parts
eventType = row.attributes["event_type"]
timestamp = Time.at(row.attributes["timestamp"].to_i)
eventData = YAML.load(row.attributes["event_data"])
eventData.delete("id")
# Remove elements that do not belong in the event table
%w{ src_vm_guid dest_vm_guid vm_guid }.each do |field|
eventData.delete(field)
end
# Write the data to the table
unless EmsEvent.exists?(:event_type => eventType,
:timestamp => timestamp,
:ems_id => eventData['ems_id'],
:chain_id => eventData['chain_id'])
EmsEvent.create(eventData)
end
end
end
#_log.warn "[#{xmlNode}]"
#add_missing_elements(vm, xmlNode, "Applications/Products/Products", "win32_product", WIN32_APPLICATION_MAPPING)
File.open("./xfer_#{xmlNode.root.name}.xml", "w") {|f| xmlNode.write(f,0)}
rescue
end
end
# Seed sets first, then events/definitions inside a region-wide lock so
# concurrent appliances do not interleave writes.
def self.seed
MiqEventDefinitionSet.seed
MiqRegion.my_region.lock do
self.seed_default_events
self.seed_default_definitions
end
end
# Seed event rows from db/fixtures/<table>.csv. The first line holds the
# column names; '#' comments a line out; the :set_type column names the
# MiqEventDefinitionSet the event belongs to (not stored on the record).
def self.seed_default_events
fname = File.join(FIXTURE_DIR, "#{self.to_s.pluralize.underscore}.csv")
data = File.read(fname).split("\n")
cols = data.shift.split(",")
data.each do |e|
next if e =~ /^#.*$/ # skip commented lines
arr = e.split(",")
event = {}
cols.each_index {|i| event[cols[i].to_sym] = arr[i]}
set_type = event.delete(:set_type)
next if event[:name].blank?
rec = self.find_by_name(event[:name])
if rec.nil?
_log.info("Creating [#{event[:name]}]")
rec = self.create(event)
else
rec.attributes = event
if rec.changed?
_log.info("Updating [#{event[:name]}]")
rec.save
end
end
es = MiqEventDefinitionSet.find_by_name(set_type)
rec.memberof.each {|old_set| rec.make_not_memberof(old_set) unless old_set == es} # handle changes in set membership
es.add_member(rec) if es && !es.members.include?(rec)
end
end
# Seed per-event-type definitions from miq_event_definitions.yml.
# NOTE(review): stats tallies adds/updates but is never logged or returned.
def self.seed_default_definitions
stats = {:a => 0, :u => 0}
fname = File.join(FIXTURE_DIR, "miq_event_definitions.yml")
defns = YAML.load_file(fname)
defns.each do |event_type, events|
events[:events].each do |e|
event = self.find_by_name_and_event_type(e[:name], event_type.to_s)
if event.nil?
_log.info("Creating [#{e[:name]}]")
event = self.create(e.merge(:event_type => event_type.to_s, :default => true, :enabled => true))
stats[:a] += 1
else
event.attributes = e
if event.changed?
_log.info("Updating [#{e[:name]}]")
event.save
stats[:u] += 1
end
end
end
end
end
end # class MiqEventDefinition
|
module Qernel
##
# == Big Picture
#
# === Converter
# Responsible for calculating demands/energy flow only. Has links and
# slots, so can traverse the graph. But doesn't know about its other
# attributes like cost, co2, etc. It is more like a node in a graph.
#
# === ConverterApi
#
# A ConverterApi instance includes (static) attributes (stored in the
# ::Converter table) and dynamic attributes that are calculated based
# on the static ones. It doesn't (really) know about links, slots, etc
# but can access them through #converter. It's more like a data-model.
#
#
# === Reasons for separation
#
# Converter is relatively simple, ConverterApi has a lot of code. Like
# this we can keep the calculation part simple. Also regarding that in
# the future it might be easier to implement the graph for instance in
# another language (C, Java, Scala).
#
# In the energy flow calculation, we worry about speed much more. Things
# like method_missing can make that really slow, (e.g. calling flatten)
# on an array of objects that implement method_missing degrades performance
# a lot.
#
#
#
#
# You can use all the methods directly in the GQL. *Do not miss* the
# *Dynamic Method Handling* section.
#
#
#
class ConverterApi
include MethodMetaData
include DatasetAttributes
##
# :method: primary_demand_of_fossil
# Primary demand of fossil energy
# Relative tolerance used by #demand_expected? when comparing calculated
# demand against demand_expected_value.
EXPECTED_DEMAND_TOLERANCE = 0.001
# All the static attributes that come from the database
# Access the following attributes with @. e.g
# @demand_expected_value *and not* demand_expected_value (or self.demand_expected_value)
ATTRIBUTES_USED = [
:availability,
:variability,
:capacity_factor,
:ccs_investment_per_mw_input,
:ccs_operation_and_maintenance_cost_per_full_load_hour,
:co2_free,
:construction_time,
:costs_per_mj,
:decommissioning_costs_per_mw_input,
:decrease_in_nominal_capacity_over_lifetime,
:demand_expected_value,
:full_load_hours,
:installing_costs_per_mw_input,
:land_use_per_unit,
:lead_time,
:network_capacity_available_in_mw,
:network_capacity_used_in_mw,
:network_expansion_costs_in_euro_per_mw,
:operation_and_maintenance_cost_fixed_per_mw_input,
:operation_and_maintenance_cost_variable_per_full_load_hour,
:part_ets,
:peak_load_units_present,
:purchase_price_per_mw_input,
:residual_value_per_mw_input,
:simult_sd,
:simult_se,
:simult_wd,
:simult_we,
:technical_lifetime,
:typical_nominal_input_capacity,
:wacc,
:merit_order_start,
:merit_order_end
]
# For the data/converter/show page we need grouping of the attributes
# these atrribut groups should only be used to show the values in the data section
# Each entry maps attribute => [human label, unit].
ELECTRICITY_PRODUCTION_VALUES = {
:technical => {
:nominal_capacity_electricity_output_per_unit => ['Nominal electrical capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:electricity_output_conversion => ['Electrical efficiency', '%'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:initial_investment_excl_ccs_per_mwe => ['Initial investment (excl CCS)', 'euro / MWe'],
:additional_investment_ccs_per_mwe => ['Additional inititial investment for CCS', 'euro / MWe'],
:cost_of_installing_per_mwe => ['Cost of installing','euro / MWe'],
:residual_value_per_mwe => ['Residual value after lifetime','euro / MWe'],
:decommissioning_costs_per_mwe => ['Decommissioning costs','euro / MWe'],
:fixed_yearly_operation_and_maintenance_costs_per_mwe => ['Fixed operation and maintenance costs','euro / MWe / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs (excl CCS)', 'euro / full load hour'],
:ccs_operation_and_maintenance_cost_per_full_load_hour => ['Additional variable operation and maintenance costs for CCS', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:construction_time => ['Construction time', 'year'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
HEAT_PRODUCTION_VALUES = {
:technical => {
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:purchase_price_per_unit => ['Initial purchase price', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
HEAT_PUMP_VALUES = {
:technical => {
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:coefficient_of_performance => ['Coefficient of Performance', ''],
:heat_and_cold_output_conversion => ['Efficiency (after COP)', '%']
},
:cost => {
:purchase_price_per_unit => ['Initial purchase price', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
CHP_VALUES = {
:technical => {
:nominal_capacity_electricity_output_per_unit => ['Nominal electrical capacity','MW'],
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:electricity_output_conversion => ['Electrical efficiency', '%'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:initial_investment_excl_ccs_per_unit => ['Initial investment (excl CCS)', 'euro'],
:additional_investment_ccs_per_unit => ['Additional inititial investment for CCS', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs (excl CCS)', 'euro / full load hour'],
:ccs_operation_and_maintenance_cost_per_full_load_hour => ['Additional variable operation and maintenance costs for CCS', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:construction_time => ['Construction time', 'year'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
dataset_accessors ATTRIBUTES_USED
# attributes updated by #initialize
attr_reader :converter, :dataset_key, :dataset_group
# attributes updated by Converter#graph=
attr_accessor :area, :graph
# dataset attributes of converter
dataset_accessors [:preset_demand, :demand]
# ConverterApi has same accessor as it's converter
def self.dataset_group
@dataset_group ||= Qernel::Converter.dataset_group
end
def to_s
converter && converter.full_key.to_s
end
def inspect
to_s
end
# For testing only
# Otherwise graphs by GraphParser won't be Gqueryable
# DEBT properly fix
if Rails.env.development? or Rails.env.test?
def object_dataset
converter.object_dataset
end
end
#
# Wrap +converter_qernel+, caching its dataset key/group.
# NOTE(review): the attrs argument is accepted but currently ignored.
#
def initialize(converter_qernel, attrs = {})
@converter = converter_qernel
@dataset_key = converter.dataset_key
@dataset_group = converter.dataset_group
end
def energy_balance_group
converter.energy_balance_group
end
# See {Qernel::Converter} for difference of demand/preset_demand
#
def preset_demand=(val)
converter.preset_demand = val
end
# Is the calculated near the demand_expected_value?
#
# @return [nil] if demand or expected is nil
# @return [true] if demand is within tolerance EXPECTED_DEMAND_TOLERANCE
#
def demand_expected?
expected = demand_expected_value
return nil if demand.nil? or expected.nil?
return true if demand.to_f == 0 and expected.to_f == 0.0
(demand.to_f / expected.to_f - 1.0).abs < EXPECTED_DEMAND_TOLERANCE
end
# Extracted into a method, because we have a circular dependency in specs
# Carriers are not imported, so when initializing all those methods won't get
# loaded. So this way we can load later.
# Defines per-carrier accessors (demand_of_x, supply_of_x, input/output_of_x,
# primary_demand_of_x and per-side link/slot readers), all returning 0.0
# when the carrier/slot/link is absent.
def self.create_methods_for_each_carrier(carrier_names)
carrier_names.each do |carrier|
carrier_key = carrier.to_sym
define_method "demand_of_#{carrier}" do
self.output_of_carrier(carrier_key) || 0.0
end
define_method "supply_of_#{carrier}" do
self.input_of_carrier(carrier_key) || 0.0
end
define_method "input_of_#{carrier}" do
self.input_of_carrier(carrier_key) || 0.0
end
define_method "output_of_#{carrier}" do
self.output_of_carrier(carrier_key) || 0.0
end
define_method "primary_demand_of_#{carrier}" do
converter.primary_demand_of_carrier(carrier_key) || 0.0
end
['input', 'output'].each do |side|
define_method "#{carrier}_#{side}_link_share" do
if slot = self.converter.send(side, carrier_key)
if link = slot.links.first
link.send('share') || 0.0
else
0.0
end
else
0.0
end
end
%w[conversion value share actual_conversion].each do |method|
define_method "#{carrier}_#{side}_#{method}" do
slot = self.converter.send(side, carrier_key)
value = slot && slot.send(method)
value || 0.0
end
end
end
end
end
create_methods_for_each_carrier(Etsource::Dataset::Import.new('nl').carrier_keys)
# creates a method during run time if method_missing
#
def self.create_share_of_converter_method(converter_key)
key = converter_key.to_sym
define_method "share_of_#{key}" do
ol = self.converter.output_links.detect{|l| l.parent.full_key == key}
ol and ol.share
end
end
# creates a method during run time if method_missing and returns the value
#
def self.create_share_of_converter_method_and_execute(caller, converter_key)
create_share_of_converter_method(converter_key)
caller.send("share_of_#{converter_key}")
end
# creates a method during run time if method_missing
#
# The carrier name may carry a link-type suffix (_constant, _share,
# _flexible, _inversedflexible) which selects a specific link instead of
# the slot's first link.
def self.create_input_link_method(method_id, carrier_name, side, method)
if carrier_name.match(/^(.*)_(constant|share|inversedflexible|flexible)$/)
carrier_name, link_type = carrier_name.match(/^(.*)_(constant|share|inversedflexible|flexible)$/).captures
link_type = "inversed_flexible" if link_type == "inversedflexible"
end
define_method method_id do
if slot = self.converter.send(side, carrier_name.to_sym)
if link = link_type.nil? ? slot.links.first : slot.links.detect{|l| l.send("#{link_type}?")}
link.send(method)
end
end
end
end
# creates a method during run time if method_missing and returns the value
#
def self.create_input_link_method_and_execute(caller, method_id, carrier_name, side, method)
create_input_link_method(method_id, carrier_name, side, method)
caller.send(method_id)
end
def primary_demand
self.converter.primary_demand
end
def final_demand
self.converter.final_demand
end
#
# Dynamic method handling: lazily defines link accessors and share_of_*
# methods, strips a cost_ prefix, and delegates primary_demand*/final_demand*
# to the converter.
# NOTE(review): respond_to_missing? is not overridden, so respond_to? will
# not report these dynamically handled methods.
#
def method_missing(method_id, *arguments)
ActiveSupport::Notifications.instrument("gql.debug", "ConverterApi:method_missing #{method_id}")
# electricity_
if m = /^(.*)_(input|output)_link_(share|value)$/.match(method_id.to_s)
carrier_name, side, method = m.captures
self.class.create_input_link_method_and_execute(self, method_id, carrier_name, side, method)
elsif m = /^share_of_(\w*)$/.match(method_id.to_s) and parent = m.captures.first
self.class.create_share_of_converter_method_and_execute(self, parent)
elsif m = /^cost_(\w*)$/.match(method_id.to_s) and method_name = m.captures.first
self.send(method_name)
elsif m = /^primary_demand(\w*)$/.match(method_id.to_s)
# puts arguments
self.converter.send(method_id, *arguments)
elsif m = /^final_demand(\w*)$/.match(method_id.to_s)
self.converter.send(method_id, *arguments)
else
Rails.logger.info("ConverterApi#method_missing: #{method_id}")
super
end
end
# add all the attributes and methods that are modularized in calculator/
# loads all the "open classes" in calculator
Dir["app/models/qernel/converter_api/*.rb"].sort.each {|file| require_dependency file }
end
end
remove capacity_factor
module Qernel
##
# == Big Picture
#
# === Converter
# Responsible for calculating demands/energy flow only. Has links and
# slots, so can traverse the graph. But doesn't know about its other
# attributes like cost, co2, etc. It is more like a node in a graph.
#
# === ConverterApi
#
# A ConverterApi instance includes (static) attributes (stored in the
# ::Converter table) and dynamic attributes that are calculated based
# on the static ones. It doesn't (really) know about links, slots, etc
# but can access them through #converter. It's more like a data-model.
#
#
# === Reasons for separation
#
# Converter is relatively simple, ConverterApi has a lot of code. Like
# this we can keep the calculation part simple. Also regarding that in
# the future it might be easier to implement the graph for instance in
# another language (C, Java, Scala).
#
# In the energy flow calculation, we worry about speed much more. Things
# like method_missing can make that really slow, (e.g. calling flatten)
# on an array of objects that implement method_missing degrades performance
# a lot.
#
#
#
#
# You can use all the methods directly in the GQL. *Do not miss* the
# *Dynamic Method Handling* section.
#
#
#
class ConverterApi
include MethodMetaData
include DatasetAttributes
##
# :method: primary_demand_of_fossil
# Primary demand of fossil energy
EXPECTED_DEMAND_TOLERANCE = 0.001
# All the static attributes that come from the database
# Access the following attributes with @. e.g
# @demand_expected_value *and not* demand_expected_value (or self.demand_expected_value)
ATTRIBUTES_USED = [
:availability,
:variability,
:ccs_investment_per_mw_input,
:ccs_operation_and_maintenance_cost_per_full_load_hour,
:co2_free,
:construction_time,
:costs_per_mj,
:decommissioning_costs_per_mw_input,
:decrease_in_nominal_capacity_over_lifetime,
:demand_expected_value,
:full_load_hours,
:installing_costs_per_mw_input,
:land_use_per_unit,
:lead_time,
:network_capacity_available_in_mw,
:network_capacity_used_in_mw,
:network_expansion_costs_in_euro_per_mw,
:operation_and_maintenance_cost_fixed_per_mw_input,
:operation_and_maintenance_cost_variable_per_full_load_hour,
:part_ets,
:peak_load_units_present,
:purchase_price_per_mw_input,
:residual_value_per_mw_input,
:simult_sd,
:simult_se,
:simult_wd,
:simult_we,
:technical_lifetime,
:typical_nominal_input_capacity,
:wacc,
:merit_order_start,
:merit_order_end
]
# For the data/converter/show page we need grouping of the attributes
# these atrribut groups should only be used to show the values in the data section
ELECTRICITY_PRODUCTION_VALUES = {
:technical => {
:nominal_capacity_electricity_output_per_unit => ['Nominal electrical capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:electricity_output_conversion => ['Electrical efficiency', '%'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:initial_investment_excl_ccs_per_mwe => ['Initial investment (excl CCS)', 'euro / MWe'],
:additional_investment_ccs_per_mwe => ['Additional inititial investment for CCS', 'euro / MWe'],
:cost_of_installing_per_mwe => ['Cost of installing','euro / MWe'],
:residual_value_per_mwe => ['Residual value after lifetime','euro / MWe'],
:decommissioning_costs_per_mwe => ['Decommissioning costs','euro / MWe'],
:fixed_yearly_operation_and_maintenance_costs_per_mwe => ['Fixed operation and maintenance costs','euro / MWe / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs (excl CCS)', 'euro / full load hour'],
:ccs_operation_and_maintenance_cost_per_full_load_hour => ['Additional variable operation and maintenance costs for CCS', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:construction_time => ['Construction time', 'year'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
HEAT_PRODUCTION_VALUES = {
:technical => {
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:purchase_price_per_unit => ['Initial purchase price', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
HEAT_PUMP_VALUES = {
:technical => {
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:coefficient_of_performance => ['Coefficient of Performance', ''],
:heat_and_cold_output_conversion => ['Efficiency (after COP)', '%']
},
:cost => {
:purchase_price_per_unit => ['Initial purchase price', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
CHP_VALUES = {
:technical => {
:nominal_capacity_electricity_output_per_unit => ['Nominal electrical capacity','MW'],
:nominal_capacity_heat_output_per_unit => ['Nominal heat capacity','MW'],
:average_effective_output_of_nominal_capacity_over_lifetime => ['Average effective output of nominal capacity over lifetime', '%'],
:full_load_hours => ['Full load hours', 'hour / year'],
:electricity_output_conversion => ['Electrical efficiency', '%'],
:heat_output_conversion => ['Heat efficiency', '%']
},
:cost => {
:initial_investment_excl_ccs_per_unit => ['Initial investment (excl CCS)', 'euro'],
:additional_investment_ccs_per_unit => ['Additional inititial investment for CCS', 'euro'],
:cost_of_installing_per_unit => ['Cost of installing','euro'],
:residual_value_per_unit => ['Residual value after lifetime','euro'],
:decommissioning_costs_per_unit => ['Decommissioning costs','euro'],
:fixed_yearly_operation_and_maintenance_costs_per_unit => ['Fixed operation and maintenance costs','euro / year'],
:operation_and_maintenance_cost_variable_per_full_load_hour => ['Variable operation and maintenance costs (excl CCS)', 'euro / full load hour'],
:ccs_operation_and_maintenance_cost_per_full_load_hour => ['Additional variable operation and maintenance costs for CCS', 'euro / full load hour'],
:wacc => ['Weighted average cost of capital', '%'],
:part_ets => ['Do emissions have to be paid for through the ETS?', 'yes=1 / no=0']
},
:other => {
:land_use_per_unit => ['Land use per unit', 'km2'],
:construction_time => ['Construction time', 'year'],
:technical_lifetime => ['Technical lifetime', 'year']
}
}
dataset_accessors ATTRIBUTES_USED
# attributes updated by #initialize
attr_reader :converter, :dataset_key, :dataset_group
# attributes updated by Converter#graph=
attr_accessor :area, :graph
# dataset attributes of converter
dataset_accessors [:preset_demand, :demand]
# ConverterApi has same accessor as it's converter
def self.dataset_group
@dataset_group ||= Qernel::Converter.dataset_group
end
def to_s
converter && converter.full_key.to_s
end
# Keep console output compact: inspect mirrors #to_s.
def inspect
  self.to_s
end
# For testing only: expose the raw object dataset so graphs built by
# GraphParser remain Gqueryable in specs and development consoles.
# DEBT: properly fix instead of defining methods conditionally per environment.
if Rails.env.development? or Rails.env.test?
  def object_dataset
    converter.object_dataset
  end
end
# Wraps a Qernel::Converter, caching its dataset key and group.
#
# @param converter_qernel [Qernel::Converter] converter to wrap
# @param attrs [Hash] accepted for interface compatibility but currently unused
def initialize(converter_qernel, attrs = {})
  @converter = converter_qernel
  @dataset_key = converter.dataset_key
  @dataset_group = converter.dataset_group
end
# The wrapped converter's energy balance group.
def energy_balance_group
  owner = converter
  owner.energy_balance_group
end
# See {Qernel::Converter} for the difference between demand and preset_demand.
# Writes straight through to the wrapped converter.
def preset_demand=(val)
  target = converter
  target.preset_demand = val
end
# Is the calculated demand near demand_expected_value?
#
# @return [nil]   if demand or expected is nil
# @return [true]  if demand is within EXPECTED_DEMAND_TOLERANCE of expected
# @return [false] otherwise
def demand_expected?
  expected = demand_expected_value
  # Use && / || (not `and`/`or`): the keyword forms bind looser than
  # assignment and comparison and are easy to misread here.
  return nil if demand.nil? || expected.nil?
  # Two zero demands are trivially "as expected"; also avoids 0/0 NaN below.
  return true if demand.to_f == 0 && expected.to_f == 0.0
  (demand.to_f / expected.to_f - 1.0).abs < EXPECTED_DEMAND_TOLERANCE
end
# Defines, for every known carrier (electricity, gas, ...), a family of
# convenience accessors:
#   demand_of_<c> / output_of_<c>  — carrier output (0.0 fallback)
#   supply_of_<c> / input_of_<c>   — carrier input (0.0 fallback)
#   primary_demand_of_<c>          — primary demand of that carrier
#   <c>_<side>_link_share          — share of the first link on that slot
#   <c>_<side>_{conversion,value,share,actual_conversion}
#
# Extracted into a method because of a circular dependency in specs:
# carriers are not imported there, so these methods must be definable later.
def self.create_methods_for_each_carrier(carrier_names)
  carrier_names.each do |carrier|
    carrier_key = carrier.to_sym
    define_method "demand_of_#{carrier}" do
      self.output_of_carrier(carrier_key) || 0.0
    end
    define_method "supply_of_#{carrier}" do
      self.input_of_carrier(carrier_key) || 0.0
    end
    define_method "input_of_#{carrier}" do
      self.input_of_carrier(carrier_key) || 0.0
    end
    define_method "output_of_#{carrier}" do
      self.output_of_carrier(carrier_key) || 0.0
    end
    define_method "primary_demand_of_#{carrier}" do
      converter.primary_demand_of_carrier(carrier_key) || 0.0
    end
    ['input', 'output'].each do |side|
      # Share of the first link on the slot; 0.0 when slot or link is missing.
      define_method "#{carrier}_#{side}_link_share" do
        if slot = self.converter.send(side, carrier_key)
          if link = slot.links.first
            link.send('share') || 0.0
          else
            0.0
          end
        else
          0.0
        end
      end
      # Slot attribute accessors; 0.0 when the slot is missing or value nil.
      %w[conversion value share actual_conversion].each do |method|
        define_method "#{carrier}_#{side}_#{method}" do
          slot = self.converter.send(side, carrier_key)
          value = slot && slot.send(method)
          value || 0.0
        end
      end
    end
  end
end
# Eagerly define the per-carrier helpers using the carrier keys from the
# Dutch ('nl') reference dataset.
# NOTE(review): assumes every region exposes the same carrier set as 'nl' —
# confirm.
create_methods_for_each_carrier(Etsource::Dataset::Import.new('nl').carrier_keys)
# Defines (at runtime, on first use via method_missing) a `share_of_<key>`
# instance method: the share of the output link feeding the converter
# identified by +converter_key+, or nil when no such link exists.
def self.create_share_of_converter_method(converter_key)
  key = converter_key.to_sym
  define_method "share_of_#{key}" do
    ol = self.converter.output_links.detect{|l| l.parent.full_key == key}
    ol and ol.share
  end
end
# Defines the missing `share_of_<key>` method, then immediately invokes it on
# +caller+ and returns the value. Subsequent calls bypass method_missing.
def self.create_share_of_converter_method_and_execute(caller, converter_key)
  method_name = "share_of_#{converter_key}"
  create_share_of_converter_method(converter_key)
  caller.send(method_name)
end
# Defines +method_id+ (at runtime, on first use via method_missing) as an
# accessor for a link attribute. A composite +carrier_name+ such as
# "electricity_share" selects both the carrier and the link type; otherwise
# the slot's first link is used. The generated method returns nil when the
# slot or the link is missing.
def self.create_input_link_method(method_id, carrier_name, side, method)
  # Match once and reuse the MatchData (the original ran the regexp twice).
  if (m = carrier_name.match(/^(.*)_(constant|share|inversedflexible|flexible)$/))
    carrier_name, link_type = m.captures
    # The suffix form has no underscore, but the Link predicate does.
    link_type = "inversed_flexible" if link_type == "inversedflexible"
  end
  define_method method_id do
    slot = self.converter.send(side, carrier_name.to_sym)
    if slot
      link = link_type.nil? ? slot.links.first : slot.links.detect { |l| l.send("#{link_type}?") }
      link.send(method) if link
    end
  end
end
# Defines the missing link-accessor method, then invokes it on +caller+ and
# returns its value.
def self.create_input_link_method_and_execute(caller, method_id, carrier_name, side, method)
  create_input_link_method(method_id, carrier_name, side, method)
  value = caller.send(method_id)
  value
end
# Primary energy demand of the wrapped converter.
def primary_demand
  converter.primary_demand
end
# Final energy demand of the wrapped converter.
def final_demand
  converter.final_demand
end
# Resolves GQL-style attribute lookups dynamically: link accessors and
# share_of_* methods are defined on first use (so later calls skip
# method_missing), primary_demand*/final_demand* calls are delegated to the
# converter, and cost_* is treated as a legacy alias for the bare method name.
# NOTE(review): respond_to_missing? is not overridden to match — confirm
# whether respond_to? correctness matters for these dynamic methods.
def method_missing(method_id, *arguments)
  ActiveSupport::Notifications.instrument("gql.debug", "ConverterApi:method_missing #{method_id}")
  # e.g. electricity_input_link_share
  if m = /^(.*)_(input|output)_link_(share|value)$/.match(method_id.to_s)
    carrier_name, side, method = m.captures
    self.class.create_input_link_method_and_execute(self, method_id, carrier_name, side, method)
  elsif m = /^share_of_(\w*)$/.match(method_id.to_s) and parent = m.captures.first
    self.class.create_share_of_converter_method_and_execute(self, parent)
  elsif m = /^cost_(\w*)$/.match(method_id.to_s) and method_name = m.captures.first
    # cost_x is a legacy alias for x.
    self.send(method_name)
  elsif m = /^primary_demand(\w*)$/.match(method_id.to_s)
    self.converter.send(method_id, *arguments)
  elsif m = /^final_demand(\w*)$/.match(method_id.to_s)
    self.converter.send(method_id, *arguments)
  else
    Rails.logger.info("ConverterApi#method_missing: #{method_id}")
    super
  end
end
# Load all "open class" extensions under calculator/ so their attributes and
# methods are mixed into ConverterApi (require_dependency keeps Rails code
# reloading working in development).
Dir["app/models/qernel/converter_api/*.rb"].sort.each {|file| require_dependency file }
end
end
|
module Renalware
  # A drug prescribed to a patient, optionally in the context of a treatable
  # episode (peritonitis episode or exit-site infection).
  class Medication < ActiveRecord::Base
    # Virtual attribute backing the drug-search UI widget.
    attr_accessor :drug_select

    acts_as_paranoid
    has_paper_trail class_name: 'Renalware::MedicationVersion'

    belongs_to :patient
    belongs_to :drug, class_name: "Renalware::Drugs::Drug"
    belongs_to :treatable, polymorphic: true
    belongs_to :medication_route

    validates :patient, presence: true
    validates :treatable, presence: true
    validates :drug, presence: true
    validates :dose, presence: true
    validates :medication_route, presence: true
    validates :frequency, presence: true
    validates :start_date, presence: true
    validates :provider, presence: true
    validate :constrain_route_description

    enum provider: Provider.codes

    scope :ordered, -> { order(default_search_order) }

    # Most recently started medications first.
    def self.default_search_order
      "start_date desc"
    end

    # Builds an unsaved medication attached to a peritonitis episode.
    def self.peritonitis
      self.new(treatable_type: 'Renalware::PeritonitisEpisode')
    end

    # Builds an unsaved medication attached to an exit-site infection.
    def self.exit_site
      self.new(treatable_type: 'Renalware::ExitSiteInfection')
    end

    # One-line human-readable summary, e.g. "Aspirin, 75mg, Oral, daily, ...".
    def formatted
      [].tap { |ary|
        ary << drug.name if drug.present?
        ary << dose
        ary << medication_route.name if medication_route.present?
        ary << frequency
        ary << start_date
      }.compact.join(", ")
    end

    private

    # A free-text route description is required when the route is "other" and
    # forbidden otherwise. Uses blank? so a nil route_description is handled
    # safely — String#empty? raises NoMethodError on nil.
    def constrain_route_description
      return unless medication_route
      case
      when medication_route.other? && route_description.blank?
        errors.add(:route_description, "can't be blank")
      when !medication_route.other? && route_description.present?
        errors.add(:route_description, "can't be present when a route has been selected")
      end
    end
  end
end
Handle nil route_description when validating a medication's route selection.
module Renalware
  # A drug prescribed to a patient, optionally in the context of a treatable
  # episode (peritonitis episode or exit-site infection).
  class Medication < ActiveRecord::Base
    # Virtual attribute backing the drug-search UI widget.
    attr_accessor :drug_select
    acts_as_paranoid
    has_paper_trail class_name: 'Renalware::MedicationVersion'
    belongs_to :patient
    belongs_to :drug, class_name: "Renalware::Drugs::Drug"
    belongs_to :treatable, polymorphic: true
    belongs_to :medication_route
    validates :patient, presence: true
    validates :treatable, presence: true
    validates :drug, presence: true
    validates :dose, presence: true
    validates :medication_route, presence: true
    validates :frequency, presence: true
    validates :start_date, presence: true
    validates :provider, presence: true
    validate :constrain_route_description
    enum provider: Provider.codes
    scope :ordered, -> { order(default_search_order) }

    # Most recently started medications first.
    def self.default_search_order
      "start_date desc"
    end

    # Builds an unsaved medication attached to a peritonitis episode.
    def self.peritonitis
      self.new(treatable_type: 'Renalware::PeritonitisEpisode')
    end

    # Builds an unsaved medication attached to an exit-site infection.
    def self.exit_site
      self.new(treatable_type: 'Renalware::ExitSiteInfection')
    end

    # One-line human-readable summary, e.g. "Aspirin, 75mg, Oral, daily, ...".
    def formatted
      [].tap { |ary|
        ary << drug.name if drug.present?
        ary << dose
        ary << medication_route.name if medication_route.present?
        ary << frequency
        ary << start_date
      }.compact.join(", ")
    end

    private

    # A free-text route description is required when the selected route is
    # "other", and forbidden for any concrete route. present? is nil-safe.
    def constrain_route_description
      return unless medication_route
      case
      when medication_route.other? && !route_description.present?
        errors.add(:route_description, "can't be blank")
      when !medication_route.other? && route_description.present?
        errors.add(:route_description, "can't be present when a route has been selected")
      end
    end
  end
end
|
# Imports bibliographic resources (manifestations and items) in bulk from an
# attached file. Progress is tracked with a Statesman state machine
# (pending -> started -> completed/failed); every processed row is recorded as
# a ResourceImportResult and the uploading user is mailed on completion or
# failure.
class ResourceImportFile < ApplicationRecord
  include Statesman::Adapters::ActiveRecordQueries[
    transition_class: ResourceImportFileTransition,
    initial_state: :pending
  ]
  include ImportFile
  # Files still waiting to be processed.
  scope :not_imported, -> { in_state(:pending) }
  # Pending files older than an hour — presumed stuck.
  scope :stucked, -> { in_state(:pending).where('resource_import_files.created_at < ?', 1.hour.ago) }
  has_one_attached :resource_import
  validates :resource_import, presence: true, on: :create
  # A destination shelf is only mandatory when creating new records.
  validates :default_shelf_id, presence: true, if: Proc.new{|model| model.edit_mode == 'create'}
  belongs_to :user
  belongs_to :default_shelf, class_name: 'Shelf', optional: true
  has_many :resource_import_results, dependent: :destroy
  has_many :resource_import_file_transitions, autosave: false, dependent: :destroy
  # Transient, controller-supplied options; not persisted.
  attr_accessor :mode, :library_id

  # Statesman state machine bound to this record.
  def state_machine
    ResourceImportFileStateMachine.new(self, transition_class: ResourceImportFileTransition)
  end

  delegate :can_transition_to?, :transition_to!, :transition_to, :current_state,
    to: :state_machine

  # Dispatches to the operation selected by edit_mode:
  # 'destroy', 'update_relationship', or (default) a create/update import.
  def import_start
    case edit_mode
    when 'destroy'
      remove
    when 'update_relationship'
      update_relationship
    else
      import
    end
  end

  # Imports manifestations/items from the attached file and, when the optional
  # EnjuNii / EnjuCirculation engines are loaded, NII and circulation data too.
  # Returns a tally hash of created/updated/found/failed/skipped counts.
  # On any error the record is marked failed, a failure mail is sent, and the
  # exception is re-raised.
  def import
    transition_to!(:started)
    # NOTE(review): row_num is never incremented in this method, so the rescue
    # below always reports "line 1" — confirm whether that is intended.
    row_num = 1
    result = {
      manifestation_created: 0, manifestation_updated: 0, manifestation_failed: 0, manifestation_found: 0, manifestation_skipped: 0,
      item_created: 0, item_updated: 0, item_failed: 0, item_found: 0, item_skipped: 0,
      circulation_imported: 0, circulation_skipped: 0
    }
    entries = ManifestationImporter.import(create_import_temp_file(resource_import), default_shelf: default_shelf&.name, action: edit_mode)
    entries.each do |entry|
      # Record the outcome of every row, including failures.
      ResourceImportResult.create!(
        resource_import_file: self,
        manifestation: entry.manifestation_record,
        item: entry.item_record,
        error_message: entry.error_message
      )
      case entry.manifestation_result
      when :created
        result[:manifestation_created] += 1
      when :updated
        result[:manifestation_updated] += 1
      when :found
        result[:manifestation_found] += 1
      when :failed
        result[:manifestation_failed] += 1
      when :skipped
        result[:manifestation_skipped] += 1
      end
      case entry.item_result
      when :created
        result[:item_created] += 1
      when :updated
        result[:item_updated] += 1
      when :found
        result[:item_found] += 1
      when :failed
        result[:item_failed] += 1
      when :skipped
        result[:item_skipped] += 1
      end
    end
    if defined?(EnjuNii)
      NiiImporter.import(create_import_temp_file(resource_import), action: edit_mode)
    end
    if defined?(EnjuCirculation)
      CirculationImporter.import(create_import_temp_file(resource_import), action: edit_mode).each do |circulation_entry|
        case circulation_entry.result
        when :imported
          result[:circulation_imported] += 1
        when :skipped
          result[:circulation_skipped] += 1
        end
      end
    end
    # Flush pending Solr index updates before reporting success.
    Sunspot.commit
    transition_to!(:completed)
    mailer = ResourceImportMailer.completed(self)
    send_message(mailer)
    Rails.cache.write("manifestation_search_total", Manifestation.search.total)
    result
  rescue => e
    self.error_message = "line #{row_num}: #{e.message}"
    save
    transition_to!(:failed)
    mailer = ResourceImportMailer.failed(self)
    send_message(mailer)
    raise e
  end

  # Imports records from a MARC or MARCXML dump.
  # NOTE(review): `expression` below is not defined in this scope, so this
  # method raises NameError as written; record['700'] may also be nil.
  # Flagged TODO in the original — confirm before relying on it.
  def import_marc(marc_type)
    file = resource_import.download
    case marc_type
    when 'marcxml'
      reader = MARC::XMLReader.new(file)
    else
      reader = MARC::Reader.new(file)
    end
    file.close
    #when 'marc_xml_url'
    #  url = URI(params[:marc_xml_url])
    #  xml = open(url).read
    #  reader = MARC::XMLReader.new(StringIO.new(xml))
    #end
    # TODO
    for record in reader
      manifestation = Manifestation.new(original_title: expression.original_title)
      manifestation.carrier_type = CarrierType.find(1)
      manifestation.frequency = Frequency.find(1)
      manifestation.language = Language.find(1)
      manifestation.save
      full_name = record['700']['a']
      publisher = Agent.find_by(full_name: record['700']['a'])
      unless publisher
        publisher = Agent.new(full_name: full_name)
        publisher.save
      end
      manifestation.publishers << publisher
    end
  end

  # Batch entry point: processes every pending import file.
  # NOTE(review): the rescue wraps the whole loop and #import re-raises, so the
  # first failing file stops the remaining files from being processed.
  def self.import
    ResourceImportFile.not_imported.each do |file|
      file.import_start
    end
  rescue
    Rails.logger.info "#{Time.zone.now} importing resources failed!"
  end

  #def import_jpmarc
  #  marc = NKF::nkf('-wc', self.db_file.data)
  #  marc.split("\r\n").each do |record|
  #  end
  #end

  # Re-links items to manifestations from a tabular file. Each row identifies
  # the item by item_identifier (falling back to item_id) and the manifestation
  # by manifestation_identifier (falling back to manifestation_id); every row's
  # outcome is stored as a ResourceImportResult. First row is a header.
  def update_relationship
    transition_to!(:started)
    rows = open_import_file(create_import_temp_file(resource_import))
    rows.shift
    row_num = 1
    rows.each do |row|
      item_identifier = row['item_identifier'].to_s.strip
      item = Item.find_by(item_identifier: item_identifier)
      unless item
        item = Item.find_by(id: row['item_id'].to_s.strip)
      end
      manifestation_identifier = row['manifestation_identifier'].to_s.strip
      manifestation = Manifestation.find_by(manifestation_identifier: manifestation_identifier)
      unless manifestation
        manifestation = Manifestation.find_by(id: row['manifestation_id'].to_s.strip)
      end
      if item && manifestation
        item.manifestation = manifestation
        item.save!
      end
      import_result = ResourceImportResult.create!(resource_import_file_id: id, body: row.fields.join("\t"))
      import_result.item = item
      import_result.manifestation = manifestation
      import_result.save!
      row_num += 1
    end
    transition_to!(:completed)
    mailer = ResourceImportMailer.completed(self)
    send_message(mailer)
  rescue => e
    self.error_message = "line #{row_num}: #{e.message}"
    save
    transition_to!(:failed)
    mailer = ResourceImportMailer.failed(self)
    send_message(mailer)
    raise e
  end
end
# == Schema Information
#
# Table name: resource_import_files
#
# id :bigint not null, primary key
# user_id :bigint
# note :text
# executed_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
# edit_mode :string
# resource_import_fingerprint :string
# error_message :text
# user_encoding :string
# default_shelf_id :integer
#
Revert "run NiiImporter if EnjuNii is defined"
This reverts commit 9fe037c0926c3cfd0dc57fb723281ad473d054e5.
# Imports bibliographic resources (manifestations and items) in bulk from an
# attached file. Progress is tracked with a Statesman state machine
# (pending -> started -> completed/failed); every processed row is recorded as
# a ResourceImportResult and the uploading user is mailed on completion or
# failure.
class ResourceImportFile < ApplicationRecord
  include Statesman::Adapters::ActiveRecordQueries[
    transition_class: ResourceImportFileTransition,
    initial_state: :pending
  ]
  include ImportFile
  # Files still waiting to be processed.
  scope :not_imported, -> { in_state(:pending) }
  # Pending files older than an hour — presumed stuck.
  scope :stucked, -> { in_state(:pending).where('resource_import_files.created_at < ?', 1.hour.ago) }
  has_one_attached :resource_import
  validates :resource_import, presence: true, on: :create
  # A destination shelf is only mandatory when creating new records.
  validates :default_shelf_id, presence: true, if: Proc.new{|model| model.edit_mode == 'create'}
  belongs_to :user
  belongs_to :default_shelf, class_name: 'Shelf', optional: true
  has_many :resource_import_results, dependent: :destroy
  has_many :resource_import_file_transitions, autosave: false, dependent: :destroy
  # Transient, controller-supplied options; not persisted.
  attr_accessor :mode, :library_id

  # Statesman state machine bound to this record.
  def state_machine
    ResourceImportFileStateMachine.new(self, transition_class: ResourceImportFileTransition)
  end

  delegate :can_transition_to?, :transition_to!, :transition_to, :current_state,
    to: :state_machine

  # Dispatches to the operation selected by edit_mode:
  # 'destroy', 'update_relationship', or (default) a create/update import.
  def import_start
    case edit_mode
    when 'destroy'
      remove
    when 'update_relationship'
      update_relationship
    else
      import
    end
  end

  # Imports manifestations/items from the attached file and, when the optional
  # EnjuCirculation engine is loaded, circulation data too. Returns a tally
  # hash of created/updated/found/failed/skipped counts. On any error the
  # record is marked failed, a failure mail is sent, and the exception is
  # re-raised.
  def import
    transition_to!(:started)
    # NOTE(review): row_num is never incremented in this method, so the rescue
    # below always reports "line 1" — confirm whether that is intended.
    row_num = 1
    result = {
      manifestation_created: 0, manifestation_updated: 0, manifestation_failed: 0, manifestation_found: 0, manifestation_skipped: 0,
      item_created: 0, item_updated: 0, item_failed: 0, item_found: 0, item_skipped: 0,
      circulation_imported: 0, circulation_skipped: 0
    }
    entries = ManifestationImporter.import(create_import_temp_file(resource_import), default_shelf: default_shelf&.name, action: edit_mode)
    entries.each do |entry|
      # Record the outcome of every row, including failures.
      ResourceImportResult.create!(
        resource_import_file: self,
        manifestation: entry.manifestation_record,
        item: entry.item_record,
        error_message: entry.error_message
      )
      case entry.manifestation_result
      when :created
        result[:manifestation_created] += 1
      when :updated
        result[:manifestation_updated] += 1
      when :found
        result[:manifestation_found] += 1
      when :failed
        result[:manifestation_failed] += 1
      when :skipped
        result[:manifestation_skipped] += 1
      end
      case entry.item_result
      when :created
        result[:item_created] += 1
      when :updated
        result[:item_updated] += 1
      when :found
        result[:item_found] += 1
      when :failed
        result[:item_failed] += 1
      when :skipped
        result[:item_skipped] += 1
      end
    end
    if defined?(EnjuCirculation)
      CirculationImporter.import(create_import_temp_file(resource_import), action: edit_mode).each do |circulation_entry|
        case circulation_entry.result
        when :imported
          result[:circulation_imported] += 1
        when :skipped
          result[:circulation_skipped] += 1
        end
      end
    end
    # Flush pending Solr index updates before reporting success.
    Sunspot.commit
    transition_to!(:completed)
    mailer = ResourceImportMailer.completed(self)
    send_message(mailer)
    Rails.cache.write("manifestation_search_total", Manifestation.search.total)
    result
  rescue => e
    self.error_message = "line #{row_num}: #{e.message}"
    save
    transition_to!(:failed)
    mailer = ResourceImportMailer.failed(self)
    send_message(mailer)
    raise e
  end

  # Imports records from a MARC or MARCXML dump.
  # NOTE(review): `expression` below is not defined in this scope, so this
  # method raises NameError as written; record['700'] may also be nil.
  # Flagged TODO in the original — confirm before relying on it.
  def import_marc(marc_type)
    file = resource_import.download
    case marc_type
    when 'marcxml'
      reader = MARC::XMLReader.new(file)
    else
      reader = MARC::Reader.new(file)
    end
    file.close
    #when 'marc_xml_url'
    #  url = URI(params[:marc_xml_url])
    #  xml = open(url).read
    #  reader = MARC::XMLReader.new(StringIO.new(xml))
    #end
    # TODO
    for record in reader
      manifestation = Manifestation.new(original_title: expression.original_title)
      manifestation.carrier_type = CarrierType.find(1)
      manifestation.frequency = Frequency.find(1)
      manifestation.language = Language.find(1)
      manifestation.save
      full_name = record['700']['a']
      publisher = Agent.find_by(full_name: record['700']['a'])
      unless publisher
        publisher = Agent.new(full_name: full_name)
        publisher.save
      end
      manifestation.publishers << publisher
    end
  end

  # Batch entry point: processes every pending import file.
  # NOTE(review): the rescue wraps the whole loop and #import re-raises, so the
  # first failing file stops the remaining files from being processed.
  def self.import
    ResourceImportFile.not_imported.each do |file|
      file.import_start
    end
  rescue
    Rails.logger.info "#{Time.zone.now} importing resources failed!"
  end

  #def import_jpmarc
  #  marc = NKF::nkf('-wc', self.db_file.data)
  #  marc.split("\r\n").each do |record|
  #  end
  #end

  # Re-links items to manifestations from a tabular file. Each row identifies
  # the item by item_identifier (falling back to item_id) and the manifestation
  # by manifestation_identifier (falling back to manifestation_id); every row's
  # outcome is stored as a ResourceImportResult. First row is a header.
  def update_relationship
    transition_to!(:started)
    rows = open_import_file(create_import_temp_file(resource_import))
    rows.shift
    row_num = 1
    rows.each do |row|
      item_identifier = row['item_identifier'].to_s.strip
      item = Item.find_by(item_identifier: item_identifier)
      unless item
        item = Item.find_by(id: row['item_id'].to_s.strip)
      end
      manifestation_identifier = row['manifestation_identifier'].to_s.strip
      manifestation = Manifestation.find_by(manifestation_identifier: manifestation_identifier)
      unless manifestation
        manifestation = Manifestation.find_by(id: row['manifestation_id'].to_s.strip)
      end
      if item && manifestation
        item.manifestation = manifestation
        item.save!
      end
      import_result = ResourceImportResult.create!(resource_import_file_id: id, body: row.fields.join("\t"))
      import_result.item = item
      import_result.manifestation = manifestation
      import_result.save!
      row_num += 1
    end
    transition_to!(:completed)
    mailer = ResourceImportMailer.completed(self)
    send_message(mailer)
  rescue => e
    self.error_message = "line #{row_num}: #{e.message}"
    save
    transition_to!(:failed)
    mailer = ResourceImportMailer.failed(self)
    send_message(mailer)
    raise e
  end
end
# == Schema Information
#
# Table name: resource_import_files
#
# id :bigint not null, primary key
# user_id :bigint
# note :text
# executed_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
# edit_mode :string
# resource_import_fingerprint :string
# error_message :text
# user_encoding :string
# default_shelf_id :integer
#
|
module Results
  # Imports an Excel/CSV results file into an Event's races.
  #
  # Result time limited to hundredths of seconds.
  #
  # Notes example:
  #   Senior Men Pro/1/2 | Field size: 79 riders | Laps: 2
  #
  # Set DEBUG_RESULTS to toggle expensive debug logging.
  # E.g., DEBUG_RESULTS=yes ./script/server
  class ResultsFile
    attr_accessor :event
    attr_accessor :source
    attr_accessor :custom_columns
    attr_accessor :import_warnings

    # @param source [File] spreadsheet to import
    # @param event [Event] event receiving the imported races and results
    def initialize(source, event)
      self.event = event
      self.custom_columns = Set.new
      self.import_warnings = Set.new
      self.source = source
    end

    # Reads the whole file inside a transaction, creating races and results.
    # See http://racingonrails.rocketsurgeryllc.com/sample_import_files/ for format details and examples.
    def import
      ActiveSupport::Notifications.instrument "import.results_file.racing_on_rails", source: source.try(:path) do
        Event.transaction do
          race = nil
          table = Tabular::Table.new
          table.column_mapper = Results::ColumnMapper.new
          table.read source
          table.delete_blank_columns!
          table.delete_blank_rows!
          add_custom_columns table
          assert_columns! table
          table.rows.each do |row|
            race = import_row(row, race, table.columns.map(&:key))
          end
        end
        # Keep at most 10 warnings. Assign through the writer: a bare
        # `import_warnings = ...` would only create a local variable shadowing
        # the accessor, losing the truncation and making the count below zero.
        self.import_warnings = import_warnings.to_a.take(10)
      end
      ActiveSupport::Notifications.instrument "warnings.import.results_file.racing_on_rails", warnings_count: import_warnings.to_a.size
    end

    # Imports one row: a category row starts a new race, a result row adds a
    # result to the current race. Returns the (possibly new) current race.
    def import_row(row, race, columns)
      Rails.logger.debug("Results::ResultsFile #{Time.zone.now} row #{row.to_hash}") if debug?
      if race?(row)
        race = find_or_create_race(row, columns)
      elsif result?(row)
        create_result row, race
      end
      race
    end

    # Does this row start a new race (category header)?
    # Won't correctly detect races that only have DQs or DNSs.
    def race?(row)
      return false if row.last?
      row.next &&
        category_name_from_row(row).present? &&
        !row[:place].to_s.upcase.in?(%w{ DNS DQ DNF}) &&
        row.next[:place] &&
        row.next[:place].to_i == 1 &&
        (row.previous.nil? || result?(row.previous))
    end

    # Finds the event's race for this category (clearing its old results), or
    # builds a new one. Persists the race and its result columns.
    def find_or_create_race(row, columns)
      category = Category.find_or_create_by_normalized_name(category_name_from_row(row))
      race = event.races.detect { |r| r.category == category }
      if race
        race.results.clear
      else
        race = event.races.build(category: category, notes: notes(row), custom_columns: custom_columns.to_a)
      end
      race.result_columns = columns.map(&:to_s)
      race.save!
      ActiveSupport::Notifications.instrument "find_or_create_race.import.results_file.racing_on_rails", race_name: race.name, race_id: race.id
      race
    end

    # Does this row contain a result (any identifying field present)?
    def result?(row)
      return true if row[:place].present? || row[:number].present? || row[:license].present? || row[:team_name].present?
      if !(row[:first_name].blank? && row[:last_name].blank? && row[:name].blank?)
        return true
      end
      false
    end

    # Builds and saves a result for +race+ from +row+. Returns nil (with a
    # warning logged) when no race header has been seen yet.
    def create_result(row, race)
      if race.nil?
        Rails.logger.warn "No race. Skipping result file row."
        return nil
      end
      result = race.results.build(result_methods(row, race))
      result.updated_by = @event.name
      if same_time?(row)
        result.time = race.results[race.results.size - 2].time
      end
      set_place result, row
      set_age_group result, row
      result.cleanup
      result.save!
      Rails.logger.debug("Results::ResultsFile #{Time.zone.now} create result #{race} #{result.place}") if debug?
      result
    end

    # Splits the row into Result attributes and custom (non-model) columns.
    # Time values in custom columns are serialized as HH:MM:SS strings.
    def result_methods(row, race)
      attributes = row.to_hash.dup
      custom_attributes = {}
      attributes.delete_if do |key, value|
        _key = key.to_s.to_sym
        if custom_columns.include?(_key)
          custom_attributes[_key] = case value
                                    when Time
                                      value.strftime "%H:%M:%S"
                                    else
                                      value
                                    end
          true
        else
          false
        end
      end
      attributes.merge! custom_attributes: custom_attributes
      attributes
    end

    # Frozen blank Result used to decide which columns map to Result methods.
    def prototype_result
      @prototype_result ||= Result.new.freeze
    end

    def debug?
      ENV["DEBUG_RESULTS"].present? && Rails.logger.debug?
    end

    private

    # The category name is always the first cell of a category row.
    def category_name_from_row(row)
      row.first
    end

    # Race notes: the explicit :notes column, else all non-blank cells after
    # the category name joined with commas.
    def notes(row)
      if row[:notes].present?
        row[:notes]
      else
        cells = row.to_a
        cells[1, cells.size].select(&:present?).join(", ")
      end
    end

    def strip_quotes(string)
      if string.present?
        string = string.strip
        string = string.gsub(/^"/, '')
        string = string.gsub(/"$/, '')
      end
      string
    end

    # Normalizes the result's place and warns about out-of-sequence placings.
    def set_place(result, row)
      if result.numeric_place?
        result.place = result.numeric_place
        if race?(row) && result.place != 1
          self.import_warnings << "First racer #{row[:first_name]} #{row[:last_name]} should be first place racer. "
        # If there is a previous row and the current place is not one more than
        # the previous place, flag a sequence error.
        elsif !race?(row) && row.previous && row.previous[:place].present? && row.previous[:place].to_i != (result.place - 1)
          self.import_warnings << "Non-sequential placings detected for racer: #{row[:first_name]} #{row[:last_name]}. " unless row[:category_name].to_s.downcase.include?("tandem") # or event is TTT or ???
        end
      elsif result.place.present?
        # Spreadsheet cells may deliver the place as a Float; coerce to String
        # first — Float does not respond to #upcase.
        result.place = result.place.to_s.upcase
      # Guard row.previous: the first row of a file has no previous row.
      elsif row.previous && row.previous[:place].present? && row.previous[:place].to_i == 0
        result.place = row.previous[:place]
      end
    end

    # USAC format input may contain an age range in the age column for juniors.
    def set_age_group(result, row)
      if row[:age].present? && /\d+-\d+/ =~ row[:age].to_s
        result.age = nil
        result.age_group = row[:age]
      end
      result
    end

    def to_column_name(cell)
      cell = cell.downcase.
        underscore.
        gsub(" ", "_")
      if COLUMN_MAP[cell]
        COLUMN_MAP[cell]
      else
        cell
      end
    end

    # Any column that is not a Result attribute becomes a custom column.
    def add_custom_columns(table)
      table.columns.each do |column|
        if column.key && !result_method?(column.key)
          custom_columns << column.key
        end
      end
    end

    def result_method?(column_name)
      prototype_result.respond_to?(column_name.to_sym)
    end

    # Records warnings (without aborting) when required columns are missing.
    def assert_columns!(table)
      keys = table.columns.map(&:key)
      unless keys.include?(:place)
        import_warnings << "No place column. Place is required."
      end
      # :last_name (not :lastname) is the key used everywhere else in this class.
      unless keys.include?(:name) || (keys.include?(:first_name) && keys.include?(:last_name)) || keys.include?(:team_name)
        import_warnings << "No name column. Name, first name, last name or team name is required."
      end
    end

    # A blank time or an "st"/"s.t." marker means "same time as previous rider".
    def same_time?(row)
      return false unless row.previous
      return true if row[:time].blank?
      if row[:time].present?
        row_time = row[:time].try(:to_s)
        row_time && (row_time[/st/i] || row_time[/s\.t\./i])
      end
    end
  end
end
Ensure imported result place is a string before upcasing.
Prevent error in the rare case that place is a float.
module Results
  # Imports an Excel/CSV results file into an Event's races.
  #
  # Result time limited to hundredths of seconds.
  #
  # Notes example:
  #   Senior Men Pro/1/2 | Field size: 79 riders | Laps: 2
  #
  # Set DEBUG_RESULTS to toggle expensive debug logging.
  # E.g., DEBUG_RESULTS=yes ./script/server
  class ResultsFile
    attr_accessor :event
    attr_accessor :source
    attr_accessor :custom_columns
    attr_accessor :import_warnings

    # @param source [File] spreadsheet to import
    # @param event [Event] event receiving the imported races and results
    def initialize(source, event)
      self.event = event
      self.custom_columns = Set.new
      self.import_warnings = Set.new
      self.source = source
    end

    # Reads the whole file inside a transaction, creating races and results.
    # See http://racingonrails.rocketsurgeryllc.com/sample_import_files/ for format details and examples.
    def import
      ActiveSupport::Notifications.instrument "import.results_file.racing_on_rails", source: source.try(:path) do
        Event.transaction do
          race = nil
          table = Tabular::Table.new
          table.column_mapper = Results::ColumnMapper.new
          table.read source
          table.delete_blank_columns!
          table.delete_blank_rows!
          add_custom_columns table
          assert_columns! table
          table.rows.each do |row|
            race = import_row(row, race, table.columns.map(&:key))
          end
        end
        # NOTE(review): this assigns a new *local* variable, shadowing the
        # accessor — the truncation is lost and warnings_count below always
        # reports 0. Should this be `self.import_warnings = ...`? Confirm.
        import_warnings = import_warnings.to_a.take(10)
      end
      ActiveSupport::Notifications.instrument "warnings.import.results_file.racing_on_rails", warnings_count: import_warnings.to_a.size
    end

    # Imports one row: a category row starts a new race, a result row adds a
    # result to the current race. Returns the (possibly new) current race.
    def import_row(row, race, columns)
      Rails.logger.debug("Results::ResultsFile #{Time.zone.now} row #{row.to_hash}") if debug?
      if race?(row)
        race = find_or_create_race(row, columns)
      elsif result?(row)
        create_result row, race
      end
      race
    end

    # Does this row start a new race (category header)?
    def race?(row)
      return false if row.last?
      # Won't correctly detect races that only have DQs or DNSs
      row.next &&
        category_name_from_row(row).present? &&
        !row[:place].to_s.upcase.in?(%w{ DNS DQ DNF}) &&
        row.next[:place] &&
        row.next[:place].to_i == 1 &&
        (row.previous.nil? || result?(row.previous))
    end

    # Finds the event's race for this category (clearing its old results), or
    # builds a new one. Persists the race and its result columns.
    def find_or_create_race(row, columns)
      category = Category.find_or_create_by_normalized_name(category_name_from_row(row))
      race = event.races.detect { |r| r.category == category }
      if race
        race.results.clear
      else
        race = event.races.build(category: category, notes: notes(row), custom_columns: custom_columns.to_a)
      end
      race.result_columns = columns.map(&:to_s)
      race.save!
      ActiveSupport::Notifications.instrument "find_or_create_race.import.results_file.racing_on_rails", race_name: race.name, race_id: race.id
      race
    end

    # Does this row contain a result (any identifying field present)?
    def result?(row)
      return true if row[:place].present? || row[:number].present? || row[:license].present? || row[:team_name].present?
      if !(row[:first_name].blank? && row[:last_name].blank? && row[:name].blank?)
        return true
      end
      false
    end

    # Builds and saves a result for +race+ from +row+. Returns nil (with a
    # warning logged) when no race header has been seen yet.
    def create_result(row, race)
      if race.nil?
        Rails.logger.warn "No race. Skipping result file row."
        return nil
      end
      result = race.results.build(result_methods(row, race))
      result.updated_by = @event.name
      if same_time?(row)
        result.time = race.results[race.results.size - 2].time
      end
      set_place result, row
      set_age_group result, row
      result.cleanup
      result.save!
      Rails.logger.debug("Results::ResultsFile #{Time.zone.now} create result #{race} #{result.place}") if debug?
      result
    end

    # Splits the row into Result attributes and custom (non-model) columns.
    # Time values in custom columns are serialized as HH:MM:SS strings.
    def result_methods(row, race)
      attributes = row.to_hash.dup
      custom_attributes = {}
      attributes.delete_if do |key, value|
        _key = key.to_s.to_sym
        if custom_columns.include?(_key)
          custom_attributes[_key] = case value
                                    when Time
                                      value.strftime "%H:%M:%S"
                                    else
                                      value
                                    end
          true
        else
          false
        end
      end
      attributes.merge! custom_attributes: custom_attributes
      attributes
    end

    # Frozen blank Result used to decide which columns map to Result methods.
    def prototype_result
      @prototype_result ||= Result.new.freeze
    end

    def debug?
      ENV["DEBUG_RESULTS"].present? && Rails.logger.debug?
    end

    private

    # The category name is always the first cell of a category row.
    def category_name_from_row(row)
      row.first
    end

    # Race notes: the explicit :notes column, else all non-blank cells after
    # the category name joined with commas.
    def notes(row)
      if row[:notes].present?
        row[:notes]
      else
        cells = row.to_a
        cells[1, cells.size].select(&:present?).join(", ")
      end
    end

    def strip_quotes(string)
      if string.present?
        string = string.strip
        string = string.gsub(/^"/, '')
        string = string.gsub(/"$/, '')
      end
      string
    end

    # Normalizes the result's place and warns about out-of-sequence placings.
    def set_place(result, row)
      if result.numeric_place?
        result.place = result.numeric_place
        if race?(row) && result.place != 1
          self.import_warnings << "First racer #{row[:first_name]} #{row[:last_name]} should be first place racer. "
        # if we have a previous row and the current place is not one more than the previous place, then sequence error.
        elsif !race?(row) && row.previous && row.previous[:place].present? && row.previous[:place].to_i != (result.place - 1)
          self.import_warnings << "Non-sequential placings detected for racer: #{row[:first_name]} #{row[:last_name]}. " unless row[:category_name].to_s.downcase.include?("tandem") # or event is TTT or ???
        end
      elsif result.place.present?
        # to_s guards against Float places from spreadsheet cells.
        result.place = result.place.to_s.upcase
      # NOTE(review): row.previous may be nil here (first row) — would raise
      # NoMethodError. Confirm whether a guard is needed.
      elsif row.previous[:place].present? && row.previous[:place].to_i == 0
        result.place = row.previous[:place]
      end
    end

    # USAC format input may contain an age range in the age column for juniors.
    def set_age_group(result, row)
      if row[:age].present? && /\d+-\d+/ =~ row[:age].to_s
        result.age = nil
        result.age_group = row[:age]
      end
      result
    end

    def to_column_name(cell)
      cell = cell.downcase.
        underscore.
        gsub(" ", "_")
      if COLUMN_MAP[cell]
        COLUMN_MAP[cell]
      else
        cell
      end
    end

    # Any column that is not a Result attribute becomes a custom column.
    def add_custom_columns(table)
      table.columns.each do |column|
        if column.key && !result_method?(column.key)
          custom_columns << column.key
        end
      end
    end

    def result_method?(column_name)
      prototype_result.respond_to?(column_name.to_sym)
    end

    # Records warnings (without aborting) when required columns are missing.
    # NOTE(review): :lastname looks like a typo for :last_name (the key used
    # everywhere else in this class) — confirm against ColumnMapper.
    def assert_columns!(table)
      keys = table.columns.map(&:key)
      unless keys.include?(:place)
        import_warnings << "No place column. Place is required."
      end
      unless keys.include?(:name) || (keys.include?(:first_name) && keys.include?(:lastname)) || keys.include?(:team_name)
        import_warnings << "No name column. Name, first name, last name or team name is required."
      end
    end

    # A blank time or an "st"/"s.t." marker means "same time as previous rider".
    def same_time?(row)
      return false unless row.previous
      return true if row[:time].blank?
      if row[:time].present?
        row_time = row[:time].try(:to_s)
        row_time && (row_time[/st/i] || row_time[/s\.t\./i])
      end
    end
  end
end
module Setup
module SchemaHandler
# Abstract: classes including SchemaHandler must provide their JSON schema.
#
# @raise [NotImplementedError] always, unless overridden
def schema
  raise NotImplementedError
end
# Truthy test for an object schema: false unless 'type' is 'object', otherwise
# the 'properties' entry itself (nil when absent, the hash when present).
def object_schema?(schema)
  return false unless schema['type'] == 'object'
  schema['properties']
end
# Returns this record's schema merged with its 'extends' base schema (if any),
# after normalizing properties via #check_properties. Returns nil when
# merge_schema yields nothing.
def merged_schema(options = {})
  if (sch = merge_schema(schema, options))
    unless (base_sch = sch.delete('extends')).nil? || (base_sch = find_ref_schema(base_sch)).nil?
      # Arrays inside the schemas are merged pairwise rather than replaced.
      sch = base_sch.deep_merge(sch) { |_, val1, val2| Cenit::Utility.array_hash_merge(val1, val2) }
    end
    check_properties(sch)
  end
  sch
end
# Normalizes an object (or array-of-objects) schema in place:
# * unifies 'id'/'_id' into a single required, unique '_id' property
# * renames properties that are not valid Ruby method names (or the reserved
#   word 'object'), recording the original name as an 'edi' segment
# * applies those renames to the 'required' and 'protected' modifier lists
# * recurses into nested property schemas
# Returns the (mutated) json_schema.
def check_properties(json_schema)
  object_schema =
    case json_schema['type']
    when 'object'
      json_schema
    when 'array'
      json_schema['items']
    else
      nil
    end
  if object_schema && object_schema.is_a?(Hash) && object_schema['type'] == 'object' && (properties = object_schema['properties'])
    # Check #id property
    _id, id = properties.delete('_id'), properties.delete('id')
    fail Exception, 'Defining both id and _id' if _id && id
    if _id ||= id
      # Only metadata keys are allowed on an id; any leftover key or an
      # object/array type makes the id invalid.
      naked_id = _id.reject { |k, _| %w(unique title description edi format example enum readOnly default).include?(k) }
      type = naked_id.delete('type')
      fail Exception, "Invalid id property type #{id}" unless naked_id.empty? && (type.nil? || !%w(object array).include?(type))
      # Reinsert _id first, with the canonical id metadata.
      object_schema['properties'] = properties = { '_id' => _id.merge('unique' => true,
                                                                     'title' => 'Id',
                                                                     'description' => 'Required',
                                                                     'edi' => { 'segment' => 'id' }) }.merge(properties)
      unless (required = object_schema['required']).present?
        required = object_schema['required'] = []
      end
      required.delete('_id')
      required.delete('id')
      required.unshift('_id')
    end
    # Check property names
    new_names = {}
    new_properties = {}
    properties.keys.each do |property|
      property_schema = properties.delete(property)
      new_property = property
      if property == 'object' || !(property =~ /\A[A-Za-z_]\w*\Z/)
        # Derive a safe name ('obj' for the reserved word, a method-name
        # transform otherwise) and de-duplicate with a numeric suffix.
        c = 1
        new_property = prefix = (property == 'object') ? 'obj' : property.to_s.to_method_name
        while new_properties.has_key?(new_property) || properties.has_key?(new_property)
          new_property = "#{prefix}_#{c += 1}"
        end
        property_schema['edi'] = { 'segment' => property }
        new_names[property] = new_property
      end
      new_properties[new_property] = property_schema
    end
    new_properties.each { |property, schema| properties[property] = schema }
    %w(required protected).each do |modifier_key|
      if (modifier = object_schema[modifier_key])
        new_names.each do |old_name, new_name|
          modifier << new_name if modifier.delete(old_name)
        end
      end
    end
    # Check recursively
    properties.each { |_, property_schema| check_properties(property_schema) if property_schema.is_a?(Hash) }
  end
  json_schema
end
# Strict variant of merge_schema: silent mode is forced off, so circular
# or unresolved references raise instead of being swallowed.
def merge_schema!(schema, options = {})
  merge_schema(schema, options.merge(silent: false))
end
# Public wrapper around the private do_merge_schema routine.
def merge_schema(schema, options = {})
  do_merge_schema(schema, options)
end
# Looks up a data type by reference within a namespace (defaults to this
# handler's own namespace), delegating to Setup::Optimizer.
def find_data_type(ref, ns = self.namespace)
  Setup::Optimizer.find_data_type(ref, ns)
end
# Resolves a schema reference to a schema hash, or nil when unresolvable.
# '#'-prefixed refs are embedded JSON-pointer-style paths resolved inside
# root_schema; anything else is treated as a data type reference.
# NOTE(review): the inline `rescue nil` maps any embedded-lookup error to
# nil, which callers interpret as "unresolved reference".
def find_ref_schema(ref, root_schema = schema)
  if ref.is_a?(String) && ref.start_with?('#')
    get_embedded_schema(ref, root_schema)[1] rescue nil
  else
    (data_type = find_data_type(ref)) &&
      data_type.schema
  end
end
# Walks an embedded reference like '#/definitions/foo/properties/bar'
# through root_schema and returns [derived_type_name, schema_hash].
# Path tokens must alternate a 'properties'/'definitions' keyword with a
# name; the type name is built by camelizing each name segment (prefixed
# with root_name when given). Raises on malformed refs, on a bare '#',
# and when the walk does not end on a Hash.
def get_embedded_schema(ref, root_schema, root_name='')
  raise Exception.new("invalid format for embedded reference #{ref}") unless ref =~ /\A#(\/[a-z]+(_|([0-9]|[a-z])+)*)*\Z/
  raise Exception.new("embedding itself (referencing '#')") if ref.eql?('#')
  tokens = ref.split('/')
  # Drop the leading '#'.
  tokens.shift
  type = root_name
  while tokens.present?
    token = tokens.shift
    # NOTE(review): a nil root_schema short-circuits before the
    # assignment and passes this guard; only the final is_a?(Hash) check
    # below rejects such a walk — confirm this is intended.
    raise Exception.new("use invalid embedded reference path '#{ref}'") unless (root_schema.nil? || root_schema = root_schema[token]) && (%w{properties definitions}.include?(token) && !tokens.empty?)
    token = tokens.shift
    raise Exception.new("use invalid embedded reference path '#{ref}'") unless (root_schema.nil? || root_schema = root_schema[token])
    type = root_name.empty? ? token.camelize : "#{type}::#{token.camelize}"
  end
  raise Exception.new("use invalid embedded reference path '#{ref}'") unless root_schema.is_a?(Hash)
  [type, root_schema]
end
# Validates an embedded reference (raising on malformed paths) and
# returns only the derived type name, discarding the schema.
def check_embedded_ref(ref, root_schema, root_name='')
  get_embedded_schema(ref, root_schema, root_name).first
end
private
# Core merge routine: resolves 'extends' chains and '$ref' references in
# a schema hash, iterating until a fixed point is reached. Options:
#   :root_schema     — root for embedded '#/...' refs (defaults to schema)
#   :silent          — swallow circular/unresolved refs (default true)
#   :expand_extends  — expand 'extends' bases fully (the default mode)
#   :only_overriders — keep only base properties the schema overrides
#   :keep_ref        — leave the first '$ref' in place instead of inlining
#   :recursive / :until_merge — control recursion into nested schemas
# Arrays are merged element-wise; non-Hash inputs pass through untouched.
def do_merge_schema(schema, options = {})
  if schema.is_a?(Array)
    return schema.collect { |sch| do_merge_schema(sch, options) }
  end
  return schema unless schema.is_a?(Hash)
  # Work on a deep copy so callers' schemas are never mutated.
  schema = schema.deep_dup
  options ||= {}
  options[:root_schema] ||= schema
  options[:silent] = true if options[:silent].nil?
  references = Set.new
  merging = true
  merged = false
  while merging
    merging = false
    if (options[:expand_extends].nil? && options[:only_overriders].nil?) || options[:expand_extends]
      # Fully expand every 'extends' base into this schema.
      while (base_model = schema.delete('extends'))
        merged = merging = true
        base_model = find_ref_schema(base_model) if base_model.is_a?(String)
        base_model = do_merge_schema(base_model)
        if schema['type'] == 'object' && base_model['type'] != 'object'
          # An object schema extending a non-object base: the base is
          # wrapped as a 'value' property rendered as XML content.
          schema['properties'] ||= {}
          value_schema = schema['properties']['value'] || {}
          value_schema = base_model.deep_merge(value_schema)
          schema['properties']['value'] = value_schema.merge('title' => 'Value', 'xml' => { 'content' => true })
          schema['xml'] ||= {}
          schema['xml']['content_property'] = 'value'
        else
          # Drop a local content_property when the base already has one.
          unless (xml_opts = schema['xml']).nil? || xml_opts['content_property']
            schema['xml'].delete('content_property') if (xml_opts = base_model['xml']) && xml_opts['content_property']
          end
          # Deep-merge so that locally defined values win over the base.
          schema = base_model.deep_merge(schema) { |_, val1, val2| Cenit::Utility.array_hash_merge(val1, val2) }
        end
      end
    elsif options[:only_overriders]
      while (base_model = schema.delete('extends') || options.delete(:extends))
        merged = merging = true
        base_model = find_ref_schema(base_model) if base_model.is_a?(String)
        base_model = do_merge_schema(base_model)
        # Preserve the base's own 'extends' for the next iteration.
        schema['extends'] = base_model['extends'] if base_model['extends']
        if (base_properties = base_model['properties'])
          properties = schema['properties'] || {}
          # Keep only base properties the local schema actually overrides.
          base_properties.reject! { |property_name, _| properties[property_name].nil? }
          schema = { 'properties' => base_properties }.deep_merge(schema) do |_, val1, val2|
            Cenit::Utility.array_hash_merge(val1, val2)
          end unless base_properties.blank?
        end
      end
    end
    # Inline '$ref' entries, tracking visited refs to detect cycles.
    while (refs = schema['$ref'])
      merged = merging = true
      refs = [refs] unless refs.is_a?(Array)
      refs.each do |ref|
        if references.include?(ref)
          # Cycle: silently drop the ref, or raise in strict mode.
          if options[:silent]
            schema.delete('$ref')
          else
            raise Exception.new("contains a circular reference #{ref}")
          end
        else
          references << ref
        end
      end
      # Rebuild the schema, splicing referenced schemas in at the point
      # where the '$ref' key appeared.
      sch = {}
      schema.each do |key, value|
        if key == '$ref' && (!options[:keep_ref] || sch[key])
          value = [value] unless value.is_a?(Array)
          value.each do |ref|
            if (ref_sch = find_ref_schema(ref))
              # reverse_merge: values already collected locally take
              # precedence over the referenced schema's values.
              sch = sch.reverse_merge(ref_sch) { |_, val1, val2| Cenit::Utility.array_hash_merge(val1, val2) }
            else
              raise Exception.new("contains an unresolved reference #{value}") unless options[:silent]
            end
          end
        else
          # Combine with anything a previous ref already contributed.
          case existing_value = sch[key]
          when Hash
            if value.is_a?(Hash)
              value = existing_value.deep_merge(value) { |_, val1, val2| Cenit::Utility.array_hash_merge(val1, val2) }
            end
          when Array
            value = value + existing_value if value.is_a?(Array)
          end
          sch[key] = value
        end
      end
      schema = sch
    end
  end
  # Optionally recurse into nested hashes/arrays.
  schema.each do |key, val|
    if val.is_a?(Hash)
      schema[key] = do_merge_schema(val, options)
    elsif val.is_a?(Array)
      schema[key] = val.collect { |sub_val| sub_val.is_a?(Hash) ? do_merge_schema(sub_val, options) : sub_val }
    end
  end if options[:recursive] || (options[:until_merge] && !merged)
  schema
end
end
end
Revision note: fixing schema merging — the copy of Setup::SchemaHandler below changes the merge precedence used when resolving `$ref` and `extends` inside `do_merge_schema` (see the changed deep_merge/merge block arguments).
module Setup
module SchemaHandler
def schema
fail NotImplementedError
end
def object_schema?(schema)
schema['type'] == 'object' && schema['properties']
end
def merged_schema(options = {})
if (sch = merge_schema(schema, options))
unless (base_sch = sch.delete('extends')).nil? || (base_sch = find_ref_schema(base_sch)).nil?
sch = base_sch.deep_merge(sch) { |_, val1, val2| Cenit::Utility.array_hash_merge(val1, val2) }
end
check_properties(sch)
end
sch
end
def check_properties(json_schema)
object_schema =
case json_schema['type']
when 'object'
json_schema
when 'array'
json_schema['items']
else
nil
end
if object_schema && object_schema.is_a?(Hash) && object_schema['type'] == 'object' && (properties = object_schema['properties'])
# Check #id property
_id, id = properties.delete('_id'), properties.delete('id')
fail Exception, 'Defining both id and _id' if _id && id
if _id ||= id
naked_id = _id.reject { |k, _| %w(unique title description edi format example enum readOnly default).include?(k) }
type = naked_id.delete('type')
fail Exception, "Invalid id property type #{id}" unless naked_id.empty? && (type.nil? || !%w(object array).include?(type))
object_schema['properties'] = properties = { '_id' => _id.merge('unique' => true,
'title' => 'Id',
'description' => 'Required',
'edi' => { 'segment' => 'id' }) }.merge(properties)
unless (required = object_schema['required']).present?
required = object_schema['required'] = []
end
required.delete('_id')
required.delete('id')
required.unshift('_id')
end
# Check property names
new_names = {}
new_properties = {}
properties.keys.each do |property|
property_schema = properties.delete(property)
new_property = property
if property == 'object' || !(property =~ /\A[A-Za-z_]\w*\Z/)
c = 1
new_property = prefix = (property == 'object') ? 'obj' : property.to_s.to_method_name
while new_properties.has_key?(new_property) || properties.has_key?(new_property)
new_property = "#{prefix}_#{c += 1}"
end
property_schema['edi'] = { 'segment' => property }
new_names[property] = new_property
end
new_properties[new_property] = property_schema
end
new_properties.each { |property, schema| properties[property] = schema }
%w(required protected).each do |modifier_key|
if (modifier = object_schema[modifier_key])
new_names.each do |old_name, new_name|
modifier << new_name if modifier.delete(old_name)
end
end
end
# Check recursively
properties.each { |_, property_schema| check_properties(property_schema) if property_schema.is_a?(Hash) }
end
json_schema
end
def merge_schema!(schema, options = {})
merge_schema(schema, options.merge(silent: false))
end
def merge_schema(schema, options = {})
do_merge_schema(schema, options)
end
def find_data_type(ref, ns = self.namespace)
Setup::Optimizer.find_data_type(ref, ns)
end
def find_ref_schema(ref, root_schema = schema)
if ref.is_a?(String) && ref.start_with?('#')
get_embedded_schema(ref, root_schema)[1] rescue nil
else
(data_type = find_data_type(ref)) &&
data_type.schema
end
end
def get_embedded_schema(ref, root_schema, root_name='')
raise Exception.new("invalid format for embedded reference #{ref}") unless ref =~ /\A#(\/[a-z]+(_|([0-9]|[a-z])+)*)*\Z/
raise Exception.new("embedding itself (referencing '#')") if ref.eql?('#')
tokens = ref.split('/')
tokens.shift
type = root_name
while tokens.present?
token = tokens.shift
raise Exception.new("use invalid embedded reference path '#{ref}'") unless (root_schema.nil? || root_schema = root_schema[token]) && (%w{properties definitions}.include?(token) && !tokens.empty?)
token = tokens.shift
raise Exception.new("use invalid embedded reference path '#{ref}'") unless (root_schema.nil? || root_schema = root_schema[token])
type = root_name.empty? ? token.camelize : "#{type}::#{token.camelize}"
end
raise Exception.new("use invalid embedded reference path '#{ref}'") unless root_schema.is_a?(Hash)
[type, root_schema]
end
def check_embedded_ref(ref, root_schema, root_name='')
type, _ = get_embedded_schema(ref, root_schema, root_name)
type
end
private
def do_merge_schema(schema, options = {})
if schema.is_a?(Array)
return schema.collect { |sch| do_merge_schema(sch, options) }
end
return schema unless schema.is_a?(Hash)
schema = schema.deep_dup
options ||= {}
options[:root_schema] ||= schema
options[:silent] = true if options[:silent].nil?
references = Set.new
merging = true
merged = false
while merging
merging = false
if (options[:expand_extends].nil? && options[:only_overriders].nil?) || options[:expand_extends]
while (base_model = schema.delete('extends'))
merged = merging = true
base_model = find_ref_schema(base_model) if base_model.is_a?(String)
base_model = do_merge_schema(base_model)
if schema['type'] == 'object' && base_model['type'] != 'object'
schema['properties'] ||= {}
value_schema = schema['properties']['value'] || {}
value_schema = base_model.deep_merge(value_schema)
schema['properties']['value'] = value_schema.merge('title' => 'Value', 'xml' => { 'content' => true })
schema['xml'] ||= {}
schema['xml']['content_property'] = 'value'
else
unless (xml_opts = schema['xml']).nil? || xml_opts['content_property']
schema['xml'].delete('content_property') if (xml_opts = base_model['xml']) && xml_opts['content_property']
end
schema = base_model.deep_merge(schema) { |_, ref_value, sch_value| Cenit::Utility.array_hash_merge(ref_value, sch_value) }
end
end
elsif options[:only_overriders]
while (base_model = schema.delete('extends') || options.delete(:extends))
merged = merging = true
base_model = find_ref_schema(base_model) if base_model.is_a?(String)
base_model = do_merge_schema(base_model)
schema['extends'] = base_model['extends'] if base_model['extends']
if (base_properties = base_model['properties'])
properties = schema['properties'] || {}
base_properties.reject! { |property_name, _| properties[property_name].nil? }
schema = { 'properties' => base_properties }.deep_merge(schema) do |_, ref_value, sch_value|
Cenit::Utility.array_hash_merge(ref_value, sch_value)
end unless base_properties.blank?
end
end
end
while (refs = schema['$ref'])
merged = merging = true
refs = [refs] unless refs.is_a?(Array)
refs.each do |ref|
if references.include?(ref)
if options[:silent]
schema.delete('$ref')
else
raise Exception.new("contains a circular reference #{ref}")
end
else
references << ref
end
end
sch = {}
schema.each do |key, value|
if key == '$ref' && (!options[:keep_ref] || sch[key])
value = [value] unless value.is_a?(Array)
value.each do |ref|
if (ref_sch = find_ref_schema(ref))
sch = ref_sch.merge(sch) { |_, ref_value, sch_value| Cenit::Utility.array_hash_merge(sch_value, ref_value) }
else
raise Exception.new("contains an unresolved reference #{value}") unless options[:silent]
end
end
else
case existing_value = sch[key]
when Hash
if value.is_a?(Hash)
value = existing_value.deep_merge(value) { |_, sch_value, ref_value| Cenit::Utility.array_hash_merge(sch_value, ref_value) }
end
when Array
value = value + existing_value if value.is_a?(Array)
end
sch[key] = value
end
end
schema = sch
end
end
schema.each do |key, val|
if val.is_a?(Hash)
schema[key] = do_merge_schema(val, options)
elsif val.is_a?(Array)
schema[key] = val.collect { |sub_val| sub_val.is_a?(Hash) ? do_merge_schema(sub_val, options) : sub_val }
end
end if options[:recursive] || (options[:until_merge] && !merged)
schema
end
end
end
# This model is the master routine for uploading products
# Requires Paperclip and CSV to upload the CSV file and read it nicely.
# Original Author:: Josh McArthur
# License:: MIT
module Spree
class ProductError < StandardError; end;
class ImportError < StandardError; end;
class SkuError < StandardError; end;
class ProductImport < ActiveRecord::Base
attr_accessible :data_file, :data_file_file_name, :data_file_content_type, :data_file_file_size, :data_file_updated_at, :product_ids, :state, :failed_at, :completed_at
has_attached_file :data_file, :path => ":rails_root/lib/etc/product_data/data-files/:basename.:extension"
validates_attachment_presence :data_file
after_destroy :destroy_products
serialize :product_ids, Array
cattr_accessor :settings
# All products created by this import run (ids are serialized on the
# record in product_ids).
def products
  Spree::Product.where :id => product_ids
end
require 'csv'
require 'pp'
require 'open-uri'
# after_destroy callback: deleting an import also deletes every product
# it created.
def destroy_products
  products.destroy_all
end
# Import lifecycle: created -> started -> (completed | failed).
state_machine :initial => :created do
  event :start do
    transition :to => :started, :from => :created
  end
  event :complete do
    transition :to => :completed, :from => :started
  end
  event :failure do
    transition :to => :failed, :from => :started
  end
  # A failed import forgets its products and records the failure time.
  before_transition :to => [:failed] do |import|
    import.product_ids = []
    import.failed_at = Time.now
    import.completed_at = nil
  end
  # Completion clears any stale failure timestamp.
  before_transition :to => [:completed] do |import|
    import.failed_at = nil
    import.completed_at = Time.now
  end
end
# The timestamp that best describes the import's current state: the
# failure time, the completion time, or "now" while still in progress.
def state_datetime
  return failed_at if failed?
  return completed_at if completed?
  Time.now
end
## Data Importing:
# List Price maps to Master Price, Current MAP to Cost Price, Net 30 Cost unused
# Width, height, Depth all map directly to object
# Image main is created independtly, then each other image also created and associated with the product
# Meta keywords and description are created on the product model
# Entry point for an import run. Wraps _import_data in a DB transaction
# unless _transaction is false. On any error the failure is logged, the
# state machine is moved to :failed, and the error is re-raised as
# ImportError. (The broad Exception rescue is deliberate here: the error
# is always logged and re-raised, never swallowed.)
def import_data!(_transaction=true)
  start
  if _transaction
    transaction do
      _import_data
    end
  else
    _import_data
  end
rescue Exception => exp
  # Fixed: the original joined the backtrace with '\n' (single-quoted,
  # i.e. a literal backslash-n), producing a one-line log entry; use a
  # real newline instead.
  log("An error occurred during import, please check file and try again. (#{exp.message})\n#{exp.backtrace.join("\n")}", :error)
  failure
  raise ImportError, exp.message
end
# Reads the attached CSV and creates a product (or a variant of an
# existing product) per row. Column mapping comes either from the header
# row or from the configured :column_mappings. Returns a flash-style
# [:notice, message] pair on success; invokes the :complete event.
def _import_data
  begin
    #Get products *before* import -
    @products_before_import = Spree::Product.all
    @names_of_products_before_import = @products_before_import.map(&:name)
    rows = CSV.read(self.data_file.path)
    if Spree::ProductImport.settings[:first_row_is_headings]
      col = get_column_mappings(rows[0])
    else
      col = Spree::ProductImport.settings[:column_mappings]
    end
    log("Importing products for #{self.data_file_file_name} began at #{Time.now}")
    # NOTE(review): if :rows_to_skip exceeds the row count this slice is
    # nil and the each call raises — confirm settings are sane upstream.
    rows[Spree::ProductImport.settings[:rows_to_skip]..-1].each do |row|
      product_information = {}
      #Automatically map 'mapped' fields to a collection of product information.
      #NOTE: This code will deal better with the auto-mapping function - i.e. if there
      #are named columns in the spreadsheet that correspond to product
      # and variant field names.
      col.each do |key, value|
        #Trim whitespace off the beginning and end of row fields
        row[value].try :strip!
        product_information[key] = row[value]
      end
      #Manually set available_on if it is not already set
      product_information[:available_on] = Date.today - 1.day if product_information[:available_on].nil?
      log("#{pp product_information}")
      variant_comparator_field = Spree::ProductImport.settings[:variant_comparator_field].try :to_sym
      variant_comparator_column = col[variant_comparator_field]
      # NOTE(review): the trailing `and p = ...` assigns inside the
      # condition — low-precedence `and` makes the assignment's result
      # gate the branch. Intentional, but fragile.
      if Spree::ProductImport.settings[:create_variants] and variant_comparator_column and
        p = Spree::Product.where(variant_comparator_field => row[variant_comparator_column]).first
        log("found product with this field #{variant_comparator_field}=#{row[variant_comparator_column]}")
        p.update_attribute(:deleted_at, nil) if p.deleted_at #Un-delete product if it is there
        p.variants.each { |variant| variant.update_attribute(:deleted_at, nil) }
        create_variant_for(p, :with => product_information)
      else
        next unless create_product_using(product_information)
      end
    end
    # Optionally purge every product that existed before the import.
    if Spree::ProductImport.settings[:destroy_original_products]
      @products_before_import.each { |p| p.destroy }
    end
    log("Importing products for #{self.data_file_file_name} completed at #{DateTime.now}")
  end
  #All done!
  complete
  return [:notice, "Product data was successfully imported."]
end
private
# create_variant_for
# This method assumes that some form of checking has already been done to
# make sure that we do actually want to create a variant.
# It performs a similar task to a product, but it also must pick up on
# size/color options
# create_variant_for
# This method assumes that some form of checking has already been done to
# make sure that we do actually want to create a variant.
# It performs a similar task to a product, but it also must pick up on
# size/color options.
# Creates (or updates, matched by SKU) a variant of `product` from the
# row data in options[:with]; raises SkuError when the SKU belongs to a
# different product. Returns false when the variant is invalid, nil-ish
# otherwise.
def create_variant_for(product, options = {:with => {}})
  return if options[:with].nil?
  # Just update variant if exists
  variant = Spree::Variant.find_by_sku(options[:with][:sku])
  raise SkuError, "SKU #{variant.sku} should belongs to #{product.inspect} but was #{variant.product.inspect}" if variant && variant.product != product
  if !variant
    variant = product.variants.new
    # NOTE(review): assigning a primary key explicitly from the CSV —
    # presumably to preserve external ids; confirm this is intended.
    variant.id = options[:with][:id]
  else
    options[:with].delete(:id)
  end
  field = Spree::ProductImport.settings[:variant_comparator_field]
  log "VARIANT:: #{variant.inspect} /// #{options.inspect } /// #{options[:with][field]} /// #{field}"
  #Remap the options - oddly enough, Spree's product model has master_price and cost_price, while
  #variant has price and cost_price.
  options[:with][:price] = options[:with].delete(:master_price)
  #First, set the primitive fields on the object (prices, etc.)
  # NOTE(review): the block parameter `field` shadows the outer `field`
  # local above (block-local in Ruby 1.9+, so the outer value survives).
  options[:with].each do |field, value|
    variant.send("#{field}=", value) if variant.respond_to?("#{field}=")
    # Any field whose name matches an option type becomes an option
    # value on the variant (created on demand).
    applicable_option_type = Spree::OptionType.find(:first, :conditions => [
      "lower(presentation) = ? OR lower(name) = ?",
      field.to_s, field.to_s]
    )
    if applicable_option_type.is_a?(Spree::OptionType)
      product.option_types << applicable_option_type unless product.option_types.include?(applicable_option_type)
      opt_value = applicable_option_type.option_values.where(["presentation = ? OR name = ?", value, value]).first
      opt_value = applicable_option_type.option_values.create(:presentation => value, :name => value) unless opt_value
      variant.option_values << opt_value unless variant.option_values.include?(opt_value)
    end
  end
  log "VARIANT PRICE #{variant.inspect} /// #{variant.price}"
  if variant.valid?
    variant.save
    #Associate our new variant with any new taxonomies
    log("Associating taxonomies")
    Spree::ProductImport.settings[:taxonomy_fields].each do |field|
      log("taxonomy_field: #{field} - #{options[:with][field.to_sym]}")
      associate_product_with_taxon(variant.product, field.to_s, options[:with][field.to_sym])
    end
    #Finally, attach any images that have been specified
    Spree::ProductImport.settings[:image_fields].each do |field|
      find_and_attach_image_to(variant, options[:with][field.to_sym])
    end
    #Log a success message
    log("Variant of SKU #{variant.sku} successfully imported.\n")
  else
    log("A variant could not be imported - here is the information we have:\n" +
        "#{pp options[:with]}, #{variant.errors.full_messages.join(', ')}")
    return false
  end
end
# create_product_using
# This method performs the meaty bit of the import - taking the parameters for the
# product we have gathered, and creating the product and related objects.
# It also logs throughout the method to try and give some indication of process.
# create_product_using
# Builds and saves a Spree::Product from the gathered CSV parameters,
# then wires up properties, taxonomies, images and (optionally) stores.
# Raises ProductError when the assembled product is invalid; returns
# true otherwise (also when a product of that name already exists).
def create_product_using(params_hash)
  product = Spree::Product.new
  #The product is inclined to complain if we just dump all params
  # into the product (including images and taxonomies).
  # What this does is only assigns values to products if the product accepts that field.
  params_hash[:price] ||= params_hash[:master_price]
  params_hash.each do |field, value|
    if product.respond_to?("#{field}=")
      product.send("#{field}=", value)
    elsif property = Spree::Property.where(["name = ?", field]).first
      # Fields matching a Spree property become product properties.
      product.product_properties.build :value => value, :property => property
    end
  end
  after_product_built(product, params_hash)
  #We can't continue without a valid product here
  unless product.valid?
    log(msg = "A product could not be imported - here is the information we have:\n" +
        "#{pp params_hash}, #{product.errors.full_messages.join(', ')}")
    raise ProductError, msg
  end
  #Just log which product we're processing
  log(product.name)
  #This should be caught by code in the main import code that checks whether to create
  #variants or not. Since that check can be turned off, however, we should double check.
  if @names_of_products_before_import.include? product.name
    log("#{product.name} is already in the system.\n")
  else
    #Save the object before creating asssociated objects
    product.save and product_ids << product.id
    #Associate our new product with any taxonomies that we need to worry about
    log("Associating taxonomies")
    Spree::ProductImport.settings[:taxonomy_fields].each do |field|
      # Fixed: the original logged options[:with][field.to_sym], but
      # `options` is not defined in this method (copied from
      # create_variant_for) and raised NameError at runtime; also use the
      # class-wide log helper instead of logger.info for consistency.
      log("taxonomy_field: #{field} - #{params_hash[field.to_sym]}")
      associate_product_with_taxon(product, field.to_s, params_hash[field.to_sym])
    end
    #Finally, attach any images that have been specified
    Spree::ProductImport.settings[:image_fields].each do |field|
      find_and_attach_image_to(product, params_hash[field.to_sym])
    end
    if Spree::ProductImport.settings[:multi_domain_importing] && product.respond_to?(:stores)
      begin
        store = Store.find(
          :first,
          :conditions => ["id = ? OR code = ?",
                          params_hash[Spree::ProductImport.settings[:store_field]],
                          params_hash[Spree::ProductImport.settings[:store_field]]
          ]
        )
        product.stores << store
      rescue
        log("#{product.name} could not be associated with a store. Ensure that Spree's multi_domain extension is installed and that fields are mapped to the CSV correctly.")
      end
    end
    #Log a success message
    log("#{product.name} successfully imported.\n")
  end
  return true
end
# get_column_mappings
# This method attempts to automatically map headings in the CSV files
# with fields in the product and variant models.
# If the headings of columns are going to be called something other than this,
# or if the files will not have headings, then the manual initializer
# mapping of columns must be used.
# Row is an array of headings for columns - SKU, Master Price, etc.)
# @return a hash of symbol heading => column index pairs
# get_column_mappings
# Maps CSV heading names to their column indices: each heading is
# lower-cased, stripped of leading whitespace, chomped, and its internal
# whitespace replaced with underscores before being symbolized.
# @return a hash of symbol heading => column index pairs
def get_column_mappings(row)
  row.each_with_index.each_with_object({}) do |(heading, index), mappings|
    key = heading.downcase.gsub(/\A\s*/, '').chomp.gsub(/\s/, '_').to_sym
    mappings[key] = index
  end
end
### MISC HELPERS ####
#Log a message to a file - logs in standard Rails format to logfile set up in the import_products initializer
#and console.
#Message is string, severity symbol - either :info, :warn or :error
#Log a message to a file - logs in standard Rails format to logfile set up in the import_products initializer
#and console.
#Message is string, severity symbol - either :info, :warn or :error
# NOTE(review): ActiveSupport::BufferedLogger was removed in Rails 4 —
# confirm the target Rails version, or switch to ActiveSupport::Logger.
def log(message, severity = :info)
  @rake_log ||= ActiveSupport::BufferedLogger.new(Spree::ProductImport.settings[:log_to])
  message = "[#{Time.now.to_s(:db)}] [#{severity.to_s.capitalize}] #{message}\n"
  @rake_log.send severity, message
  puts message
end
### IMAGE HELPERS ###
# find_and_attach_image_to
# This method attaches images to products. The images may come
# from a local source (i.e. on disk), or they may be online (HTTP/HTTPS).
# find_and_attach_image_to
# This method attaches images to products. The images may come
# from a local source (i.e. on disk), or they may be online (HTTP/HTTPS).
# Does nothing for blank filenames; the image is only associated when the
# Spree::Image record saves successfully.
def find_and_attach_image_to(product_or_variant, filename)
  return if filename.blank?
  #The image can be fetched from an HTTP or local source - either method returns a Tempfile
  file = filename =~ /\Ahttp[s]*:\/\// ? fetch_remote_image(filename) : fetch_local_image(filename)
  #An image has an attachment (the image file) and some object which 'views' it
  product_image = Spree::Image.new({:attachment => file,
                                    :viewable_id => product_or_variant.id,
                                    :viewable_type => product_or_variant.class.name,
                                    :position => product_or_variant.images.length
                                   })
  product_or_variant.images << product_image if product_image.save
end
# This method is used when we have a set location on disk for
# images, and the file is accessible to the script.
# It is basically just a wrapper around basic File IO methods.
# Resolves filename against the configured local image directory and
# returns an open binary File handle, or nil (with a warning logged)
# when the file is missing or unreadable.
def fetch_local_image(filename)
  filename = Spree::ProductImport.settings[:product_image_path] + filename
  # Guard clause replaces the original unless/else; File.exist? replaces
  # the deprecated File.exists?; the log message now names the file
  # (the original printed a broken "#(unknown)" placeholder).
  unless File.exist?(filename) && File.readable?(filename)
    log("Image #{filename} was not found on the server, so this image was not imported.", :warn)
    return nil
  end
  File.open(filename, 'rb')
end
#This method can be used when the filename matches the format of a URL.
# It uses open-uri to fetch the file, returning a Tempfile object if it
# is successful.
# If it fails, it in the first instance logs the HTTP error (404, 500 etc)
# If it fails altogether, it logs it and exits the method.
#This method can be used when the filename matches the format of a URL.
# It uses open-uri to fetch the file, returning a Tempfile object if it
# is successful.
# If it fails, it in the first instance logs the HTTP error (404, 500 etc)
# If it fails altogether, it logs it and exits the method (returning nil).
def fetch_remote_image(filename)
  begin
    open(filename)
  rescue OpenURI::HTTPError => error
    # Fixed: the messages printed a broken "#(unknown)" placeholder
    # instead of interpolating the filename, and misspelled "retrieval".
    log("Image #{filename} retrieval returned #{error.message}, so this image was not imported")
  rescue StandardError
    log("Image #{filename} could not be downloaded, so was not imported.")
  end
end
### TAXON HELPERS ###
# associate_product_with_taxon
# This method accepts three formats of taxon hierarchy strings which will
# associate the given products with taxons:
# 1. A string on it's own will will just find or create the taxon and
# add the product to it. e.g. taxonomy = "Category", taxon_hierarchy = "Tools" will
# add the product to the 'Tools' category.
# 2. A item > item > item structured string will read this like a tree - allowing
# a particular taxon to be picked out
# 3. An item > item & item > item will work as above, but will associate multiple
# taxons with that product. This form should also work with format 1.
### TAXON HELPERS ###
# associate_product_with_taxon
# This method accepts three formats of taxon hierarchy strings which will
# associate the given products with taxons:
# 1. A string on it's own will will just find or create the taxon and
# add the product to it. e.g. taxonomy = "Category", taxon_hierarchy = "Tools" will
# add the product to the 'Tools' category.
# 2. A item > item > item structured string will read this like a tree - allowing
# a particular taxon to be picked out
# 3. An item > item & item > item will work as above, but will associate multiple
# taxons with that product. This form should also work with format 1.
def associate_product_with_taxon(product, taxonomy, taxon_hierarchy)
  return if product.nil? || taxonomy.nil? || taxon_hierarchy.nil?
  #Using find_or_create_by_name is more elegant, but our magical params code automatically downcases
  # the taxonomy name, so unless we are using MySQL, this isn't going to work.
  taxonomy_name = taxonomy
  taxonomy = Spree::Taxonomy.find(:first, :conditions => ["lower(name) = ?", taxonomy])
  taxonomy = Spree::Taxonomy.create(:name => taxonomy_name.capitalize) if taxonomy.nil? && Spree::ProductImport.settings[:create_missing_taxonomies]
  # Fixed: when the taxonomy is unknown and :create_missing_taxonomies is
  # off (or creation returned nil), the original raised NoMethodError on
  # `taxonomy.root` below; bail out instead.
  return if taxonomy.nil?
  taxon_hierarchy.split(/\s*\&\s*/).each do |hierarchy|
    hierarchy = hierarchy.split(/\s*>\s*/)
    last_taxon = taxonomy.root
    hierarchy.each do |taxon|
      last_taxon = last_taxon.children.find_or_create_by_name_and_taxonomy_id(taxon, taxonomy.id)
    end
    #Spree only needs to know the most detailed taxonomy item
    product.taxons << last_taxon unless product.taxons.include?(last_taxon)
  end
end
### END TAXON HELPERS ###
# May be implemented via decorator if useful:
#
# ProductImport.class_eval do
#
# private
#
# def after_product_built(product, params_hash)
# # so something with the product
# end
# end
# Extension hook invoked after a product is assembled from CSV params but
# before validation/saving. Intentionally a no-op; override via decorator
# (see the comment block above) to customize the built product.
def after_product_built(product, params_hash)
end
end
end
Revision note: log changes — the copy of the Spree::ProductImport model below applies adjustments to its logging behavior.
# This model is the master routine for uploading products
# Requires Paperclip and CSV to upload the CSV file and read it nicely.
# Original Author:: Josh McArthur
# License:: MIT
module Spree
class ProductError < StandardError; end;
class ImportError < StandardError; end;
class SkuError < StandardError; end;
class ProductImport < ActiveRecord::Base
attr_accessible :data_file, :data_file_file_name, :data_file_content_type, :data_file_file_size, :data_file_updated_at, :product_ids, :state, :failed_at, :completed_at
has_attached_file :data_file, :path => ":rails_root/lib/etc/product_data/data-files/:basename.:extension"
validates_attachment_presence :data_file
after_destroy :destroy_products
serialize :product_ids, Array
cattr_accessor :settings
def products
Spree::Product.where :id => product_ids
end
require 'csv'
require 'pp'
require 'open-uri'
def destroy_products
products.destroy_all
end
state_machine :initial => :created do
event :start do
transition :to => :started, :from => :created
end
event :complete do
transition :to => :completed, :from => :started
end
event :failure do
transition :to => :failed, :from => :started
end
before_transition :to => [:failed] do |import|
import.product_ids = []
import.failed_at = Time.now
import.completed_at = nil
end
before_transition :to => [:completed] do |import|
import.failed_at = nil
import.completed_at = Time.now
end
end
def state_datetime
if failed?
failed_at
elsif completed?
completed_at
else
Time.now
end
end
## Data Importing:
# List Price maps to Master Price, Current MAP to Cost Price, Net 30 Cost unused
# Width, height, Depth all map directly to object
# Image main is created independtly, then each other image also created and associated with the product
# Meta keywords and description are created on the product model
def import_data!(_transaction=true)
start
if _transaction
transaction do
_import_data
end
else
_import_data
end
rescue Exception => exp
log("An error occurred during import, please check file and try again. (#{exp.message})\n#{exp.backtrace.join('\n')}", :error)
failure
raise ImportError, exp.message
end
def _import_data
begin
#Get products *before* import -
@products_before_import = Spree::Product.all
@names_of_products_before_import = @products_before_import.map(&:name)
rows = CSV.read(self.data_file.path)
if Spree::ProductImport.settings[:first_row_is_headings]
col = get_column_mappings(rows[0])
else
col = Spree::ProductImport.settings[:column_mappings]
end
log("Importing products for #{self.data_file_file_name} began at #{Time.now}")
rows[Spree::ProductImport.settings[:rows_to_skip]..-1].each do |row|
product_information = {}
#Automatically map 'mapped' fields to a collection of product information.
#NOTE: This code will deal better with the auto-mapping function - i.e. if there
#are named columns in the spreadsheet that correspond to product
# and variant field names.
col.each do |key, value|
#Trim whitespace off the beginning and end of row fields
row[value].try :strip!
product_information[key] = row[value]
end
#Manually set available_on if it is not already set
product_information[:available_on] = Date.today - 1.day if product_information[:available_on].nil?
log("#{pp product_information}")
variant_comparator_field = Spree::ProductImport.settings[:variant_comparator_field].try :to_sym
variant_comparator_column = col[variant_comparator_field]
if Spree::ProductImport.settings[:create_variants] and variant_comparator_column and
p = Spree::Product.where(variant_comparator_field => row[variant_comparator_column]).first
log("found product with this field #{variant_comparator_field}=#{row[variant_comparator_column]}")
p.update_attribute(:deleted_at, nil) if p.deleted_at #Un-delete product if it is there
p.variants.each { |variant| variant.update_attribute(:deleted_at, nil) }
create_variant_for(p, :with => product_information)
else
next unless create_product_using(product_information)
end
end
if Spree::ProductImport.settings[:destroy_original_products]
@products_before_import.each { |p| p.destroy }
end
log("Importing products for #{self.data_file_file_name} completed at #{DateTime.now}")
end
#All done!
complete
return [:notice, "Product data was successfully imported."]
end
private
# create_variant_for
# Creates (or updates) a variant of +product+ from imported row data.
# Assumes the caller has already decided that a variant - rather than a
# product - should be created. Also picks up size/colour style option types
# from row fields that match an existing Spree::OptionType, creating option
# values as needed.
# Raises SkuError when the SKU already belongs to a different product.
def create_variant_for(product, options = { :with => {} })
  return if options[:with].nil?
  # Re-use an existing variant when the SKU is already known.
  variant = Spree::Variant.find_by_sku(options[:with][:sku])
  if variant && variant.product != product
    raise SkuError, "SKU #{variant.sku} should belong to #{product.inspect} but was #{variant.product.inspect}"
  end
  if variant
    # Never overwrite the primary key of an existing record.
    options[:with].delete(:id)
  else
    variant = product.variants.new
    variant.id = options[:with][:id]
  end
  comparator_field = Spree::ProductImport.settings[:variant_comparator_field]
  log "VARIANT:: #{variant.inspect} /// #{options.inspect} /// #{options[:with][comparator_field]} /// #{comparator_field}"
  # Remap the options - oddly enough, Spree's product model has master_price
  # while variant has price. Only remap when a master_price was actually
  # supplied, otherwise an explicitly mapped :price would be clobbered with nil.
  options[:with][:price] = options[:with].delete(:master_price) if options[:with].key?(:master_price)
  # Set the primitive fields (prices etc.), and translate any field that names
  # an option type into option values on the variant.
  options[:with].each do |field, value|
    variant.send("#{field}=", value) if variant.respond_to?("#{field}=")
    applicable_option_type = Spree::OptionType.find(:first, :conditions => [
      "lower(presentation) = ? OR lower(name) = ?",
      field.to_s, field.to_s]
    )
    if applicable_option_type.is_a?(Spree::OptionType)
      product.option_types << applicable_option_type unless product.option_types.include?(applicable_option_type)
      opt_value = applicable_option_type.option_values.where(["presentation = ? OR name = ?", value, value]).first
      opt_value ||= applicable_option_type.option_values.create(:presentation => value, :name => value)
      variant.option_values << opt_value unless variant.option_values.include?(opt_value)
    end
  end
  log "VARIANT PRICE #{variant.inspect} /// #{variant.price}"
  if variant.valid?
    variant.save
    # Associate the parent product with any taxonomies named in the row.
    log("Associating taxonomies")
    Spree::ProductImport.settings[:taxonomy_fields].each do |field|
      log("taxonomy_field: #{field} - #{options[:with][field.to_sym]}")
      associate_product_with_taxon(variant.product, field.to_s, options[:with][field.to_sym])
    end
    # Finally, attach any images that have been specified.
    Spree::ProductImport.settings[:image_fields].each do |field|
      find_and_attach_image_to(variant, options[:with][field.to_sym])
    end
    log("Variant of SKU #{variant.sku} successfully imported.\n")
  else
    # :error is passed as the log severity rather than buried in the message.
    log("A variant could not be imported - here is the information we have:\n" +
        "#{options[:with].inspect}, #{variant.errors.full_messages.join(', ')}", :error)
    false
  end
end
# create_product_using
# Performs the meaty bit of the import - taking the parameters gathered for
# the product and creating the product and related objects (properties,
# taxons, images, stores). Logs throughout to give some indication of
# progress.
# Raises ProductError when the assembled product fails validation.
# Returns true on success (including the already-imported case).
def create_product_using(params_hash)
  product = Spree::Product.new
  # The product complains if we just dump all params into it (including
  # images and taxonomies), so only assign values the product accepts;
  # anything matching a Spree::Property becomes a product property instead.
  params_hash[:price] ||= params_hash[:master_price]
  params_hash.each do |field, value|
    if product.respond_to?("#{field}=")
      product.send("#{field}=", value)
    elsif property = Spree::Property.where(["name = ?", field]).first
      product.product_properties.build :value => value, :property => property
    end
  end
  after_product_built(product, params_hash)
  # We can't continue without a valid product here.
  unless product.valid?
    log(msg = "A product could not be imported - here is the information we have:\n" +
        "#{params_hash.inspect}, #{product.errors.full_messages.join(', ')}")
    raise ProductError, msg
  end
  # Just log which product we're processing.
  log(product.name)
  # The main import loop normally prevents duplicates, but since that check
  # can be turned off, double-check against the pre-import product names.
  if @names_of_products_before_import.include? product.name
    log("#{product.name} is already in the system.\n")
  else
    # Save the object before creating associated objects.
    product_ids << product.id if product.save
    # Associate our new product with any taxonomies we need to worry about.
    # (Uses the log helper for consistency with the rest of the importer.)
    log("Associating taxonomies")
    Spree::ProductImport.settings[:taxonomy_fields].each do |field|
      log("taxonomy_field: #{field} - #{params_hash[field.to_sym]}")
      associate_product_with_taxon(product, field.to_s, params_hash[field.to_sym])
    end
    # Finally, attach any images that have been specified.
    Spree::ProductImport.settings[:image_fields].each do |field|
      find_and_attach_image_to(product, params_hash[field.to_sym])
    end
    if Spree::ProductImport.settings[:multi_domain_importing] && product.respond_to?(:stores)
      begin
        store = Store.find(
          :first,
          :conditions => ["id = ? OR code = ?",
            params_hash[Spree::ProductImport.settings[:store_field]],
            params_hash[Spree::ProductImport.settings[:store_field]]
          ]
        )
        product.stores << store
      rescue
        log("#{product.name} could not be associated with a store. Ensure that Spree's multi_domain extension is installed and that fields are mapped to the CSV correctly.")
      end
    end
    log("#{product.name} successfully imported.\n")
  end
  true
end
# get_column_mappings
# Automatically maps headings in the CSV file to fields in the product and
# variant models, e.g. " Master Price\n" -> :master_price. If the columns are
# named differently, or the file has no headings, the manual initializer
# column mapping must be used instead.
# Nil headings (trailing empty CSV columns) are skipped rather than crashing.
# @param row [Array] array of heading cells (SKU, Master Price, etc.)
# @return [Hash] symbol heading => column index pairs
def get_column_mappings(row)
  mappings = {}
  row.each_with_index do |heading, index|
    next if heading.nil?
    # strip removes surrounding whitespace (including trailing spaces, which
    # the old gsub-leading-whitespace + chomp combination missed), then any
    # inner whitespace becomes '_'.
    mappings[heading.downcase.strip.gsub(/\s/, '_').to_sym] = index
  end
  mappings
end
### MISC HELPERS ####
# Write +message+ to both the import logfile (configured in the
# import_products initializer) and stdout, in standard Rails format.
# severity is a symbol - :info, :warn or :error.
def log(message, severity = :info)
  @rake_log ||= ActiveSupport::BufferedLogger.new(Spree::ProductImport.settings[:log_to])
  stamped = "[#{Time.now.to_s(:db)}] [#{severity.to_s.capitalize}] #{message}\n"
  @rake_log.send(severity, stamped)
  puts stamped
end
### IMAGE HELPERS ###
# find_and_attach_image_to
# Attaches an image to a product or variant. The image may come from a local
# source (i.e. on disk) or be fetched over HTTP/HTTPS; either helper returns
# a file handle (or nil on failure).
def find_and_attach_image_to(product_or_variant, filename)
  return if filename.blank?
  # Remote vs local is decided purely by the filename scheme.
  file = if filename =~ /\Ahttp[s]*:\/\//
           fetch_remote_image(filename)
         else
           fetch_local_image(filename)
         end
  # An image has an attachment (the image file) and some object which 'views' it.
  product_image = Spree::Image.new(
    :attachment    => file,
    :viewable_id   => product_or_variant.id,
    :viewable_type => product_or_variant.class.name,
    :position      => product_or_variant.images.length
  )
  product_or_variant.images << product_image if product_image.save
end
# Wraps basic File IO for images stored at a known location on disk,
# accessible to the script.
# Returns an open binary-mode File, or nil when the file is missing or
# unreadable (logged as a warning).
def fetch_local_image(filename)
  filename = Spree::ProductImport.settings[:product_image_path] + filename
  if File.exist?(filename) && File.readable?(filename)
    File.open(filename, 'rb')
  else
    # Interpolate the actual path (the old message had a broken "#(unknown)"
    # placeholder) so failed imports can be traced. File.exist? is used since
    # File.exists? was removed in Ruby 3.2.
    log("Image #{filename} was not found on the server, so this image was not imported.", :warn)
    nil
  end
end
# Fetches an image whose filename matches the format of a URL, using
# open-uri. Returns a Tempfile on success.
# On failure it logs the HTTP error (404, 500 etc) in the first instance, or
# the generic download failure otherwise, and returns nil so the caller can
# skip the image.
def fetch_remote_image(filename)
  open(filename)
rescue OpenURI::HTTPError => error
  # Interpolate the URL (the old message had a broken "#(unknown)" placeholder).
  log("Image #{filename} retrieval returned #{error.message}, so this image was not imported")
  nil
rescue StandardError
  log("Image #{filename} could not be downloaded, so was not imported.")
  nil
end
### TAXON HELPERS ###
# associate_product_with_taxon
# Accepts three formats of taxon hierarchy string which associate the given
# product with taxons:
# 1. "Tools"         - find/create the taxon and add the product to it.
# 2. "a > b > c"     - read like a tree, picking out a particular taxon.
# 3. "a > b & c > d" - as above, but associates multiple taxons.
def associate_product_with_taxon(product, taxonomy, taxon_hierarchy)
  return if product.nil? || taxonomy.nil? || taxon_hierarchy.nil?
  # Using find_or_create_by_name would be more elegant, but the params code
  # automatically downcases the taxonomy name, so a case-insensitive lookup
  # is needed (unless we are on MySQL).
  taxonomy_name = taxonomy
  taxonomy = Spree::Taxonomy.find(:first, :conditions => ["lower(name) = ?", taxonomy])
  taxonomy = Spree::Taxonomy.create(:name => taxonomy_name.capitalize) if taxonomy.nil? && Spree::ProductImport.settings[:create_missing_taxonomies]
  # Guard: when the taxonomy is unknown and create_missing_taxonomies is
  # disabled, taxonomy stays nil and taxonomy.root below would raise.
  return if taxonomy.nil?
  taxon_hierarchy.split(/\s*\&\s*/).each do |hierarchy|
    hierarchy = hierarchy.split(/\s*>\s*/)
    last_taxon = taxonomy.root
    hierarchy.each do |taxon|
      last_taxon = last_taxon.children.find_or_create_by_name_and_taxonomy_id(taxon, taxonomy.id)
    end
    # Spree only needs to know the most detailed taxonomy item.
    product.taxons << last_taxon unless product.taxons.include?(last_taxon)
  end
end
### END TAXON HELPERS ###
# May be implemented via decorator if useful:
#
# ProductImport.class_eval do
#
# private
#
# def after_product_built(product, params_hash)
# # so something with the product
# end
# end
# Extension hook: called after each product is built from the CSV params but
# before validation/saving. No-op by default; override via a decorator (see
# the commented example above) to customise the product.
def after_product_built(product, params_hash)
end
end
end
# --- (separator artifact between concatenated revisions of this file) ---
# This model is the master routine for uploading products
# Requires Paperclip and CSV to upload the CSV file and read it nicely.
# Original Author:: Josh McArthur
# Author:: Chetan Mittal
# License:: MIT
module Spree
class ProductImport < ActiveRecord::Base
has_attached_file :data_file, :path => ":rails_root/lib/etc/product_data/data-files/:basename.:extension"
validates_attachment_presence :data_file
require 'csv'
require 'pp'
require 'open-uri'
## Data Importing:
# List Price maps to Master Price, Current MAP to Cost Price, Net 30 Cost unused
# Width, height, Depth all map directly to object
# Image main is created independtly, then each other image also created and associated with the product
# Meta keywords and description are created on the product model
# Master import routine: reads the attached CSV, creates products (and
# optionally variants), then destroys the pre-import products if configured.
# @return [Array] a [severity, message] pair for the controller flash
def import_data!
  # begin
  #Get products *before* import -
  @products_before_import = Product.all
  @names_of_products_before_import = @products_before_import.map(&:name)
  log("#{@names_of_products_before_import}")
  rows = CSV.read(self.data_file.path)
  if IMPORT_PRODUCT_SETTINGS[:first_row_is_headings]
    col = get_column_mappings(rows[0])
  else
    col = IMPORT_PRODUCT_SETTINGS[:column_mappings]
  end
  log("Importing products for #{self.data_file_file_name} began at #{Time.now}")
  rows[IMPORT_PRODUCT_SETTINGS[:rows_to_skip]..-1].each do |row|
    product_information = {}
    # Map the configured columns into a hash of product information.
    col.each do |key, value|
      product_information[key] = row[value]
    end
    # Manually set available_on if it is not already set.
    product_information[:available_on] = DateTime.now - 1.day if product_information[:available_on].nil?
    # Trim whitespace off the beginning and end of row fields. strip! is safe
    # here; the old gsub!(...).chomp! chain raised NoMethodError whenever a
    # cell had no leading whitespace, because gsub! returns nil when it makes
    # no substitution.
    row.each do |r|
      r.strip! if r.is_a?(String)
    end
    if IMPORT_PRODUCT_SETTINGS[:create_variants]
      field = IMPORT_PRODUCT_SETTINGS[:variant_comparator_field].to_sym
      if p = Product.where(field => row[col[field]]).first
        p.update_attribute(:deleted_at, nil) if p.deleted_at # Un-delete product if it is there
        p.variants.each { |variant| variant.update_attribute(:deleted_at, nil) }
        create_variant_for(p, :with => product_information)
      else
        next unless create_product_using(product_information)
      end
    else
      next unless create_product_using(product_information)
    end
  end
  if IMPORT_PRODUCT_SETTINGS[:destroy_original_products]
    @products_before_import.each { |p| p.destroy }
  end
  log("Importing products for #{self.data_file_file_name} completed at #{DateTime.now}")
  # rescue Exception => exp
  #   log("An error occurred during import, please check file and try again. (#{exp.message})", :error)
  #   raise
  # end
  #All done!
  return [:notice, "Product data was successfully imported."]
end
private
# create_variant_for
# Creates a new variant of +product+ from imported row data, picking up
# size/colour style option types along the way. Assumes the caller has
# already decided that a variant should be created.
def create_variant_for(product, options = { :with => {} })
  return if options[:with].nil?
  variant = product.variants.new
  # Remap the options - oddly enough, Spree's product model has master_price
  # while variant has price. Only remap when a master_price was actually
  # supplied, otherwise an explicitly mapped :price would be clobbered with nil.
  options[:with][:price] = options[:with].delete(:master_price) if options[:with].key?(:master_price)
  # Set the primitive fields (prices etc.), and translate any field that
  # names an option type into option values on the variant.
  options[:with].each do |field, value|
    variant.send("#{field}=", value) if variant.respond_to?("#{field}=")
    applicable_option_type = OptionType.find(:first, :conditions => [
      "lower(presentation) = ? OR lower(name) = ?",
      field.to_s, field.to_s]
    )
    if applicable_option_type.is_a?(OptionType)
      product.option_types << applicable_option_type unless product.option_types.include?(applicable_option_type)
      variant.option_values << applicable_option_type.option_values.find(
        :all,
        :conditions => ["presentation = ? OR name = ?", value, value]
      )
    end
  end
  if variant.valid?
    variant.save
    # Associate our new variant with any new taxonomies.
    IMPORT_PRODUCT_SETTINGS[:taxonomy_fields].each do |field|
      associate_product_with_taxon(variant.product, field.to_s, options[:with][field.to_sym])
    end
    # Finally, attach any images that have been specified.
    IMPORT_PRODUCT_SETTINGS[:image_fields].each do |field|
      find_and_attach_image_to(variant, options[:with][field.to_sym])
    end
    log("Variant of SKU #{variant.sku} successfully imported.\n")
  else
    # :error is the log severity, not part of the message (the old code had
    # it inside the interpolated string).
    log("A variant could not be imported - here is the information we have:\n" +
        "#{options[:with].inspect}", :error)
    false
  end
end
# create_product_using
# Performs the meaty bit of the import - taking the parameters gathered for
# the product and creating the product and related objects. Logs throughout
# to give some indication of progress.
# Returns false when the product is invalid, true otherwise.
def create_product_using(params_hash)
  product = Product.new
  # The product complains if we just dump all params into it (including
  # images and taxonomies), so only assign values the product accepts.
  params_hash.each do |field, value|
    product.send("#{field}=", value) if product.respond_to?("#{field}=")
  end
  after_product_built(product, params_hash)
  # We can't continue without a valid product here.
  unless product.valid?
    # :error is the log severity, not part of the message (the old code had
    # it inside the interpolated string).
    log("A product could not be imported - here is the information we have:\n" +
        "#{params_hash.inspect}", :error)
    return false
  end
  # Just log which product we're processing.
  log(product.name)
  # The main import loop normally prevents duplicates, but since that check
  # can be turned off, double-check against the pre-import product names.
  if @names_of_products_before_import.include? product.name
    log("#{product.name} is already in the system.\n")
  else
    # Save the object before creating associated objects.
    product.save
    # Associate our new product with any taxonomies we need to worry about.
    IMPORT_PRODUCT_SETTINGS[:taxonomy_fields].each do |field|
      associate_product_with_taxon(product, field.to_s, params_hash[field.to_sym])
    end
    # Finally, attach any images that have been specified.
    IMPORT_PRODUCT_SETTINGS[:image_fields].each do |field|
      find_and_attach_image_to(product, params_hash[field.to_sym])
    end
    if IMPORT_PRODUCT_SETTINGS[:multi_domain_importing] && product.respond_to?(:stores)
      begin
        store = Store.find(
          :first,
          :conditions => ["id = ? OR code = ?",
            params_hash[IMPORT_PRODUCT_SETTINGS[:store_field]],
            params_hash[IMPORT_PRODUCT_SETTINGS[:store_field]]
          ]
        )
        product.stores << store
      rescue
        log("#{product.name} could not be associated with a store. Ensure that Spree's multi_domain extension is installed and that fields are mapped to the CSV correctly.")
      end
    end
    log("#{product.name} successfully imported.\n")
  end
  return true
end
# get_column_mappings
# Automatically maps headings in the CSV file to fields in the product and
# variant models, e.g. " Master Price\n" -> :master_price. If the columns are
# named differently, or the file has no headings, the manual initializer
# column mapping must be used instead.
# Nil headings (trailing empty CSV columns) are skipped rather than crashing.
# @param row [Array] array of heading cells (SKU, Master Price, etc.)
# @return [Hash] symbol heading => column index pairs
def get_column_mappings(row)
  mappings = {}
  row.each_with_index do |heading, index|
    next if heading.nil?
    # strip removes surrounding whitespace (including trailing spaces, which
    # the old gsub-leading-whitespace + chomp combination missed), then any
    # inner whitespace becomes '_'.
    mappings[heading.downcase.strip.gsub(/\s/, '_').to_sym] = index
  end
  mappings
end
### MISC HELPERS ####
# Write +message+ to both the import logfile (configured in the
# import_products initializer) and stdout, in standard Rails format.
# severity is a symbol - :info, :warn or :error.
def log(message, severity = :info)
  @rake_log ||= ActiveSupport::BufferedLogger.new(IMPORT_PRODUCT_SETTINGS[:log_to])
  stamped = "[#{Time.now.to_s(:db)}] [#{severity.to_s.capitalize}] #{message}\n"
  @rake_log.send(severity, stamped)
  puts stamped
end
### IMAGE HELPERS ###
# find_and_attach_image_to
# Attaches an image to a product or variant. The image may come from a local
# source (i.e. on disk) or be fetched over HTTP/HTTPS; either helper returns
# a file handle (or nil on failure).
def find_and_attach_image_to(product_or_variant, filename)
  return if filename.blank?
  # Remote vs local is decided purely by the filename scheme.
  file = if filename =~ /\Ahttp[s]*:\/\//
           fetch_remote_image(filename)
         else
           fetch_local_image(filename)
         end
  # An image has an attachment (the image file) and some object which 'views' it.
  product_image = Image.new(
    :attachment => file,
    :viewable   => product_or_variant,
    :position   => product_or_variant.images.length
  )
  product_or_variant.images << product_image if product_image.save
end
# Wraps basic File IO for images stored at a known location on disk,
# accessible to the script.
# Returns an open binary-mode File, or nil when the file is missing or
# unreadable (logged as a warning).
def fetch_local_image(filename)
  filename = IMPORT_PRODUCT_SETTINGS[:product_image_path] + filename
  if File.exist?(filename) && File.readable?(filename)
    File.open(filename, 'rb')
  else
    # Interpolate the actual path (the old message had a broken "#(unknown)"
    # placeholder). File.exist? is used since File.exists? was removed in
    # Ruby 3.2.
    log("Image #{filename} was not found on the server, so this image was not imported.", :warn)
    nil
  end
end
# Fetches an image whose filename matches the format of a URL, using
# open-uri. Returns a Tempfile on success.
# On failure it logs the HTTP error (404, 500 etc) in the first instance, or
# the generic download failure otherwise, and returns nil so the caller can
# skip the image.
def fetch_remote_image(filename)
  open(filename)
rescue OpenURI::HTTPError => error
  # Interpolate the URL (the old message had a broken "#(unknown)" placeholder).
  log("Image #{filename} retrieval returned #{error.message}, so this image was not imported")
  nil
rescue StandardError
  log("Image #{filename} could not be downloaded, so was not imported.")
  nil
end
### TAXON HELPERS ###
# associate_product_with_taxon
# Accepts three formats of taxon hierarchy string which associate the given
# product with taxons:
# 1. "Tools"         - find/create the taxon and add the product to it.
# 2. "a > b > c"     - read like a tree, picking out a particular taxon.
# 3. "a > b & c > d" - as above, but associates multiple taxons.
def associate_product_with_taxon(product, taxonomy, taxon_hierarchy)
  return if product.nil? || taxonomy.nil? || taxon_hierarchy.nil?
  # Using find_or_create_by_name would be more elegant, but the params code
  # automatically downcases the taxonomy name, so a case-insensitive lookup
  # is needed (unless we are on MySQL).
  taxonomy_name = taxonomy
  taxonomy = Taxonomy.find(:first, :conditions => ["lower(name) = ?", taxonomy])
  taxonomy = Taxonomy.create(:name => taxonomy_name.capitalize) if taxonomy.nil? && IMPORT_PRODUCT_SETTINGS[:create_missing_taxonomies]
  # Guard: when the taxonomy is unknown and create_missing_taxonomies is
  # disabled, taxonomy stays nil and taxonomy.root below would raise.
  return if taxonomy.nil?
  taxon_hierarchy.split(/\s*\&\s*/).each do |hierarchy|
    hierarchy = hierarchy.split(/\s*>\s*/)
    last_taxon = taxonomy.root
    hierarchy.each do |taxon|
      last_taxon = last_taxon.children.find_or_create_by_name_and_taxonomy_id(taxon, taxonomy.id)
    end
    # Spree only needs to know the most detailed taxonomy item.
    product.taxons << last_taxon unless product.taxons.include?(last_taxon)
  end
end
### END TAXON HELPERS ###
# May be implemented via decorator if useful:
#
# ProductImport.class_eval do
#
# private
#
# def after_product_built(product, params_hash)
# # so something with the product
# end
# end
# Extension hook: called after each product is built from the CSV params but
# before validation/saving. No-op by default; override via a decorator (see
# the commented example above) to customise the product.
def after_product_built(product, params_hash)
end
end
end
# Revision note: the begin/rescue block is re-enabled in the version below.
# This model is the master routine for uploading products
# Requires Paperclip and CSV to upload the CSV file and read it nicely.
# Original Author:: Josh McArthur
# Author:: Chetan Mittal
# License:: MIT
module Spree
class ProductImport < ActiveRecord::Base
has_attached_file :data_file, :path => ":rails_root/lib/etc/product_data/data-files/:basename.:extension"
validates_attachment_presence :data_file
require 'csv'
require 'pp'
require 'open-uri'
## Data Importing:
# List Price maps to Master Price, Current MAP to Cost Price, Net 30 Cost unused
# Width, height, Depth all map directly to object
# Image main is created independtly, then each other image also created and associated with the product
# Meta keywords and description are created on the product model
# Master import routine: reads the attached CSV, creates products (and
# optionally variants), then destroys the pre-import products if configured.
# Any import-time failure is logged with its backtrace and re-raised.
# @return [Array] a [severity, message] pair for the controller flash
def import_data!
  begin
    #Get products *before* import -
    @products_before_import = Product.all
    @names_of_products_before_import = @products_before_import.map(&:name)
    log("#{@names_of_products_before_import}")
    rows = CSV.read(self.data_file.path)
    if IMPORT_PRODUCT_SETTINGS[:first_row_is_headings]
      col = get_column_mappings(rows[0])
    else
      col = IMPORT_PRODUCT_SETTINGS[:column_mappings]
    end
    log("Importing products for #{self.data_file_file_name} began at #{Time.now}")
    rows[IMPORT_PRODUCT_SETTINGS[:rows_to_skip]..-1].each do |row|
      product_information = {}
      # Map the configured columns into a hash of product information.
      col.each do |key, value|
        product_information[key] = row[value]
      end
      # Manually set available_on if it is not already set.
      product_information[:available_on] = DateTime.now - 1.day if product_information[:available_on].nil?
      # Trim whitespace off the beginning and end of row fields. strip! is
      # safe here; the old gsub!(...).chomp! chain raised NoMethodError
      # whenever a cell had no leading whitespace, because gsub! returns nil
      # when it makes no substitution.
      row.each do |r|
        r.strip! if r.is_a?(String)
      end
      if IMPORT_PRODUCT_SETTINGS[:create_variants]
        field = IMPORT_PRODUCT_SETTINGS[:variant_comparator_field].to_sym
        if p = Product.where(field => row[col[field]]).first
          p.update_attribute(:deleted_at, nil) if p.deleted_at # Un-delete product if it is there
          p.variants.each { |variant| variant.update_attribute(:deleted_at, nil) }
          create_variant_for(p, :with => product_information)
        else
          next unless create_product_using(product_information)
        end
      else
        next unless create_product_using(product_information)
      end
    end
    if IMPORT_PRODUCT_SETTINGS[:destroy_original_products]
      @products_before_import.each { |p| p.destroy }
    end
    log("Importing products for #{self.data_file_file_name} completed at #{DateTime.now}")
  rescue StandardError => exp
    # Rescue StandardError, never Exception - rescuing Exception swallows
    # SignalException/SystemExit as well.
    log("An error occurred during import, please check file and try again. (#{exp.message})\n#{exp.backtrace.join('\n')}", :error)
    # Re-raise the original error with its backtrace intact.
    # (The old `raise Exception(exp.message)` was itself a NoMethodError:
    # Exception(...) is not a method.)
    raise
  end
  #All done!
  return [:notice, "Product data was successfully imported."]
end
private
# create_variant_for
# Creates a new variant of +product+ from imported row data, picking up
# size/colour style option types along the way. Assumes the caller has
# already decided that a variant should be created.
def create_variant_for(product, options = { :with => {} })
  return if options[:with].nil?
  variant = product.variants.new
  # Remap the options - oddly enough, Spree's product model has master_price
  # while variant has price. Only remap when a master_price was actually
  # supplied, otherwise an explicitly mapped :price would be clobbered with nil.
  options[:with][:price] = options[:with].delete(:master_price) if options[:with].key?(:master_price)
  # Set the primitive fields (prices etc.), and translate any field that
  # names an option type into option values on the variant.
  options[:with].each do |field, value|
    variant.send("#{field}=", value) if variant.respond_to?("#{field}=")
    applicable_option_type = OptionType.find(:first, :conditions => [
      "lower(presentation) = ? OR lower(name) = ?",
      field.to_s, field.to_s]
    )
    if applicable_option_type.is_a?(OptionType)
      product.option_types << applicable_option_type unless product.option_types.include?(applicable_option_type)
      variant.option_values << applicable_option_type.option_values.find(
        :all,
        :conditions => ["presentation = ? OR name = ?", value, value]
      )
    end
  end
  if variant.valid?
    variant.save
    # Associate our new variant with any new taxonomies.
    IMPORT_PRODUCT_SETTINGS[:taxonomy_fields].each do |field|
      associate_product_with_taxon(variant.product, field.to_s, options[:with][field.to_sym])
    end
    # Finally, attach any images that have been specified.
    IMPORT_PRODUCT_SETTINGS[:image_fields].each do |field|
      find_and_attach_image_to(variant, options[:with][field.to_sym])
    end
    log("Variant of SKU #{variant.sku} successfully imported.\n")
  else
    # :error is the log severity, not part of the message (the old code had
    # it inside the interpolated string).
    log("A variant could not be imported - here is the information we have:\n" +
        "#{options[:with].inspect}", :error)
    false
  end
end
# create_product_using
# Performs the meaty bit of the import - taking the parameters gathered for
# the product and creating the product and related objects. Logs throughout
# to give some indication of progress.
# Returns false when the product is invalid, true otherwise.
def create_product_using(params_hash)
  product = Product.new
  # The product complains if we just dump all params into it (including
  # images and taxonomies), so only assign values the product accepts.
  params_hash.each do |field, value|
    product.send("#{field}=", value) if product.respond_to?("#{field}=")
  end
  after_product_built(product, params_hash)
  # We can't continue without a valid product here.
  unless product.valid?
    # :error is the log severity, not part of the message (the old code had
    # it inside the interpolated string).
    log("A product could not be imported - here is the information we have:\n" +
        "#{params_hash.inspect}", :error)
    return false
  end
  # Just log which product we're processing.
  log(product.name)
  # The main import loop normally prevents duplicates, but since that check
  # can be turned off, double-check against the pre-import product names.
  if @names_of_products_before_import.include? product.name
    log("#{product.name} is already in the system.\n")
  else
    # Save the object before creating associated objects.
    product.save
    # Associate our new product with any taxonomies we need to worry about.
    IMPORT_PRODUCT_SETTINGS[:taxonomy_fields].each do |field|
      associate_product_with_taxon(product, field.to_s, params_hash[field.to_sym])
    end
    # Finally, attach any images that have been specified.
    IMPORT_PRODUCT_SETTINGS[:image_fields].each do |field|
      find_and_attach_image_to(product, params_hash[field.to_sym])
    end
    if IMPORT_PRODUCT_SETTINGS[:multi_domain_importing] && product.respond_to?(:stores)
      begin
        store = Store.find(
          :first,
          :conditions => ["id = ? OR code = ?",
            params_hash[IMPORT_PRODUCT_SETTINGS[:store_field]],
            params_hash[IMPORT_PRODUCT_SETTINGS[:store_field]]
          ]
        )
        product.stores << store
      rescue
        log("#{product.name} could not be associated with a store. Ensure that Spree's multi_domain extension is installed and that fields are mapped to the CSV correctly.")
      end
    end
    log("#{product.name} successfully imported.\n")
  end
  return true
end
# get_column_mappings
# Automatically maps headings in the CSV file to fields in the product and
# variant models, e.g. " Master Price\n" -> :master_price. If the columns are
# named differently, or the file has no headings, the manual initializer
# column mapping must be used instead.
# Nil headings (trailing empty CSV columns) are skipped rather than crashing.
# @param row [Array] array of heading cells (SKU, Master Price, etc.)
# @return [Hash] symbol heading => column index pairs
def get_column_mappings(row)
  mappings = {}
  row.each_with_index do |heading, index|
    next if heading.nil?
    # strip removes surrounding whitespace (including trailing spaces, which
    # the old gsub-leading-whitespace + chomp combination missed), then any
    # inner whitespace becomes '_'.
    mappings[heading.downcase.strip.gsub(/\s/, '_').to_sym] = index
  end
  mappings
end
### MISC HELPERS ####
# Write +message+ to both the import logfile (configured in the
# import_products initializer) and stdout, in standard Rails format.
# severity is a symbol - :info, :warn or :error.
def log(message, severity = :info)
  @rake_log ||= ActiveSupport::BufferedLogger.new(IMPORT_PRODUCT_SETTINGS[:log_to])
  stamped = "[#{Time.now.to_s(:db)}] [#{severity.to_s.capitalize}] #{message}\n"
  @rake_log.send(severity, stamped)
  puts stamped
end
### IMAGE HELPERS ###
# find_and_attach_image_to
# Attaches an image to a product or variant. The image may come from a local
# source (i.e. on disk) or be fetched over HTTP/HTTPS; either helper returns
# a file handle (or nil on failure).
def find_and_attach_image_to(product_or_variant, filename)
  return if filename.blank?
  # Remote vs local is decided purely by the filename scheme.
  file = if filename =~ /\Ahttp[s]*:\/\//
           fetch_remote_image(filename)
         else
           fetch_local_image(filename)
         end
  # An image has an attachment (the image file) and some object which 'views' it.
  product_image = Image.new(
    :attachment => file,
    :viewable   => product_or_variant,
    :position   => product_or_variant.images.length
  )
  product_or_variant.images << product_image if product_image.save
end
# Wraps basic File IO for images stored at a known location on disk,
# accessible to the script.
# Returns an open binary-mode File, or nil when the file is missing or
# unreadable (logged as a warning).
def fetch_local_image(filename)
  filename = IMPORT_PRODUCT_SETTINGS[:product_image_path] + filename
  if File.exist?(filename) && File.readable?(filename)
    File.open(filename, 'rb')
  else
    # Interpolate the actual path (the old message had a broken "#(unknown)"
    # placeholder). File.exist? is used since File.exists? was removed in
    # Ruby 3.2.
    log("Image #{filename} was not found on the server, so this image was not imported.", :warn)
    nil
  end
end
# Fetches an image whose filename matches the format of a URL, using
# open-uri. Returns a Tempfile on success.
# On failure it logs the HTTP error (404, 500 etc) in the first instance, or
# the generic download failure otherwise, and returns nil so the caller can
# skip the image.
def fetch_remote_image(filename)
  open(filename)
rescue OpenURI::HTTPError => error
  # Interpolate the URL (the old message had a broken "#(unknown)" placeholder).
  log("Image #{filename} retrieval returned #{error.message}, so this image was not imported")
  nil
rescue StandardError
  log("Image #{filename} could not be downloaded, so was not imported.")
  nil
end
### TAXON HELPERS ###
# associate_product_with_taxon
# Accepts three formats of taxon hierarchy string which associate the given
# product with taxons:
# 1. "Tools"         - find/create the taxon and add the product to it.
# 2. "a > b > c"     - read like a tree, picking out a particular taxon.
# 3. "a > b & c > d" - as above, but associates multiple taxons.
def associate_product_with_taxon(product, taxonomy, taxon_hierarchy)
  return if product.nil? || taxonomy.nil? || taxon_hierarchy.nil?
  # Using find_or_create_by_name would be more elegant, but the params code
  # automatically downcases the taxonomy name, so a case-insensitive lookup
  # is needed (unless we are on MySQL).
  taxonomy_name = taxonomy
  taxonomy = Taxonomy.find(:first, :conditions => ["lower(name) = ?", taxonomy])
  taxonomy = Taxonomy.create(:name => taxonomy_name.capitalize) if taxonomy.nil? && IMPORT_PRODUCT_SETTINGS[:create_missing_taxonomies]
  # Guard: when the taxonomy is unknown and create_missing_taxonomies is
  # disabled, taxonomy stays nil and taxonomy.root below would raise.
  return if taxonomy.nil?
  taxon_hierarchy.split(/\s*\&\s*/).each do |hierarchy|
    hierarchy = hierarchy.split(/\s*>\s*/)
    last_taxon = taxonomy.root
    hierarchy.each do |taxon|
      last_taxon = last_taxon.children.find_or_create_by_name_and_taxonomy_id(taxon, taxonomy.id)
    end
    # Spree only needs to know the most detailed taxonomy item.
    product.taxons << last_taxon unless product.taxons.include?(last_taxon)
  end
end
### END TAXON HELPERS ###
# May be implemented via decorator if useful:
#
# ProductImport.class_eval do
#
#   private
#
#   def after_product_built(product, params_hash)
#     # so something with the product
#   end
# end
#
# Default no-op extension hook, invoked after each product is constructed
# during import. Override (e.g. via the decorator above) to post-process.
def after_product_built(product, params_hash)
end
end
end |
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)

# BUG FIX: the version file lives under lib/viewpoint/spws/, so it must be
# required as "viewpoint/spws/version"; the old "spws/version" path does not
# exist and made the gemspec unloadable.
require "viewpoint/spws/version"

Gem::Specification.new do |s|
  s.name = "viewpoint-spws"
  s.version = Viewpoint::SPWS::VERSION
  s.date = Date.today.to_s
  s.author = "Dan Wanek"
  s.email = "dan.wanek@gmail.com"
  s.homepage = "http://github.com/zenchild/viewpoint-spws"
  s.summary = "A Ruby client access library for Microsoft Sharepoint Web Services (SPWS)"
  # Removed the dead %q{TODO: Write a gem description} assignment that was
  # immediately overwritten by the heredoc below.
  s.description = <<-EOF
A Ruby client access library for Microsoft Sharepoint Web Services (SPWS). It is a work in progress. Methods are still being added from the Sharepoint API docs.
  EOF

  s.required_ruby_version = '>= 1.8.7'
  s.rubyforge_project = nil

  # Package everything tracked by git; executables come from bin/.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]

  s.rdoc_options = %w(-x spec/)
  s.extra_rdoc_files = %w(README.md LICENSE)

  s.add_runtime_dependency 'nokogiri', '~> 1.5.0'
  s.add_runtime_dependency 'httpclient', '~> 2.2.4'
  s.add_runtime_dependency 'logging', '~> 1.6.1'
  s.add_runtime_dependency 'rubyntlm'
end
Fix gemspec: require the version file by its correct path ("viewpoint/spws/version" instead of "spws/version").
# -*- encoding: utf-8 -*-
$:.push File.expand_path("../lib", __FILE__)
require "viewpoint/spws/version"

Gem::Specification.new do |s|
  s.name = "viewpoint-spws"
  s.version = Viewpoint::SPWS::VERSION
  s.date = Date.today.to_s
  s.author = "Dan Wanek"
  s.email = "dan.wanek@gmail.com"
  s.homepage = "http://github.com/zenchild/viewpoint-spws"
  s.summary = "A Ruby client access library for Microsoft Sharepoint Web Services (SPWS)"
  # Removed the dead %q{TODO: Write a gem description} assignment that was
  # immediately overwritten by the heredoc below.
  s.description = <<-EOF
A Ruby client access library for Microsoft Sharepoint Web Services (SPWS). It is a work in progress. Methods are still being added from the Sharepoint API docs.
  EOF

  s.required_ruby_version = '>= 1.8.7'
  s.rubyforge_project = nil

  # Package everything tracked by git; executables come from bin/.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ["lib"]

  s.rdoc_options = %w(-x spec/)
  s.extra_rdoc_files = %w(README.md LICENSE)

  s.add_runtime_dependency 'nokogiri', '~> 1.5.0'
  s.add_runtime_dependency 'httpclient', '~> 2.2.4'
  s.add_runtime_dependency 'logging', '~> 1.6.1'
  s.add_runtime_dependency 'rubyntlm'
end
|
require "rake/clean"

import "../../../shared/rakefiles/ci.rake"
import "../../../shared/rakefiles/test.rake"

require_relative "./vars.rb"

# This rakefile is meant to be imported by an environment-specific rakefile
# that has already assigned @env and @project_type; fail fast otherwise.
{ "@env" => @env, "@project_type" => @project_type }.each do |name, value|
  next unless value.nil?
  puts " ERROR: #{name} must be set!"
  puts " This is a problem in rake code."
  puts " Set #{name} before importing this rakefile."
  raise ArgumentError, "#{name} must be set"
end

# All heavy lifting is delegated to the exekube container.
@exekube_cmd = "docker-compose run --rm xk"
# Remove the per-user docker volumes that cache helm/kube/locust state.
task :clean_volumes => :set_vars do
  %w[helm kube locust_tasks].each do |volume|
    sh "docker volume rm -f -- #{ENV["TF_VAR_project_id"]}-#{ENV["USER"]}-#{volume}"
  end
end

Rake::Task["clean"].enhance do
  Rake::Task["clean_volumes"].invoke
end

# Remove the per-user docker volumes that hold credentials and secrets.
task :clobber_volumes => :set_vars do
  %w[secrets gcloud aws].each do |volume|
    sh "docker volume rm -f -- #{ENV["TF_VAR_project_id"]}-#{ENV["USER"]}-#{volume}"
  end
end

Rake::Task["clobber"].enhance do
  Rake::Task["clobber_volumes"].invoke
end

desc "Create cluster and deploy GPII components to it"
task :default => :deploy

task :set_vars do
  Vars.set_vars(@env, @project_type)
  Vars.set_versions
  Rake::Task[:set_compose_env].invoke
end

@compose_env_file = "compose.env"
CLEAN << @compose_env_file

# We do this with a task rather than a rule so that compose_env_file is always
# re-written.
task :set_compose_env do
  tf_var_names = ENV.keys.select { |key| key.start_with?("TF_VAR_") }
  File.open(@compose_env_file, 'w') do |file|
    file.write(tf_var_names.sort.join("\n"))
  end
end
# Thin wrappers that delegate the real work to rake tasks inside the exekube
# container via @exekube_cmd.
desc "[ADVANCED] Create or update low-level infrastructure"
task :apply_infra => [:set_vars] do
sh "#{@exekube_cmd} rake refresh_common_infra['#{@project_type}']"
sh "#{@exekube_cmd} rake apply_infra"
end
desc "Create cluster and deploy GPII components to it"
task :deploy => [:set_vars, :apply_infra] do
sh "#{@exekube_cmd} rake xk[up,false,false,true]"
Rake::Task["display_cluster_info"].invoke
end
# Prints dashboard URLs and next-step hints after a successful deploy.
desc "Display some handy info about the cluster"
task :display_cluster_info => [:set_vars] do
puts
puts
puts "*************************************************"
puts "Congratulations! Your GPII Cloud in GCP is ready!"
puts "*************************************************"
puts
puts "GCP Dashboard:"
puts " https://console.cloud.google.com/home/dashboard?organizationId=#{ ENV["TF_VAR_organization_id"] }&project=#{ ENV["TF_VAR_project_id"] }"
puts
puts "Stackdriver Logging Dashboard:"
puts " https://console.cloud.google.com/logs/viewer?project=#{ ENV["TF_VAR_project_id"] }&organizationId=#{ ENV["TF_VAR_organization_id"] }&advancedFilter=search%20text"
puts
puts "Stackdriver Monitoring Dashboard:"
puts " https://app.google.stackdriver.com/?project=#{ ENV["TF_VAR_project_id"] }"
puts
puts "Flowmanager endpoint:"
puts " curl -k https://flowmanager.#{ENV["TF_VAR_domain_name"] }"
puts
puts "Run `rake test_preferences` to execute Locust tests for Preferences."
puts "Run `rake test_flowmanager` to execute Locust tests for Flowmanager."
puts
puts "Run `rake destroy` to delete all the expensive resources created by the deployment."
puts
end
desc "Display debugging info about the current state of the cluster"
task :display_cluster_state => [:set_vars] do
sh "#{@exekube_cmd} rake display_cluster_state"
end
desc "Display gpii/universal image SHA, CI job links, and link to GitHub commit that triggered the image build"
task :display_universal_image_info => [:set_vars] do
sh "#{@exekube_cmd} rake display_universal_image_info"
end
# Guard: destroying anything in 'prd' requires explicit operator intent via
# the RAKE_REALLY_DESTROY_IN_PRD environment variable.
task :check_destroy_allowed do
  next unless @env == "prd"
  next unless ENV["RAKE_REALLY_DESTROY_IN_PRD"].nil?
  puts " ERROR: Tried to destroy something in env 'prd' but RAKE_REALLY_DESTROY_IN_PRD is not set"
  raise ArgumentError, "Tried to destroy something in env 'prd' but RAKE_REALLY_DESTROY_IN_PRD is not set"
end

desc "Undeploy GPII components and destroy cluster"
task :destroy => [:set_vars, :check_destroy_allowed, :fetch_helm_certs] do
  sh "#{@exekube_cmd} rake xk[down]"
end
# Best-effort teardown: each stage only runs if the previous one succeeded,
# but failures are logged and swallowed so cleanup continues where possible.
desc "Destroy environment, state, and secrets"
task :destroy_hard => [:set_vars] do
# Try to clean up any previous incarnation of this environment.
#
# Only destroy additional resources (e.g. secrets, terraform state) if
# previous steps succeeded; see https://issues.gpii.net/browse/GPII-3488.
begin
Rake::Task["destroy"].reenable
Rake::Task["destroy"].invoke
Rake::Task["destroy_secrets"].reenable
Rake::Task["destroy_secrets"].invoke
# Iff destroy and destroy_secrets both succeed, we want to run all of these
# destroy_tfstate commands (regardless if any one destroy_tfstate fails).
begin
Rake::Task["destroy_tfstate"].reenable
Rake::Task["destroy_tfstate"].invoke("k8s")
rescue RuntimeError => err
puts "destroy_tfstate step failed:"
puts err
puts "Continuing."
end
begin
Rake::Task["destroy_tfstate"].reenable
Rake::Task["destroy_tfstate"].invoke("locust")
rescue RuntimeError => err
puts "destroy_tfstate step failed:"
puts err
puts "Continuing."
end
rescue RuntimeError => err
puts "Destroy step failed:"
puts err
puts "Continuing."
end
end
desc "Destroy cluster and low-level infrastructure"
task :destroy_infra => [:set_vars, :check_destroy_allowed, :destroy] do
sh "#{@exekube_cmd} rake destroy_infra"
end
# Deletes every .tflock object under this env's tfstate prefix in GS.
desc "[ADVANCED] Remove stale Terraform locks from GS -- for non-dev environments coordinate with the team first"
task :unlock => [:set_vars] do
sh "#{@exekube_cmd} sh -c ' \
for lock in $(gsutil ls -R gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/ | grep .tflock); do \
gsutil rm $lock; \
done'"
end
# BUG FIX in help text: kubectl's namespace flag is `-n` (or `--namespace`);
# the example previously showed the invalid `--n`.
desc "[ADVANCED] Run arbitrary command in exekube container via rake wrapper (with secrets set) -- rake sh['kubectl exec -n gpii couchdb-couchdb-0 -c \
couchdb -- curl -s http://$TF_VAR_secret_couchdb_admin_username:$TF_VAR_secret_couchdb_admin_password@127.0.0.1:5984/gpii/_all_docs']"
task :sh, [:cmd] => [:set_vars] do |taskname, args|
  if args[:cmd]
    cmd = args[:cmd]
  else
    puts "Argument :cmd -- the command to run inside the exekube container -- not present, defaulting to 'bash'"
    cmd = "bash"
  end
  # Runs through the xk wrapper so secrets are injected into the environment.
  sh "#{@exekube_cmd} rake xk['#{cmd}',skip_secret_mgmt,preserve_stderr]"
end
# Unlike :sh, this runs the command directly (no xk wrapper, no secrets).
desc "[ADVANCED] Run arbitrary command in exekube container via plain shell -- rake plain_sh['kubectl --namespace gpii get pods']"
task :plain_sh, [:cmd] => [:set_vars] do |taskname, args|
  cmd = args[:cmd]
  unless cmd
    puts "Argument :cmd -- the command to run inside the exekube container -- not present, defaulting to 'bash'"
    cmd = "bash"
  end
  sh "#{@exekube_cmd} #{cmd}"
end
desc "[ADVANCED] Destroy all SA keys except current one"
task :destroy_sa_keys => [:set_vars, :check_destroy_allowed] do
sh "#{@exekube_cmd} rake destroy_sa_keys"
end
# Deletes every yaml secrets file from the matching -secrets/ GS bucket(s).
desc "[ADVANCED] Destroy secrets file stored in GS bucket for encryption key, passed as argument -- rake destroy_secrets['default']"
task :destroy_secrets, [:encryption_key] => [:set_vars, :check_destroy_allowed] do |taskname, args|
sh "#{@exekube_cmd} sh -c ' \
for secret_bucket in $(gsutil ls -p #{ENV["TF_VAR_project_id"]} | grep #{args[:encryption_key]}-secrets/); do \
for secret_file in $(gsutil ls -R $secret_bucket | grep yaml); do \
gsutil rm $secret_file; \
done \
done'"
end
desc "[ADVANCED] Destroy Terraform state stored in GS bucket for prefix, passed as argument -- rake destroy_tfstate['k8s']"
task :destroy_tfstate, [:prefix] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:prefix].nil? || args[:prefix].size == 0
puts "Argument :prefix not present, defaulting to 'k8s'"
prefix = "k8s"
else
prefix = args[:prefix]
end
sh "#{@exekube_cmd} sh -c 'gsutil rm -r gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/#{prefix}'"
end
# Re-encrypts existing tfstate objects with the current key (decrypting with
# the rotated-out key).
desc "[ADVANCED] Rotate Terraform state key for prefix, passed as argument -- rake rotate_tfstate_key['k8s']"
task :rotate_tfstate_key, [:prefix] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:prefix].nil? || args[:prefix].size == 0
puts "Argument :prefix not present, defaulting to 'k8s'"
prefix = "k8s"
else
prefix = args[:prefix]
end
sh "#{@exekube_cmd} rake rotate_secret['default','key_tfstate_encryption_key','sh -c \"gsutil \
-o GSUtil:decryption_key1=$TF_VAR_key_tfstate_encryption_key_rotated \
-o GSUtil:encryption_key=$TF_VAR_key_tfstate_encryption_key \
rewrite -k -r gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/#{prefix}\"',skip_secret_mgmt,preserve_stderr]"
end
desc "[ADVANCED] Rotate provided KMS key and re-encrypt its associated secrets file in GS bucket -- rake rotate_secrets_key['default']"
task :rotate_secrets_key, [:kms_key] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:kms_key].nil? || args[:kms_key].size == 0
puts "Argument :kms_key not present, defaulting to 'default'"
kms_key = "default"
else
kms_key = args[:kms_key]
end
sh "#{@exekube_cmd} rake rotate_secrets_key['#{kms_key}']"
end
desc "[EXPERIMENTAL] [ADVANCED] Import an existing KMS keyring, e.g when moving an environment to a new (but previously-used) region"
task :import_keyring => [:set_vars, :check_destroy_allowed] do
sh "#{@exekube_cmd} rake import_keyring"
end
# We need Google to create the Container Registry for us (see
# common/modules/gcp-container-registry/main.tf). This task pushes an image to
# the Registry, which creates the Registry if it does not exist (or does
# basically nothing if it already exists).
task :init_registry => [:set_vars] do
# I've chosen the current exekube base image (alpine:3.9) because it is small
# and because it will end up in the Registry anyway. Note that this
# duplicates information in exekube/dockerfiles, i.e. there is coupling
# without cohesion.
image = "alpine:3.9"
registry_url_base = "gcr.io"
registry_url = "#{registry_url_base}/#{ENV["TF_VAR_project_id"]}"
# Pull the image to localhost
sh "docker pull #{image}"
# Tag the local image with our Registry
sh "docker tag #{image} #{registry_url}/#{image}"
# Authenticate with gcloud if we haven't already (the task that does this
# must run inside the exekube container, so we can't include it as a
# dependency to this task).
sh "#{@exekube_cmd} rake configure_login"
# Get an auth token using our gcloud credentials
token = %x{
#{@exekube_cmd} gcloud auth print-access-token
}.chomp
# Load the auth token into Docker
# (Use an env var to avoid echoing the token to stdout / the CI logs.)
ENV["RAKE_INIT_REGISTRY_TOKEN"] = token
sh "echo \"$RAKE_INIT_REGISTRY_TOKEN\" | docker login -u oauth2accesstoken --password-stdin https://#{registry_url_base}"
# Push the local image to our Registry
sh "docker push #{registry_url}/#{image}"
# Clean up
sh "docker rmi #{registry_url}/#{image}" # || true"
# We won't remove #{image} in case it existed previously. This is a small leak.
end
desc "[ADVANCED] Fetch helm TLS certificates from TF state (only in case they are present)"
task :fetch_helm_certs => [:set_vars] do
  sh "#{@exekube_cmd} rake fetch_helm_certs"
end

# Shared argument validation for deploy_module / destroy_module: the :module
# argument must be present and name an existing Terragrunt directory.
# (Extracted to remove duplicated validation logic.)
def validate_module_arg(mod)
  if mod.nil?
    puts " ERROR: args[:module] must be set and point to Terragrunt directory!"
    raise
  elsif !File.directory?(mod)
    puts " ERROR: args[:module] must point to Terragrunt directory!"
    raise
  end
end

desc "[ADVANCED] Destroy provided module in the cluster, and then deploy it -- rake redeploy_module['k8s/kube-system/cert-manager']"
task :redeploy_module, [:module] => [:set_vars] do |taskname, args|
  Rake::Task[:destroy_module].invoke(args[:module])
  Rake::Task[:deploy_module].invoke(args[:module])
end

desc "[ADVANCED] Deploy provided module into the cluster -- rake deploy_module['k8s/kube-system/cert-manager']"
task :deploy_module, [:module] => [:set_vars, :fetch_helm_certs] do |taskname, args|
  validate_module_arg(args[:module])
  sh "#{@exekube_cmd} rake xk['apply live/#{@env}/#{args[:module]}',true,false,true]"
end

desc "[ADVANCED] Destroy provided module in the cluster -- rake destroy_module['k8s/kube-system/cert-manager']"
task :destroy_module, [:module] => [:set_vars, :check_destroy_allowed, :fetch_helm_certs] do |taskname, args|
  validate_module_arg(args[:module])
  sh "#{@exekube_cmd} rake xk['destroy live/#{@env}/#{args[:module]}',skip_secret_mgmt]"
end
desc "[ADMIN ONLY] Grant owner role in the current project to the current user"
task :grant_project_admin => [:set_vars] do
  sh "#{@exekube_cmd} rake grant_project_admin"
end

desc "[ADMIN ONLY] Revoke owner role in the current project from the current user"
task :revoke_project_admin, [:force] => [:set_vars] do |taskname, args|
  # FIXES: use `&&` rather than `and` (low-precedence keyword, a known trap),
  # and `.to_s` so an unset TF_VAR_project_id cannot raise NoMethodError.
  if !args[:force] && ENV['TF_VAR_project_id'].to_s.match("dev-#{ENV['USER']}")
    puts " ERROR: You can not revoke project admin role from yourself in your own dev project!"
    puts " Run `rake revoke_project_admin[true]` to do this anyway."
    exit 1
  end
  sh "#{@exekube_cmd} rake revoke_project_admin"
end

desc "[ADMIN ONLY] Grant org-level admin roles to the current user"
task :grant_org_admin => [:set_vars] do
  sh "#{@exekube_cmd} rake grant_org_admin"
end

desc "[ADMIN ONLY] Revoke org-level admin roles from the current user"
task :revoke_org_admin => [:set_vars] do
  sh "#{@exekube_cmd} rake revoke_org_admin"
end

desc "[ADMIN ONLY] Restore a snapshot from a remote file"
task :restore_snapshot_from_image_file, [:files] => [:set_vars] do |taskname, args|
  sh "#{@exekube_cmd} rake restore_snapshot_from_image_file['#{args[:files]}']"
end

# vim: et ts=2 sw=2:
Remove the destroy_tfstate("locust") step from :destroy_hard -- the locust Terraform state no longer needs to be destroyed separately.
require "rake/clean"
import "../../../shared/rakefiles/ci.rake"
import "../../../shared/rakefiles/test.rake"
require_relative "./vars.rb"
# This rakefile must be imported by an environment-specific rakefile that has
# already assigned @env and @project_type; fail fast otherwise.
if @env.nil?
puts " ERROR: @env must be set!"
puts " This is a problem in rake code."
puts " Set @env before importing this rakefile."
raise ArgumentError, "@env must be set"
end
if @project_type.nil?
puts " ERROR: @project_type must be set!"
puts " This is a problem in rake code."
puts " Set @project_type before importing this rakefile."
raise ArgumentError, "@project_type must be set"
end
# All heavy lifting is delegated to the exekube container.
@exekube_cmd = "docker-compose run --rm xk"
task :clean_volumes => :set_vars do
["helm", "kube", "locust_tasks"].each do |app|
sh "docker volume rm -f -- #{ENV["TF_VAR_project_id"]}-#{ENV["USER"]}-#{app}"
end
end
Rake::Task["clean"].enhance do
Rake::Task["clean_volumes"].invoke
end
task :clobber_volumes => :set_vars do
["secrets", "gcloud", "aws"].each do |app|
sh "docker volume rm -f -- #{ENV["TF_VAR_project_id"]}-#{ENV["USER"]}-#{app}"
end
end
Rake::Task["clobber"].enhance do
Rake::Task["clobber_volumes"].invoke
end
desc "Create cluster and deploy GPII components to it"
task :default => :deploy
task :set_vars do
Vars.set_vars(@env, @project_type)
Vars.set_versions()
Rake::Task[:set_compose_env].invoke
end
@compose_env_file = "compose.env"
CLEAN << @compose_env_file
# We do this with a task rather than a rule so that compose_env_file is always
# re-written.
task :set_compose_env do
tf_vars = []
ENV.each do |key, val|
tf_vars << key if key.start_with?("TF_VAR_")
end
File.open(@compose_env_file, 'w') do |file|
file.write(tf_vars.sort.join("\n"))
end
end
desc "[ADVANCED] Create or update low-level infrastructure"
task :apply_infra => [:set_vars] do
sh "#{@exekube_cmd} rake refresh_common_infra['#{@project_type}']"
sh "#{@exekube_cmd} rake apply_infra"
end
desc "Create cluster and deploy GPII components to it"
task :deploy => [:set_vars, :apply_infra] do
sh "#{@exekube_cmd} rake xk[up,false,false,true]"
Rake::Task["display_cluster_info"].invoke
end
# Prints dashboard URLs and next-step hints after a successful deploy.
desc "Display some handy info about the cluster"
task :display_cluster_info => [:set_vars] do
puts
puts
puts "*************************************************"
puts "Congratulations! Your GPII Cloud in GCP is ready!"
puts "*************************************************"
puts
puts "GCP Dashboard:"
puts " https://console.cloud.google.com/home/dashboard?organizationId=#{ ENV["TF_VAR_organization_id"] }&project=#{ ENV["TF_VAR_project_id"] }"
puts
puts "Stackdriver Logging Dashboard:"
puts " https://console.cloud.google.com/logs/viewer?project=#{ ENV["TF_VAR_project_id"] }&organizationId=#{ ENV["TF_VAR_organization_id"] }&advancedFilter=search%20text"
puts
puts "Stackdriver Monitoring Dashboard:"
puts " https://app.google.stackdriver.com/?project=#{ ENV["TF_VAR_project_id"] }"
puts
puts "Flowmanager endpoint:"
puts " curl -k https://flowmanager.#{ENV["TF_VAR_domain_name"] }"
puts
puts "Run `rake test_preferences` to execute Locust tests for Preferences."
puts "Run `rake test_flowmanager` to execute Locust tests for Flowmanager."
puts
puts "Run `rake destroy` to delete all the expensive resources created by the deployment."
puts
end
desc "Display debugging info about the current state of the cluster"
task :display_cluster_state => [:set_vars] do
sh "#{@exekube_cmd} rake display_cluster_state"
end
desc "Display gpii/universal image SHA, CI job links, and link to GitHub commit that triggered the image build"
task :display_universal_image_info => [:set_vars] do
sh "#{@exekube_cmd} rake display_universal_image_info"
end
# Guard: destroying anything in 'prd' requires RAKE_REALLY_DESTROY_IN_PRD.
task :check_destroy_allowed do
if ["prd"].include?(@env)
if ENV["RAKE_REALLY_DESTROY_IN_PRD"].nil?
puts " ERROR: Tried to destroy something in env 'prd' but RAKE_REALLY_DESTROY_IN_PRD is not set"
raise ArgumentError, "Tried to destroy something in env 'prd' but RAKE_REALLY_DESTROY_IN_PRD is not set"
end
end
end
desc "Undeploy GPII components and destroy cluster"
task :destroy => [:set_vars, :check_destroy_allowed, :fetch_helm_certs] do
sh "#{@exekube_cmd} rake xk[down]"
end
# Best-effort teardown; note this copy no longer destroys the locust tfstate.
desc "Destroy environment, state, and secrets"
task :destroy_hard => [:set_vars] do
# Try to clean up any previous incarnation of this environment.
#
# Only destroy additional resources (e.g. secrets, terraform state) if
# previous steps succeeded; see https://issues.gpii.net/browse/GPII-3488.
begin
Rake::Task["destroy"].reenable
Rake::Task["destroy"].invoke
Rake::Task["destroy_secrets"].reenable
Rake::Task["destroy_secrets"].invoke
# If destroy and destroy_secrets both succeed, we want to run all of these
# destroy_tfstate commands (regardless if any one destroy_tfstate fails).
begin
Rake::Task["destroy_tfstate"].reenable
Rake::Task["destroy_tfstate"].invoke("k8s")
rescue RuntimeError => err
puts "destroy_tfstate step failed:"
puts err
puts "Continuing."
end
rescue RuntimeError => err
puts "Destroy step failed:"
puts err
puts "Continuing."
end
end
desc "Destroy cluster and low-level infrastructure"
task :destroy_infra => [:set_vars, :check_destroy_allowed, :destroy] do
sh "#{@exekube_cmd} rake destroy_infra"
end
# Deletes every .tflock object under this env's tfstate prefix in GS.
desc "[ADVANCED] Remove stale Terraform locks from GS -- for non-dev environments coordinate with the team first"
task :unlock => [:set_vars] do
sh "#{@exekube_cmd} sh -c ' \
for lock in $(gsutil ls -R gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/ | grep .tflock); do \
gsutil rm $lock; \
done'"
end
# BUG FIX in help text: kubectl's namespace flag is `-n` (or `--namespace`);
# the example previously showed the invalid `--n`.
desc "[ADVANCED] Run arbitrary command in exekube container via rake wrapper (with secrets set) -- rake sh['kubectl exec -n gpii couchdb-couchdb-0 -c \
couchdb -- curl -s http://$TF_VAR_secret_couchdb_admin_username:$TF_VAR_secret_couchdb_admin_password@127.0.0.1:5984/gpii/_all_docs']"
task :sh, [:cmd] => [:set_vars] do |taskname, args|
  if args[:cmd]
    cmd = args[:cmd]
  else
    puts "Argument :cmd -- the command to run inside the exekube container -- not present, defaulting to 'bash'"
    cmd = "bash"
  end
  # Runs through the xk wrapper so secrets are injected into the environment.
  sh "#{@exekube_cmd} rake xk['#{cmd}',skip_secret_mgmt,preserve_stderr]"
end
# Unlike :sh, this runs the command directly (no xk wrapper, no secrets).
desc "[ADVANCED] Run arbitrary command in exekube container via plain shell -- rake plain_sh['kubectl --namespace gpii get pods']"
task :plain_sh, [:cmd] => [:set_vars] do |taskname, args|
if args[:cmd]
cmd = args[:cmd]
else
puts "Argument :cmd -- the command to run inside the exekube container -- not present, defaulting to 'bash'"
cmd = "bash"
end
sh "#{@exekube_cmd} #{cmd}"
end
desc "[ADVANCED] Destroy all SA keys except current one"
task :destroy_sa_keys => [:set_vars, :check_destroy_allowed] do
sh "#{@exekube_cmd} rake destroy_sa_keys"
end
# Deletes every yaml secrets file from the matching -secrets/ GS bucket(s).
desc "[ADVANCED] Destroy secrets file stored in GS bucket for encryption key, passed as argument -- rake destroy_secrets['default']"
task :destroy_secrets, [:encryption_key] => [:set_vars, :check_destroy_allowed] do |taskname, args|
sh "#{@exekube_cmd} sh -c ' \
for secret_bucket in $(gsutil ls -p #{ENV["TF_VAR_project_id"]} | grep #{args[:encryption_key]}-secrets/); do \
for secret_file in $(gsutil ls -R $secret_bucket | grep yaml); do \
gsutil rm $secret_file; \
done \
done'"
end
desc "[ADVANCED] Destroy Terraform state stored in GS bucket for prefix, passed as argument -- rake destroy_tfstate['k8s']"
task :destroy_tfstate, [:prefix] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:prefix].nil? || args[:prefix].size == 0
puts "Argument :prefix not present, defaulting to 'k8s'"
prefix = "k8s"
else
prefix = args[:prefix]
end
sh "#{@exekube_cmd} sh -c 'gsutil rm -r gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/#{prefix}'"
end
# Re-encrypts existing tfstate objects with the current key (decrypting with
# the rotated-out key).
desc "[ADVANCED] Rotate Terraform state key for prefix, passed as argument -- rake rotate_tfstate_key['k8s']"
task :rotate_tfstate_key, [:prefix] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:prefix].nil? || args[:prefix].size == 0
puts "Argument :prefix not present, defaulting to 'k8s'"
prefix = "k8s"
else
prefix = args[:prefix]
end
sh "#{@exekube_cmd} rake rotate_secret['default','key_tfstate_encryption_key','sh -c \"gsutil \
-o GSUtil:decryption_key1=$TF_VAR_key_tfstate_encryption_key_rotated \
-o GSUtil:encryption_key=$TF_VAR_key_tfstate_encryption_key \
rewrite -k -r gs://#{ENV["TF_VAR_project_id"]}-tfstate/#{@env}/#{prefix}\"',skip_secret_mgmt,preserve_stderr]"
end
desc "[ADVANCED] Rotate provided KMS key and re-encrypt its associated secrets file in GS bucket -- rake rotate_secrets_key['default']"
task :rotate_secrets_key, [:kms_key] => [:set_vars, :check_destroy_allowed] do |taskname, args|
if args[:kms_key].nil? || args[:kms_key].size == 0
puts "Argument :kms_key not present, defaulting to 'default'"
kms_key = "default"
else
kms_key = args[:kms_key]
end
sh "#{@exekube_cmd} rake rotate_secrets_key['#{kms_key}']"
end
desc "[EXPERIMENTAL] [ADVANCED] Import an existing KMS keyring, e.g when moving an environment to a new (but previously-used) region"
task :import_keyring => [:set_vars, :check_destroy_allowed] do
sh "#{@exekube_cmd} rake import_keyring"
end
# We need Google to create the Container Registry for us (see
# common/modules/gcp-container-registry/main.tf). This task pushes an image to
# the Registry, which creates the Registry if it does not exist (or does
# basically nothing if it already exists).
task :init_registry => [:set_vars] do
# I've chosen the current exekube base image (alpine:3.9) because it is small
# and because it will end up in the Registry anyway. Note that this
# duplicates information in exekube/dockerfiles, i.e. there is coupling
# without cohesion.
image = "alpine:3.9"
registry_url_base = "gcr.io"
registry_url = "#{registry_url_base}/#{ENV["TF_VAR_project_id"]}"
# Pull the image to localhost
sh "docker pull #{image}"
# Tag the local image with our Registry
sh "docker tag #{image} #{registry_url}/#{image}"
# Authenticate with gcloud if we haven't already (the task that does this
# must run inside the exekube container, so we can't include it as a
# dependency to this task).
sh "#{@exekube_cmd} rake configure_login"
# Get an auth token using our gcloud credentials
token = %x{
#{@exekube_cmd} gcloud auth print-access-token
}.chomp
# Load the auth token into Docker
# (Use an env var to avoid echoing the token to stdout / the CI logs.)
ENV["RAKE_INIT_REGISTRY_TOKEN"] = token
sh "echo \"$RAKE_INIT_REGISTRY_TOKEN\" | docker login -u oauth2accesstoken --password-stdin https://#{registry_url_base}"
# Push the local image to our Registry
sh "docker push #{registry_url}/#{image}"
# Clean up
sh "docker rmi #{registry_url}/#{image}" # || true"
# We won't remove #{image} in case it existed previously. This is a small leak.
end
desc "[ADVANCED] Fetch helm TLS certificates from TF state (only in case they are present)"
task :fetch_helm_certs => [:set_vars] do
sh "#{@exekube_cmd} rake fetch_helm_certs"
end
desc "[ADVANCED] Destroy provided module in the cluster, and then deploy it -- rake redeploy_module['k8s/kube-system/cert-manager']"
task :redeploy_module, [:module] => [:set_vars] do |taskname, args|
Rake::Task[:destroy_module].invoke(args[:module])
Rake::Task[:deploy_module].invoke(args[:module])
end
desc "[ADVANCED] Deploy provided module into the cluster -- rake deploy_module['k8s/kube-system/cert-manager']"
task :deploy_module, [:module] => [:set_vars, :fetch_helm_certs] do |taskname, args|
if args[:module].nil?
puts " ERROR: args[:module] must be set and point to Terragrunt directory!"
raise
elsif !File.directory?(args[:module])
puts " ERROR: args[:module] must point to Terragrunt directory!"
raise
end
sh "#{@exekube_cmd} rake xk['apply live/#{@env}/#{args[:module]}',true,false,true]"
end
desc "[ADVANCED] Destroy provided module in the cluster -- rake destroy_module['k8s/kube-system/cert-manager']"
task :destroy_module, [:module] => [:set_vars, :check_destroy_allowed, :fetch_helm_certs] do |taskname, args|
if args[:module].nil?
puts " ERROR: args[:module] must be set and point to Terragrunt directory!"
raise
elsif !File.directory?(args[:module])
puts " ERROR: args[:module] must point to Terragrunt directory!"
raise
end
sh "#{@exekube_cmd} rake xk['destroy live/#{@env}/#{args[:module]}',skip_secret_mgmt]"
end
desc "[ADMIN ONLY] Grant owner role in the current project to the current user"
task :grant_project_admin => [:set_vars] do
sh "#{@exekube_cmd} rake grant_project_admin"
end
desc "[ADMIN ONLY] Revoke owner role in the current project from the current user"
task :revoke_project_admin, [:force] => [:set_vars] do |taskname, args|
  # FIXES: use `&&` rather than `and` (low-precedence keyword, a known trap),
  # and `.to_s` so an unset TF_VAR_project_id cannot raise NoMethodError.
  if !args[:force] && ENV['TF_VAR_project_id'].to_s.match("dev-#{ENV['USER']}")
    puts " ERROR: You can not revoke project admin role from yourself in your own dev project!"
    puts " Run `rake revoke_project_admin[true]` to do this anyway."
    exit 1
  end
  sh "#{@exekube_cmd} rake revoke_project_admin"
end
desc "[ADMIN ONLY] Grant org-level admin roles to the current user"
task :grant_org_admin => [:set_vars] do
sh "#{@exekube_cmd} rake grant_org_admin"
end
desc "[ADMIN ONLY] Revoke org-level admin roles from the current user"
task :revoke_org_admin => [:set_vars] do
sh "#{@exekube_cmd} rake revoke_org_admin"
end
# Delegates to the container-side restore task with the given file list.
desc "[ADMIN ONLY] Restore a snapshot from a remote file"
task :restore_snapshot_from_image_file, [:files] => [:set_vars] do |taskname, args|
sh "#{@exekube_cmd} rake restore_snapshot_from_image_file['#{args[:files]}']"
end
# vim: et ts=2 sw=2:
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'

Bundler.require(:default, Rails.env) if defined?(Bundler)

module RacingOnRails
  class Application < Rails::Application
    config.autoload_paths += %W(
      #{config.root}/app/rack #{config.root}/app/models/competitions #{config.root}/app/models/observers #{config.root}/app/pdfs #{config.root}/lib/racing_on_rails
      #{config.root}/lib/results #{config.root}/lib
    )
    config.session_store :key, "_racing_on_rails_session"
    config.session_store :secret, "9998d23d32c59a8161aba78b03630a93"
    config.time_zone = "Pacific Time (US & Canada)"

    # Racing on Rails has many foreign key constraints, so :sql is required
    config.active_record.schema_format = :sql

    unless ENV["SKIP_OBSERVERS"]
      config.active_record.observers = :event_observer, :name_observer, :person_observer, :race_observer, :result_observer, :team_observer
    end

    config.filter_parameters += [ :password, :password_confirmation ]

    # HP"s proxy, among others, gets this wrong
    config.action_dispatch.ip_spoofing_check = false

    # Ugh. Make config accessible to overrides
    @config = config
    if File.exist?("#{config.root}/local/config/environments/#{::Rails.env}.rb")
      load("#{config.root}/local/config/environments/#{::Rails.env}.rb")
    end

    # See Rails::Configuration for more options
    # BUG FIX: File.exists? is deprecated (removed in Ruby 3.2); use
    # File.exist? to match the check a few lines above.
    if File.exist?("#{config.root}/local/config/database.yml")
      paths.config.database = "#{config.root}/local/config/database.yml"
    end
  end

  # Local config customization
  load("#{::Rails.root.to_s}/local/config/environment.rb") if File.exist?("#{::Rails.root.to_s}/local/config/environment.rb")

  # Prefer local templates, partials etc. if they exist. Otherwise, use the base
  # application's generic files.
  ActionController::Base.view_paths = ActionView::Base.process_view_paths(["#{::Rails.root.to_s}/local/app/views", "#{::Rails.root.to_s}/app/views"])

  class ActionView::Base
    def self.default_form_builder
      RacingOnRails::FormBuilder
    end
  end
end
Require sentient_user
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(:default, Rails.env) if defined?(Bundler)
# Rails application configuration for Racing on Rails.
# A deployment can override configuration, environment settings, database.yml
# and view templates by placing files under local/ (loaded/preferred below).
module RacingOnRails
  class Application < Rails::Application
    config.autoload_paths += %W(
      #{config.root}/app/rack
      #{config.root}/app/models/competitions
      #{config.root}/app/models/observers
      #{config.root}/app/pdfs #{config.root}/lib/racing_on_rails
      #{config.root}/lib/results
      #{config.root}/lib
      #{config.root}/lib/sentient_user
    )
    config.session_store :key, "_racing_on_rails_session"
    config.session_store :secret, "9998d23d32c59a8161aba78b03630a93"
    config.time_zone = "Pacific Time (US & Canada)"
    # Racing on Rails has many foreign key constraints, so :sql is required
    config.active_record.schema_format = :sql
    # Observers can be disabled (e.g. for bulk data loads) via SKIP_OBSERVERS.
    unless ENV["SKIP_OBSERVERS"]
      config.active_record.observers = :event_observer, :name_observer, :person_observer, :race_observer, :result_observer, :team_observer
    end
    config.filter_parameters += [ :password, :password_confirmation ]
    # HP"s proxy, among others, gets this wrong
    config.action_dispatch.ip_spoofing_check = false
    # Ugh. Make config accessible to overrides
    @config = config
    if File.exist?("#{config.root}/local/config/environments/#{::Rails.env}.rb")
      load("#{config.root}/local/config/environments/#{::Rails.env}.rb")
    end
    # See Rails::Configuration for more options
    # FIX: File.exists? is a deprecated alias (removed in Ruby 3.2); use
    # File.exist?, consistent with the other checks in this file.
    if File.exist?("#{config.root}/local/config/database.yml")
      paths.config.database = "#{config.root}/local/config/database.yml"
    end
  end
  # Local config customization
  load("#{::Rails.root.to_s}/local/config/environment.rb") if File.exist?("#{::Rails.root.to_s}/local/config/environment.rb")
  # Prefer local templates, partials etc. if they exist. Otherwise, use the base
  # application's generic files.
  ActionController::Base.view_paths = ActionView::Base.process_view_paths(["#{::Rails.root.to_s}/local/app/views", "#{::Rails.root.to_s}/app/views"])
  # Use the application's own form builder everywhere by default.
  class ActionView::Base
    def self.default_form_builder
      RacingOnRails::FormBuilder
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
module RailsPortal
# Main Rails application class for the portal. Wires the custom
# Rack::ExpandB64Gzip middleware into the stack, extends autoload paths,
# and starts ActiveRecord observers after initialization (skipped when
# running `rake db:migrate`, since the tables may not exist yet).
class Application < Rails::Application
  # ExpandB64Gzip needs to be before ActionController::ParamsParser in the rack middleware stack:
  # $ rake middleware
  # (in /Users/stephen/dev/ruby/src/webapps/rigse2.git)
  # use Rack::Lock
  # use ActionController::Failsafe
  # use ActionController::Reloader
  # use ActiveRecord::ConnectionAdapters::ConnectionManagement
  # use ActiveRecord::QueryCache
  # use ActiveRecord::SessionStore, #<Proc:0x0192dfc8@(eval):8>
  # use Rack::ExpandB64Gzip
  # use ActionController::ParamsParser
  # use Rack::MethodOverride
  # use Rack::Head
  # run ActionController::Dispatcher.new
  config.autoload_paths += %W(#{config.root}/lib) # include lib directory
  # FIX: the pattern previously ended with a stray '"' (%W(...lib/**/")),
  # which produced a path containing a literal quote character.
  config.autoload_paths += %W(#{config.root}/lib/**/) # include all subdirectories
  config.middleware.insert_before("ActionDispatch::ParamsParser", "Rack::ExpandB64Gzip")
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.
  # Skip frameworks you're not going to use. To use Rails without a database
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
  # Specify gems that this application depends on.
  # They can then be installed with "rake gems:install" on new installations.
  # You have to specify the <tt>:lib</tt> option for libraries, where the Gem name (<em>sqlite3-ruby</em>) differs from the file itself (_sqlite3_)
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "sqlite3-ruby", :lib => "sqlite3"
  # config.gem "aws-s3", :lib => "aws/s3"
  # FIXME: see comment about this hack in config/environments/development.rb
  $: << 'vendor/gems/ffi-ncurses-0.3.2.1/lib/'
  # config.gem "ffi-ncurses ", :version => "= 0.3.3"
  # These cause problems with irb. Left in for reference
  # config.gem 'rspec-rails', :lib => 'spec/rails', :version => '1.1.11'
  # config.gem 'rspec', :lib => 'spec', :version => '1.1.11'
  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{::Rails.root.to_s}/extras )
  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug
  # Make Time.zone default to the specified zone, and make Active Record store time values
  # in the database in UTC, and return them converted to the specified local zone.
  # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
  config.time_zone = 'UTC'
  # Set the default location for page caching
  config.action_controller.page_cache_directory = ::Rails.root.to_s + '/public'
  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql
  # Activate observers that should always be running
  # Please note that observers generated using script/generate observer need to have an _observer suffix
  # ... observers are now started in config/initializers/observers.rb
  # Nov 10 NP: This technique wasn't working, so, I figued we would just surround w/ begin / rescue
  # if ActiveRecord::Base.connection_handler.connection_pools["ActiveRecord::Base"].connected?
  if $PROGRAM_NAME =~ /rake/ && ARGV.grep(/^db:migrate/).length > 0
    puts "Didn't start observers because you are running: rake db:migrate"
  else
    config.after_initialize do
      begin
        ActiveRecord::Base.observers = :user_observer, :investigation_observer, :"dataservice/bundle_content_observer", :"admin/project_observer"
        ActiveRecord::Base.instantiate_observers
        puts "Started observers"
      rescue
        # interestingly Rails::logger doesn't seem to be working here, so I am using ugly puts for now:
        puts "Couldn't start observers #{$!} ... but continuing process anyway"
        puts "This might be because you have not setup the appropriate database tables yet... "
        puts "see config/initializers/observers.rb for more information."
      end
    end
  end
  # config.after_initialize do
  #   opts = config.has_many_polymorphs_options
  #   opts[:file_pattern] = Array(opts[:file_pattern])
  #   opts[:file_pattern] << "#{::Rails.root.to_s}/app/models/**/*.rb"
  #   config.has_many_polymorphs_options = opts
  # end
end
# ANONYMOUS_USER = User.find_by_login('anonymous')
require 'prawn'
require 'prawn/format'
# Special-case for when the migration that adds the default_user
# attribute hasn't been run yet.
# TODO: This causes troubles when the user table is not present.
# Like on a fresh install, or in various migration situations
# begin
# site_admin = User.site_admin
# if site_admin.respond_to? :default_user
# if APP_CONFIG[:enable_default_users]
# User.unsuspend_default_users
# else
# User.suspend_default_users
# end
# end
# rescue StandardError => e
# # rescue Mysql::Error => e
# puts "e"
# end
module Enumerable
  # An extended group_by which will group at multiple depths.
  # Ex:
  # >> ["aab","abc","aba","abd","aac","ada"].extended_group_by([lambda {|e| e.first}, lambda {|e| e.first(2)}])
  # => {"a"=>{"aa"=>["aab", "aac"], "ab"=>["abc", "aba", "abd"], "ad"=>["ada"]}}
  #
  # lambdas - Array of callables, one per grouping depth, outermost first.
  #
  # FIX: the previous implementation used lambdas.shift, destructively
  # consuming the caller's array; the argument is now left untouched.
  # NOTE(review): in this file the module is nested inside another module, so
  # it defines a namespaced Enumerable rather than reopening the core
  # Enumerable — confirm that nesting is intentional.
  def extended_group_by(lambdas)
    lamb, *remaining = lambdas
    # No lambdas left: nothing to group by, return the collection unchanged.
    result = lamb ? group_by { |e| lamb.call(e) } : self
    unless remaining.empty?
      # Recurse into each group with the remaining (deeper) lambdas.
      final = {}
      result.each { |key, members| final[key] = members.extended_group_by(remaining) }
      result = final
    end
    result
  end
end
end
Add Bundler.require to application.rb
require File.expand_path('../boot', __FILE__)
require 'rails/all'
module RailsPortal
# Main Rails application class for the portal. Wires the custom
# Rack::ExpandB64Gzip middleware into the stack, extends autoload paths,
# and starts ActiveRecord observers after initialization (skipped when
# running `rake db:migrate`, since the tables may not exist yet).
class Application < Rails::Application
  Bundler.require(:default, Rails.env) if defined?(Bundler)
  # ExpandB64Gzip needs to be before ActionController::ParamsParser in the rack middleware stack:
  # $ rake middleware
  # (in /Users/stephen/dev/ruby/src/webapps/rigse2.git)
  # use Rack::Lock
  # use ActionController::Failsafe
  # use ActionController::Reloader
  # use ActiveRecord::ConnectionAdapters::ConnectionManagement
  # use ActiveRecord::QueryCache
  # use ActiveRecord::SessionStore, #<Proc:0x0192dfc8@(eval):8>
  # use Rack::ExpandB64Gzip
  # use ActionController::ParamsParser
  # use Rack::MethodOverride
  # use Rack::Head
  # run ActionController::Dispatcher.new
  config.autoload_paths += %W(#{config.root}/lib) # include lib directory
  # FIX: the pattern previously ended with a stray '"' (%W(...lib/**/")),
  # which produced a path containing a literal quote character.
  config.autoload_paths += %W(#{config.root}/lib/**/) # include all subdirectories
  config.middleware.insert_before("ActionDispatch::ParamsParser", "Rack::ExpandB64Gzip")
  # Settings in config/environments/* take precedence over those specified here.
  # Application configuration should go into files in config/initializers
  # -- all .rb files in that directory are automatically loaded.
  # See Rails::Configuration for more options.
  # Skip frameworks you're not going to use. To use Rails without a database
  # you must remove the Active Record framework.
  # config.frameworks -= [ :active_record, :active_resource, :action_mailer ]
  # Specify gems that this application depends on.
  # They can then be installed with "rake gems:install" on new installations.
  # You have to specify the <tt>:lib</tt> option for libraries, where the Gem name (<em>sqlite3-ruby</em>) differs from the file itself (_sqlite3_)
  # config.gem "bj"
  # config.gem "hpricot", :version => '0.6', :source => "http://code.whytheluckystiff.net"
  # config.gem "sqlite3-ruby", :lib => "sqlite3"
  # config.gem "aws-s3", :lib => "aws/s3"
  # FIXME: see comment about this hack in config/environments/development.rb
  $: << 'vendor/gems/ffi-ncurses-0.3.2.1/lib/'
  # config.gem "ffi-ncurses ", :version => "= 0.3.3"
  # These cause problems with irb. Left in for reference
  # config.gem 'rspec-rails', :lib => 'spec/rails', :version => '1.1.11'
  # config.gem 'rspec', :lib => 'spec', :version => '1.1.11'
  # Only load the plugins named here, in the order given. By default, all plugins
  # in vendor/plugins are loaded in alphabetical order.
  # :all can be used as a placeholder for all plugins not explicitly named
  # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
  # Add additional load paths for your own custom dirs
  # config.load_paths += %W( #{::Rails.root.to_s}/extras )
  # Force all environments to use the same logger level
  # (by default production uses :info, the others :debug)
  # config.log_level = :debug
  # Make Time.zone default to the specified zone, and make Active Record store time values
  # in the database in UTC, and return them converted to the specified local zone.
  # Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
  config.time_zone = 'UTC'
  # Set the default location for page caching
  config.action_controller.page_cache_directory = ::Rails.root.to_s + '/public'
  # Use SQL instead of Active Record's schema dumper when creating the test database.
  # This is necessary if your schema can't be completely dumped by the schema dumper,
  # like if you have constraints or database-specific column types
  # config.active_record.schema_format = :sql
  # Activate observers that should always be running
  # Please note that observers generated using script/generate observer need to have an _observer suffix
  # ... observers are now started in config/initializers/observers.rb
  # Nov 10 NP: This technique wasn't working, so, I figued we would just surround w/ begin / rescue
  # if ActiveRecord::Base.connection_handler.connection_pools["ActiveRecord::Base"].connected?
  if $PROGRAM_NAME =~ /rake/ && ARGV.grep(/^db:migrate/).length > 0
    puts "Didn't start observers because you are running: rake db:migrate"
  else
    config.after_initialize do
      begin
        ActiveRecord::Base.observers = :user_observer, :investigation_observer, :"dataservice/bundle_content_observer", :"admin/project_observer"
        ActiveRecord::Base.instantiate_observers
        puts "Started observers"
      rescue
        # interestingly Rails::logger doesn't seem to be working here, so I am using ugly puts for now:
        puts "Couldn't start observers #{$!} ... but continuing process anyway"
        puts "This might be because you have not setup the appropriate database tables yet... "
        puts "see config/initializers/observers.rb for more information."
      end
    end
  end
  # config.after_initialize do
  #   opts = config.has_many_polymorphs_options
  #   opts[:file_pattern] = Array(opts[:file_pattern])
  #   opts[:file_pattern] << "#{::Rails.root.to_s}/app/models/**/*.rb"
  #   config.has_many_polymorphs_options = opts
  # end
end
# ANONYMOUS_USER = User.find_by_login('anonymous')
require 'prawn'
require 'prawn/format'
# Special-case for when the migration that adds the default_user
# attribute hasn't been run yet.
# TODO: This causes troubles when the user table is not present.
# Like on a fresh install, or in various migration situations
# begin
# site_admin = User.site_admin
# if site_admin.respond_to? :default_user
# if APP_CONFIG[:enable_default_users]
# User.unsuspend_default_users
# else
# User.suspend_default_users
# end
# end
# rescue StandardError => e
# # rescue Mysql::Error => e
# puts "e"
# end
module Enumerable
  # An extended group_by which will group at multiple depths.
  # Ex:
  # >> ["aab","abc","aba","abd","aac","ada"].extended_group_by([lambda {|e| e.first}, lambda {|e| e.first(2)}])
  # => {"a"=>{"aa"=>["aab", "aac"], "ab"=>["abc", "aba", "abd"], "ad"=>["ada"]}}
  #
  # lambdas - Array of callables, one per grouping depth, outermost first.
  #
  # FIX: the previous implementation used lambdas.shift, destructively
  # consuming the caller's array; the argument is now left untouched.
  # NOTE(review): in this file the module is nested inside another module, so
  # it defines a namespaced Enumerable rather than reopening the core
  # Enumerable — confirm that nesting is intentional.
  def extended_group_by(lambdas)
    lamb, *remaining = lambdas
    # No lambdas left: nothing to group by, return the collection unchanged.
    result = lamb ? group_by { |e| lamb.call(e) } : self
    unless remaining.empty?
      # Recurse into each group with the remaining (deeper) lambdas.
      final = {}
      result.each { |key, members| final[key] = members.extended_group_by(remaining) }
      result = final
    end
    result
  end
end
end
|
module Api
  module V1
    # Search endpoint: runs a paginated search scoped to the current team
    # and the current user's projects.
    class SearchController < BaseController
      def create
        term = params[:term]
        # A search term is mandatory — reject the request up front without one.
        if term.nil?
          return render json: { message: "Validation Failed", errors: [{ field: "term", errors: ["is required"] }] }, status: 422
        end
        page = (params[:page] || 1).to_i
        limit = (params[:limit] || 20).to_i
        results = SearchDocument.search(term, current_team.id, current_user.project_ids).page(page).per(limit)
        # Only expose a next-page URL while more results remain beyond this page.
        more = (page * limit) < results.total_count
        meta = {
          term: term,
          total: results.total_count,
          next_url: (api_v1_search_index_path(term: term, page: page + 1, limit: limit) if more)
        }
        render json: results, meta: meta, each_serializer: SearchDocumentApiSerializer
      end
    end
  end
end
Add API documentation for the search endpoint
module Api
  module V1
    # Search endpoint: runs a paginated search scoped to the current team
    # and the current user's projects. Documented via the apipie DSL below.
    class SearchController < BaseController
      resource_description do
        formats ["json"]
      end
      # FIX: typo in the generated API documentation ("Fetchs" -> "Fetches").
      api! "Fetches search results"
      param :term, String, desc: "The search term", required: true
      param :limit, Integer, desc: "The number of results you want returned (default: 20)", required: false
      param :page, Integer, desc: "The page number you want returned (default: 1)", required: false
      def create
        term = params[:term]
        # Make sure a term is provided
        if term.nil?
          return render json: { message: "Validation Failed", errors: [{ field: "term", errors: ["is required"] }] }, status: 422
        end
        page = (params[:page] || 1).to_i
        limit = (params[:limit] || 20).to_i
        results = SearchDocument.search(term, current_team.id, current_user.project_ids).page(page).per(limit)
        # Only expose a next-page URL while more results remain beyond this page.
        has_more_results = (page * limit) < results.total_count
        next_url = api_v1_search_index_path(term: term, page: page+1, limit: limit) if has_more_results
        meta = {
          term: term,
          total: results.total_count,
          next_url: next_url
        }
        render json: results, meta: meta, each_serializer: SearchDocumentApiSerializer
      end
    end
  end
end
|
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Ladder application configuration.
module Ladder
  # Application version string.
  VERSION = "1.6.9"
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    config.time_zone = 'Auckland'
    # Mailer links point at the production host over HTTPS.
    config.action_mailer.default_url_options = { :host => 'ladders.pw', :protocol => 'https' }
    # Serve custom error pages through the app's own routes instead of Rails defaults.
    config.exceptions_app = self.routes
  end
end
Release v1.6.10
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Ladder application configuration.
module Ladder
  # Application version string.
  VERSION = "1.6.10"
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    config.time_zone = 'Auckland'
    # Mailer links point at the production host over HTTPS.
    config.action_mailer.default_url_options = { :host => 'ladders.pw', :protocol => 'https' }
    # Serve custom error pages through the app's own routes instead of Rails defaults.
    config.exceptions_app = self.routes
  end
end
|
# JSON API for GPS track points: listing, a processed feed with per-point
# distance/speed, and creation of new points tied to the current user.
class Api::V1::TracksController < Api::V1::BaseController
  # Newest-first, paginated listing of all tracks.
  def index
    render json: @tracks = Track.order("created_at desc").
      paginate(page: params[:page] || 1)
  end
  # Processed track feed grouped by username (see process_tracks).
  def json
    render json: process_tracks
  end
  # Creates a track point, reusing an existing Location row when one with the
  # same (rounded) coordinates already exists.
  def create
    @track = current_user.tracks.build({"track_time" => (params[:track_time] || Time.now).to_time.in_time_zone("Singapore")})
    if params[:lat].present? && params[:lng].present?
      lat = to_decimal(params[:lat])
      lng = to_decimal(params[:lng])
      @location = Location.where(lat: lat, lng: lng).first
      unless @location.present?
        @location = Location.create({lat: lat, lng: lng})
      end
      @track.location = @location
    end
    if @track.save
      render json: { id: @track.id }, status: :ok
    else
      render json: { errors: @track.errors }, status: :unprocessable_entity
    end
  end
  private
  # Never trust parameters from the scary internet, only allow the white list through.
  def track_params
    params.permit(:lat, :lng, :track_time)
  end
  # Rounds a coordinate string to 5 decimal places (≈1m precision).
  def to_decimal(float)
    float.to_d.round(5)
  end
  # Builds the processed feed: tracks grouped by username, each point annotated
  # with distance/speed/time_diff relative to the previous point. Points that
  # imply implausible movement (gap >= 2h or speed >= 40 m/s) are dropped.
  def process_tracks
    data = Track.includes(:user, :location)
    # FIX: params[:from] was interpolated directly into SQL (injection risk);
    # use a bound parameter instead.
    data = data.where("created_at >= ?", params[:from]) if params[:from].present?
    data = data.group_by { |t| t.user.username }.
      map do |user,tracks|
        {
          user => tracks.map do |track|
            {
              "id" => track.id,
              "lat" => track.location.lat,
              "lng" => track.location.lng,
              "time" => track.track_time || track.created_at,
              "distance" => nil, # metres
              "speed" => nil, # m/s
              "time_diff" => nil, # seconds
              "user" => user.to_s
            }
          end
        }
      end.inject(:merge)
    processed = []
    (data || {}).map do |user, tracks|
      tracks.each_with_index do |track, index|
        if index > 0
          distance = calc_distance(track, tracks[index - 1])
          time_diff = track["time"] - tracks[index - 1]["time"]
          # FIX: the original line was missing the opening parenthesis
          # (`... ? 999 : distance/time_diff)`), a syntax error.
          # Sentinel 999 avoids division by zero for simultaneous points.
          speed = (time_diff == 0 ? 999 : distance / time_diff)
          track["time_diff"] = time_diff
          track["speed"] = speed
          track["distance"] = distance
          processed << track if (time_diff < 7200 && speed < 40)
        end
      end
    end
    processed
  end
  # Haversine great-circle distance in metres between two points given as
  # hashes with "lat"/"lng" keys in degrees.
  def calc_distance loc1, loc2
    rad_per_deg = Math::PI/180 # PI / 180
    rkm = 6371 # Earth radius in kilometers
    rm = rkm * 1000 # Radius in meters
    dlat_rad = (loc2["lat"]-loc1["lat"]) * rad_per_deg # Delta, converted to rad
    dlon_rad = (loc2["lng"]-loc1["lng"]) * rad_per_deg
    lat1_rad, lon1_rad = [loc1["lat"] * rad_per_deg, loc1["lng"] * rad_per_deg]
    lat2_rad, lon2_rad = [loc2["lat"] * rad_per_deg, loc2["lng"] * rad_per_deg]
    a = Math.sin(dlat_rad/2)**2 + Math.cos(lat1_rad) * Math.cos(lat2_rad) * Math.sin(dlon_rad/2)**2
    c = 2 * Math::atan2(Math::sqrt(a), Math::sqrt(1-a))
    rm * c # Delta in metres
  end
end
Fix syntax error (unbalanced parenthesis) in the speed calculation in process_tracks
# JSON API for GPS track points: listing, a processed feed with per-point
# distance/speed, and creation of new points tied to the current user.
class Api::V1::TracksController < Api::V1::BaseController
  # Newest-first, paginated listing of all tracks.
  def index
    render json: @tracks = Track.order("created_at desc").
      paginate(page: params[:page] || 1)
  end
  # Processed track feed grouped by username (see process_tracks).
  def json
    render json: process_tracks
  end
  # Creates a track point, reusing an existing Location row when one with the
  # same (rounded) coordinates already exists.
  def create
    @track = current_user.tracks.build({"track_time" => (params[:track_time] || Time.now).to_time.in_time_zone("Singapore")})
    if params[:lat].present? && params[:lng].present?
      lat = to_decimal(params[:lat])
      lng = to_decimal(params[:lng])
      @location = Location.where(lat: lat, lng: lng).first
      unless @location.present?
        @location = Location.create({lat: lat, lng: lng})
      end
      @track.location = @location
    end
    if @track.save
      render json: { id: @track.id }, status: :ok
    else
      render json: { errors: @track.errors }, status: :unprocessable_entity
    end
  end
  private
  # Never trust parameters from the scary internet, only allow the white list through.
  def track_params
    params.permit(:lat, :lng, :track_time)
  end
  # Rounds a coordinate string to 5 decimal places (≈1m precision).
  def to_decimal(float)
    float.to_d.round(5)
  end
  # Builds the processed feed: tracks grouped by username, each point annotated
  # with distance/speed/time_diff relative to the previous point. Points that
  # imply implausible movement (gap >= 2h or speed >= 40 m/s) are dropped.
  def process_tracks
    data = Track.includes(:user, :location)
    # FIX: params[:from] was interpolated directly into SQL (injection risk);
    # use a bound parameter instead.
    data = data.where("created_at >= ?", params[:from]) if params[:from].present?
    data = data.group_by { |t| t.user.username }.
      map do |user,tracks|
        {
          user => tracks.map do |track|
            {
              "id" => track.id,
              "lat" => track.location.lat,
              "lng" => track.location.lng,
              "time" => track.track_time || track.created_at,
              "distance" => nil, # metres
              "speed" => nil, # m/s
              "time_diff" => nil, # seconds
              "user" => user.to_s
            }
          end
        }
      end.inject(:merge)
    processed = []
    (data || {}).map do |user, tracks|
      tracks.each_with_index do |track, index|
        if index > 0
          distance = calc_distance(track, tracks[index - 1])
          time_diff = track["time"] - tracks[index - 1]["time"]
          # Sentinel 999 avoids division by zero for simultaneous points.
          speed = (time_diff == 0 ? 999 : distance/time_diff)
          track["time_diff"] = time_diff
          track["speed"] = speed
          track["distance"] = distance
          processed << track if (time_diff < 7200 && speed < 40)
        end
      end
    end
    processed
  end
  # Haversine great-circle distance in metres between two points given as
  # hashes with "lat"/"lng" keys in degrees.
  def calc_distance loc1, loc2
    rad_per_deg = Math::PI/180 # PI / 180
    rkm = 6371 # Earth radius in kilometers
    rm = rkm * 1000 # Radius in meters
    dlat_rad = (loc2["lat"]-loc1["lat"]) * rad_per_deg # Delta, converted to rad
    dlon_rad = (loc2["lng"]-loc1["lng"]) * rad_per_deg
    lat1_rad, lon1_rad = [loc1["lat"] * rad_per_deg, loc1["lng"] * rad_per_deg]
    lat2_rad, lon2_rad = [loc2["lat"] * rad_per_deg, loc2["lng"] * rad_per_deg]
    a = Math.sin(dlat_rad/2)**2 + Math.cos(lat1_rad) * Math.cos(lat2_rad) * Math.sin(dlon_rad/2)**2
    c = 2 * Math::atan2(Math::sqrt(a), Math::sqrt(1-a))
    rm * c # Delta in metres
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application configuration for the Wiki Education Dashboard.
module WikiEduDashboard
  class Application < Rails::Application
    # Autoload model and worker subdirectories.
    config.autoload_paths += Dir[Rails.root.join("app", "models", "{*/}")]
    config.autoload_paths += Dir[Rails.root.join("app", "workers", "{*/}")]
    # Generator defaults: RSpec with only fixture specs enabled, factories
    # provided by FactoryGirl under spec/factories.
    config.generators do |g|
      g.test_framework :rspec,
        fixtures: true,
        view_specs: false,
        helper_specs: false,
        routing_specs: false,
        controller_specs: false,
        request_specs: false
      g.fixture_replacement :factory_girl, dir: "spec/factories"
    end
    # Get the secret_key_base from application.yml
    config.secret_key_base = ENV['secret_key_base']
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :en
    ## NOTE - LOCALES with hyphens creates and error when exporting translation files
    ## currently can't add :ku-latn, :roa-tara, or :zh-hans
    # config.i18n.available_locales = [:en, :de, :bcl, :de, :es, :ja, :ksh, :lb, :nl, :pl, :pt, :qqq, :ru, :ur]
    # Fallback to default locale when messages are missing.
    # NOTE(review): i18n.fallbacks is assigned twice (true here, [:en] below);
    # the later assignment wins — confirm both lines are intentional.
    config.i18n.fallbacks = true
    require "#{Rails.root}/config/cldr_pluralization"
    I18n::Backend::Simple.send(:include, I18n::Backend::CldrPluralization)
    # Set fallback locale to en, which is the source locale.
    config.i18n.fallbacks = [:en]
    # Disables native processing of Sass and Coffeescript
    config.assets.enabled = false
    # Use custom error pages (like 404) instead of Rails defaults
    config.exceptions_app = self.routes
    # Rails cache with Dalli/memcached
    config.cache_store = :mem_cache_store, 'localhost', { pool_size: 5, expires_in: 7.days, compress: false, value_max_bytes: 1024 * 1024 * 4 }
    config.action_dispatch.return_only_media_type_on_content_type = false
    # CORS: allow cross-origin GETs for embeddable widgets and on-wiki gadgets.
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*'
        # Allows for embedding course stats
        resource '/embed/course_stats/*/*', :headers => :any, :methods => [:get, :options]
        # For use by on-wiki gadgets
        resource '/campaigns/*/*', :headers => :any, methods: [:get, :options]
      end
    end
  end
end
Remove config that didn't work as expected
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application configuration for the Wiki Education Dashboard.
module WikiEduDashboard
  class Application < Rails::Application
    # Autoload model and worker subdirectories.
    config.autoload_paths += Dir[Rails.root.join("app", "models", "{*/}")]
    config.autoload_paths += Dir[Rails.root.join("app", "workers", "{*/}")]
    # Generator defaults: RSpec with only fixture specs enabled, factories
    # provided by FactoryGirl under spec/factories.
    config.generators do |g|
      g.test_framework :rspec,
        fixtures: true,
        view_specs: false,
        helper_specs: false,
        routing_specs: false,
        controller_specs: false,
        request_specs: false
      g.fixture_replacement :factory_girl, dir: "spec/factories"
    end
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    config.i18n.default_locale = :en
    ## NOTE - LOCALES with hyphens creates and error when exporting translation files
    ## currently can't add :ku-latn, :roa-tara, or :zh-hans
    # config.i18n.available_locales = [:en, :de, :bcl, :de, :es, :ja, :ksh, :lb, :nl, :pl, :pt, :qqq, :ru, :ur]
    # Fallback to default locale when messages are missing.
    # NOTE(review): i18n.fallbacks is assigned twice (true here, [:en] below);
    # the later assignment wins — confirm both lines are intentional.
    config.i18n.fallbacks = true
    require "#{Rails.root}/config/cldr_pluralization"
    I18n::Backend::Simple.send(:include, I18n::Backend::CldrPluralization)
    # Set fallback locale to en, which is the source locale.
    config.i18n.fallbacks = [:en]
    # Disables native processing of Sass and Coffeescript
    config.assets.enabled = false
    # Use custom error pages (like 404) instead of Rails defaults
    config.exceptions_app = self.routes
    # Rails cache with Dalli/memcached
    config.cache_store = :mem_cache_store, 'localhost', { pool_size: 5, expires_in: 7.days, compress: false, value_max_bytes: 1024 * 1024 * 4 }
    config.action_dispatch.return_only_media_type_on_content_type = false
    # CORS: allow cross-origin GETs for embeddable widgets and on-wiki gadgets.
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*'
        # Allows for embedding course stats
        resource '/embed/course_stats/*/*', :headers => :any, :methods => [:get, :options]
        # For use by on-wiki gadgets
        resource '/campaigns/*/*', :headers => :any, methods: [:get, :options]
      end
    end
  end
end
|
class Assessment360Controller < ApplicationController
include GradesHelper
# Added the @instructor to display the instructor name in the home page of the 360 degree assessment
# Grants access to privileged (non-student) roles only.
def action_allowed?
  privileged_roles = ['Super-Administrator',
                      'Administrator',
                      'Instructor',
                      'Teaching Assistant']
  privileged_roles.include?(current_role_name)
end
# Find the list of all students and assignments pertaining to the course.
# This data is used to compute the metareview and teammate review scores.
# Populates instance variables consumed by the view: @meta_review,
# @teammate_review, @teamed_count, and the @overall_* aggregate hashes.
def all_students_all_reviews
  course = Course.find(params[:course_id])
  # Ignore calibration assignments and assignments with no participants.
  @assignments = course.assignments.reject(&:is_calibrated).reject {|a| a.participants.empty? }
  @course_participants = course.get_participants
  insure_existence_of(@course_participants,course)
  # hashes for view
  @meta_review = {}
  @teammate_review = {}
  @teamed_count = {}
  # for course
  # eg. @overall_teammate_review_grades = {assgt_id1: 100, assgt_id2: 178, ...}
  # @overall_teammate_review_count = {assgt_id1: 1, assgt_id2: 2, ...}
  %w[teammate meta].each do |type|
    instance_variable_set("@overall_#{type}_review_grades", {})
    instance_variable_set("@overall_#{type}_review_count", {})
  end
  @course_participants.each do |cp|
    # for each assignment
    # [aggregrate_review_grades_per_stu, review_count_per_stu] --> [0, 0]
    %w[teammate meta].each {|type| instance_variable_set("@#{type}_review_info_per_stu", [0, 0]) }
    students_teamed = StudentTask.teamed_students(cp.user)
    # Number of students this participant has been teamed with in this course.
    @teamed_count[cp.id] = students_teamed[course.id].try(:size).to_i
    @assignments.each do |assignment|
      @meta_review[cp.id] = {} unless @meta_review.key?(cp.id)
      @teammate_review[cp.id] = {} unless @teammate_review.key?(cp.id)
      assignment_participant = assignment.participants.find_by(user_id: cp.user_id)
      # Skip assignments this student does not participate in.
      next if assignment_participant.nil?
      teammate_reviews = assignment_participant.teammate_reviews
      meta_reviews = assignment_participant.metareviews
      # Fold this assignment's reviews into the per-student and course-wide totals.
      calc_overall_review_info(assignment,
                               cp,
                               teammate_reviews,
                               @teammate_review,
                               @overall_teammate_review_grades,
                               @overall_teammate_review_count,
                               @teammate_review_info_per_stu)
      calc_overall_review_info(assignment,
                               cp,
                               meta_reviews,
                               @meta_review,
                               @overall_meta_review_grades,
                               @overall_meta_review_count,
                               @meta_review_info_per_stu)
    end
    # calculate average grade for each student on all assignments in this course
    avg_review_calc_per_student(cp, @teammate_review_info_per_stu, @teammate_review)
    avg_review_calc_per_student(cp, @meta_review_info_per_stu, @meta_review)
  end
  # avoid divide by zero error
  overall_review_count(@assignments, @overall_teammate_review_count, @overall_meta_review_count)
end
# Ensures every assignment has a non-zero review count so that later
# average calculations never divide by zero: any missing or zero count
# is replaced with 1, in place, in both hashes.
def overall_review_count(assignments, overall_teammate_review_count, overall_meta_review_count)
  assignments.each do |assignment|
    [overall_teammate_review_count, overall_meta_review_count].each do |counts|
      # nil.to_i == 0, so this covers both "missing" and "zero" in one check
      # (replaces the non-idiomatic `temp_count.nil? or temp_count.zero?`).
      counts[assignment.id] = 1 if counts[assignment.id].to_i.zero?
    end
  end
end
# Calculate the overall average review grade that a student has gotten from
# their teammate(s) and instructor(s).
# review_info_per_stu is a [total_grade, review_count] pair; the rounded
# percentage string is stored under review[cp.id][:avg_grade_for_assgt],
# but only when at least one review was received.
def avg_review_calc_per_student(cp, review_info_per_stu, review)
  total, count = review_info_per_stu
  # No reviews received: leave the hash untouched.
  return unless count > 0
  review[cp.id][:avg_grade_for_assgt] = (total * 1.0 / count).round.to_s + '%'
end
# Find the list of all students and assignments pertaining to the course.
# This data is used to compute the instructor assigned grade and peer review scores.
# There are many nuances about how to collect these scores. See our design document for more deails
# http://wiki.expertiza.ncsu.edu/index.php/CSC/ECE_517_Fall_2018_E1871_Grade_Summary_By_Student
# Populates @topics, @assignment_grades, @peer_review_scores and
# @final_grades, keyed by course-participant id (then assignment id).
def course_student_grade_summary
  @topics = {}
  @assignment_grades = {}
  @peer_review_scores = {}
  @final_grades = {}
  course = Course.find(params[:course_id])
  # Ignore calibration assignments and assignments with no participants.
  @assignments = course.assignments.reject(&:is_calibrated).reject {|a| a.participants.empty? }
  @course_participants = course.get_participants
  insure_existence_of(@course_participants,course)
  @course_participants.each do |cp|
    @topics[cp.id] = {}
    @assignment_grades[cp.id] = {}
    @peer_review_scores[cp.id] = {}
    @final_grades[cp.id] = 0
    @assignments.each do |assignment|
      user_id = cp.user_id
      assignment_id = assignment.id
      assignment_participant = assignment.participants.find_by(user_id: user_id)
      # break out of the loop if there are no participants in the assignment
      next if assignment.participants.find_by(user_id: user_id).nil?
      # break out of the loop if the participant has no team
      next if TeamsUser.team_id(assignment_id, user_id).nil?
      # pull information about the student's grades for particular assignment
      assignment_grade_summary(cp, assignment_id)
      peer_review_score = find_peer_review_score(user_id, assignment_id)
      # Skip when no average peer review score is available for this assignment.
      next if (peer_review_score[:review].nil? || peer_review_score[:review][:scores].nil? || peer_review_score[:review][:scores][:avg].nil?)
      @peer_review_scores[cp.id][assignment_id] = peer_review_score[:review][:scores][:avg].round(2)
    end
  end
end
# Records, for one course participant (cp) and one assignment: the sign-up
# topic, the instructor-assigned team grade, and the running final-grade total.
# Writes into @topics, @assignment_grades and @final_grades (keyed by cp.id).
def assignment_grade_summary(cp, assignment_id)
  user_id = cp.user_id
  # topic exists if a team signed up for a topic, which can be found via the user and the assignment
  topic_id = SignedUpTeam.topic_id(assignment_id, user_id)
  @topics[cp.id][assignment_id] = SignUpTopic.find_by(id: topic_id)
  # instructor grade is stored in the team model, which is found by finding the user's team for the assignment
  team_id = TeamsUser.team_id(assignment_id, user_id)
  team = Team.find(team_id)
  @assignment_grades[cp.id][assignment_id] = team[:grade_for_submission]
  # No submission grade recorded yet -- leave the final-grade total untouched.
  return if @assignment_grades[cp.id][assignment_id].nil?
  @final_grades[cp.id] += @assignment_grades[cp.id][assignment_id]
end
# Flashes an error and bounces the user back when the course has no participants.
def insure_existence_of(course_participants, course)
  return unless course_participants.empty?
  flash[:error] = "There is no course participant in course #{course.name}"
  redirect_to(:back)
end
# The function populates the hash value for all students for all the reviews that they have gotten.
# I.e., Teammate and Meta for each of the assignments that they have taken
# This value is then used to display the overall teammate_review and meta_review grade in the view
def calc_overall_review_info(assignment, course_participant, reviews, hash_per_stu,
                             overall_review_grade_hash, overall_review_count_hash,
                             review_info_per_stu)
  # Folds one student's reviews for one assignment into the per-student hash
  # (hash_per_stu) and the course-wide grade/count totals.
  assgt_id = assignment.id
  # Seed missing assignment entries with 0 so later arithmetic never sees nil.
  overall_review_grade_hash[assgt_id] = 0 unless overall_review_grade_hash.key?(assgt_id)
  overall_review_count_hash[assgt_id] = 0 unless overall_review_count_hash.key?(assgt_id)
  review_total = reviews.sum { |review| review.average_score.to_i }
  review_avg = nil
  unless reviews.count.zero?
    review_avg = (review_total * 1.0 / reviews.count).round
    hash_per_stu[course_participant.id][assgt_id] = "#{review_avg}%"
  end
  # Only fold the average into the running totals when a positive grade exists.
  return unless review_avg && review_total > 0
  review_info_per_stu[0] += review_avg
  review_info_per_stu[1] += 1
  overall_review_grade_hash[assgt_id] += review_avg
  overall_review_count_hash[assgt_id] += 1
end
# The peer review score is taken from the questions for the assignment
# Looks up a student's peer-review scores for one assignment: finds the
# assignment participant, rebuilds the assignment's rubric questions and
# delegates to participant.scores. Returns whatever #scores returns
# (a nested hash; callers guard [:review][:scores][:avg]).
def find_peer_review_score(user_id, assignment_id)
  participant = AssignmentParticipant.find_by(user_id: user_id, parent_id: assignment_id)
  assignment = participant.assignment
  # retrieve_questions is presumably mixed in from GradesHelper -- confirm.
  questions = retrieve_questions assignment.questionnaires, assignment_id
  participant.scores(questions)
end
# View helper: renders a sign-up topic for display, or '-' when none exists.
def format_topic(topic)
  return '-' if topic.nil?
  topic.format_for_display
end
# View helper: renders a score as-is, or '-' when no score is recorded.
def format_score(score)
  return '-' if score.nil?
  score
end
# Expose the formatting helpers to the view templates.
helper_method :format_score
helper_method :format_topic
end
Refactor the nil-checking condition for the peer review score into separate guard clauses.
# Controller for the 360-degree assessment: course-wide summaries of teammate
# reviews, metareviews, instructor-assigned grades and peer-review scores.
class Assessment360Controller < ApplicationController
  include GradesHelper

  # Added the @instructor to display the instructor name in the home page of the 360 degree assessment
  # Only staff roles may view these course-wide summaries.
  def action_allowed?
    ['Super-Administrator',
     'Administrator',
     'Instructor',
     'Teaching Assistant'].include? current_role_name
  end

  # Find the list of all students and assignments pertaining to the course.
  # This data is used to compute the metareview and teammate review scores.
  def all_students_all_reviews
    course = Course.find(params[:course_id])
    # Ignore calibration assignments and assignments nobody participates in.
    @assignments = course.assignments.reject(&:is_calibrated).reject {|a| a.participants.empty? }
    @course_participants = course.get_participants
    insure_existence_of(@course_participants, course)
    # hashes for view
    @meta_review = {}
    @teammate_review = {}
    @teamed_count = {}
    # for course
    # eg. @overall_teammate_review_grades = {assgt_id1: 100, assgt_id2: 178, ...}
    #     @overall_teammate_review_count  = {assgt_id1: 1, assgt_id2: 2, ...}
    %w[teammate meta].each do |type|
      instance_variable_set("@overall_#{type}_review_grades", {})
      instance_variable_set("@overall_#{type}_review_count", {})
    end
    @course_participants.each do |cp|
      # for each assignment
      # [aggregrate_review_grades_per_stu, review_count_per_stu] --> [0, 0]
      %w[teammate meta].each {|type| instance_variable_set("@#{type}_review_info_per_stu", [0, 0]) }
      students_teamed = StudentTask.teamed_students(cp.user)
      @teamed_count[cp.id] = students_teamed[course.id].try(:size).to_i
      @assignments.each do |assignment|
        @meta_review[cp.id] = {} unless @meta_review.key?(cp.id)
        @teammate_review[cp.id] = {} unless @teammate_review.key?(cp.id)
        assignment_participant = assignment.participants.find_by(user_id: cp.user_id)
        next if assignment_participant.nil?
        teammate_reviews = assignment_participant.teammate_reviews
        meta_reviews = assignment_participant.metareviews
        calc_overall_review_info(assignment,
                                 cp,
                                 teammate_reviews,
                                 @teammate_review,
                                 @overall_teammate_review_grades,
                                 @overall_teammate_review_count,
                                 @teammate_review_info_per_stu)
        calc_overall_review_info(assignment,
                                 cp,
                                 meta_reviews,
                                 @meta_review,
                                 @overall_meta_review_grades,
                                 @overall_meta_review_count,
                                 @meta_review_info_per_stu)
      end
      # calculate average grade for each student on all assignments in this course
      avg_review_calc_per_student(cp, @teammate_review_info_per_stu, @teammate_review)
      avg_review_calc_per_student(cp, @meta_review_info_per_stu, @meta_review)
    end
    # avoid divide by zero error
    overall_review_count(@assignments, @overall_teammate_review_count, @overall_meta_review_count)
  end

  # Replaces missing/zero review counts with 1 so the view can divide safely.
  def overall_review_count(assignments, overall_teammate_review_count, overall_meta_review_count)
    assignments.each do |assignment|
      temp_count = overall_teammate_review_count[assignment.id]
      overall_teammate_review_count[assignment.id] = 1 if temp_count.nil? || temp_count.zero?
      temp_count = overall_meta_review_count[assignment.id]
      overall_meta_review_count[assignment.id] = 1 if temp_count.nil? || temp_count.zero?
    end
  end

  # Calculate the overall average review grade that a student has gotten from their teammate(s) and instructor(s)
  def avg_review_calc_per_student(cp, review_info_per_stu, review)
    # check to see if the student has been given a review
    if review_info_per_stu[1] > 0
      temp_avg_grade = review_info_per_stu[0] * 1.0 / review_info_per_stu[1]
      review[cp.id][:avg_grade_for_assgt] = temp_avg_grade.round.to_s + '%'
    end
  end

  # Find the list of all students and assignments pertaining to the course.
  # This data is used to compute the instructor assigned grade and peer review scores.
  # There are many nuances about how to collect these scores. See our design document for more details
  # http://wiki.expertiza.ncsu.edu/index.php/CSC/ECE_517_Fall_2018_E1871_Grade_Summary_By_Student
  def course_student_grade_summary
    @topics = {}
    @assignment_grades = {}
    @peer_review_scores = {}
    @final_grades = {}
    course = Course.find(params[:course_id])
    @assignments = course.assignments.reject(&:is_calibrated).reject {|a| a.participants.empty? }
    @course_participants = course.get_participants
    insure_existence_of(@course_participants, course)
    @course_participants.each do |cp|
      @topics[cp.id] = {}
      @assignment_grades[cp.id] = {}
      @peer_review_scores[cp.id] = {}
      @final_grades[cp.id] = 0
      @assignments.each do |assignment|
        user_id = cp.user_id
        assignment_id = assignment.id
        assignment_participant = assignment.participants.find_by(user_id: user_id)
        # skip if the student never participated in the assignment
        # (reuse the record fetched above instead of repeating the same query)
        next if assignment_participant.nil?
        # skip if the participant has no team
        next if TeamsUser.team_id(assignment_id, user_id).nil?
        # pull information about the student's grades for particular assignment
        assignment_grade_summary(cp, assignment_id)
        peer_review_score = find_peer_review_score(user_id, assignment_id)
        next if peer_review_score[:review].nil?
        next if peer_review_score[:review][:scores].nil?
        next if peer_review_score[:review][:scores][:avg].nil?
        @peer_review_scores[cp.id][assignment_id] = peer_review_score[:review][:scores][:avg].round(2)
      end
    end
  end

  # Records, for one course participant (cp) and one assignment: the sign-up
  # topic, the instructor-assigned team grade, and the running final-grade total.
  def assignment_grade_summary(cp, assignment_id)
    user_id = cp.user_id
    # topic exists if a team signed up for a topic, which can be found via the user and the assignment
    topic_id = SignedUpTeam.topic_id(assignment_id, user_id)
    @topics[cp.id][assignment_id] = SignUpTopic.find_by(id: topic_id)
    # instructor grade is stored in the team model, which is found by finding the user's team for the assignment
    team_id = TeamsUser.team_id(assignment_id, user_id)
    team = Team.find(team_id)
    @assignment_grades[cp.id][assignment_id] = team[:grade_for_submission]
    # No submission grade recorded yet -- leave the final-grade total untouched.
    return if @assignment_grades[cp.id][assignment_id].nil?
    @final_grades[cp.id] += @assignment_grades[cp.id][assignment_id]
  end

  # Flashes an error and bounces the user back when the course has no participants.
  def insure_existence_of(course_participants, course)
    if course_participants.empty?
      flash[:error] = "There is no course participant in course #{course.name}"
      redirect_to(:back)
    end
  end

  # The function populates the hash value for all students for all the reviews that they have gotten.
  # I.e., Teammate and Meta for each of the assignments that they have taken
  # This value is then used to display the overall teammate_review and meta_review grade in the view
  def calc_overall_review_info(assignment,
                               course_participant,
                               reviews,
                               hash_per_stu,
                               overall_review_grade_hash,
                               overall_review_count_hash,
                               review_info_per_stu)
    # If a student has not taken an assignment or if they have not received any grade for the same,
    # assign it as 0 instead of leaving it blank. This helps in easier calculation of overall grade
    overall_review_grade_hash[assignment.id] = 0 unless overall_review_grade_hash.key?(assignment.id)
    overall_review_count_hash[assignment.id] = 0 unless overall_review_count_hash.key?(assignment.id)
    grades = 0
    # Check if the person has gotten any review for the assignment
    if reviews.count > 0
      reviews.each {|review| grades += review.average_score.to_i }
      avg_grades = (grades * 1.0 / reviews.count).round
      hash_per_stu[course_participant.id][assignment.id] = avg_grades.to_s + '%'
    end
    # Calculate sum of averages to get student's overall grade
    if avg_grades and grades > 0
      # for each assignment
      review_info_per_stu[0] += avg_grades
      review_info_per_stu[1] += 1
      # for course
      overall_review_grade_hash[assignment.id] += avg_grades
      overall_review_count_hash[assignment.id] += 1
    end
  end

  # The peer review score is taken from the questions for the assignment
  def find_peer_review_score(user_id, assignment_id)
    participant = AssignmentParticipant.find_by(user_id: user_id, parent_id: assignment_id)
    assignment = participant.assignment
    questions = retrieve_questions assignment.questionnaires, assignment_id
    participant.scores(questions)
  end

  # View helper: renders a sign-up topic for display, or '-' when none exists.
  def format_topic(topic)
    topic.nil? ? '-' : topic.format_for_display
  end

  # View helper: renders a score as-is, or '-' when no score is recorded.
  def format_score(score)
    score.nil? ? '-' : score
  end

  helper_method :format_score
  helper_method :format_topic
end
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Rails application configuration for LegisTracker2.
module LegisTracker2
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # 2/7/2013: Added for RailsApp -- generator defaults: use RSpec with
    # FactoryGirl factories, and skip view/helper specs and generated asset stubs.
    config.generators do |g|
      g.test_framework :rspec, fixture: true
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
      g.view_specs false
      g.helper_specs false
      g.stylesheets = false
      g.javascripts = false
      g.helper = false
    end
  end
end
More Heroku deploy changes: set config.assets.initialize_on_precompile = false so asset precompilation does not boot the full application.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Rails application configuration for LegisTracker2.
module LegisTracker2
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true

    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql

    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true

    # Enable the asset pipeline
    config.assets.enabled = true

    # Heroku: do not initialize the application (or touch the database) during
    # `rake assets:precompile`.
    config.assets.initialize_on_precompile = false

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'

    # 2/7/2013: Added for RailsApp -- generator defaults: use RSpec with
    # FactoryGirl factories, and skip view/helper specs and generated asset stubs.
    config.generators do |g|
      g.test_framework :rspec, fixture: true
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
      g.view_specs false
      g.helper_specs false
      g.stylesheets = false
      g.javascripts = false
      g.helper = false
    end
  end
end
|
# Controller for viewing capital plans and completing/undoing plan actions.
class CapitalPlansController < OrganizationAwareController

  before_action :get_capital_plan, only: [:show]

  # State-level view: the current capital plan for every organization in the
  # user's filter that has operational assets.
  def index
    # state view
    authorize! :read_all, CapitalPlan
    add_breadcrumb 'Capital Plans', capital_plans_path
    @capital_plans = []
    @organization_list.each do |org|
      if Asset.operational.where(organization_id: org).count > 0
        plan = CapitalPlan.current_plan(org)
        @capital_plans << plan
        run_system_actions(plan)
      end
    end
  end

  # Shows a single plan, with prev/next navigation when the user can see
  # plans for more than one organization.
  def show
    authorize! :read, @capital_plan
    # pagination if needed
    org_list = @organization_list.select{ |o| Asset.operational.where(organization_id: o).count > 0 }
    if org_list.count > 1
      if can? :read_all, CapitalPlan
        add_breadcrumb 'Capital Plans', capital_plans_path
      end
      @total_rows = org_list.count
      org_idx = org_list.index(@capital_plan.organization_id)
      @row_number = org_idx+1
      @prev_record_key = CapitalPlan.current_plan(org_list[org_idx-1]).object_key if org_idx > 0
      @next_record_key = CapitalPlan.current_plan(org_list[org_idx+1]).object_key if org_idx < org_list.count - 1
      @prev_record_path = @prev_record_key.nil? ? "#" : capital_plan_path(@prev_record_key)
      @next_record_path = @next_record_key.nil? ? "#" : capital_plan_path(@next_record_key)
    end
    add_breadcrumb @capital_plan, capital_plan_path(@capital_plan)
    run_system_actions(@capital_plan)
  end

  # Completes the actions named in params[:targets], then undoes the completed
  # actions named in params[:undo_targets] in reverse module/action order.
  def complete_actions
    actions = CapitalPlanAction.where('capital_plan_actions.object_key IN (?) AND capital_plan_actions.completed_at IS NULL', params[:targets].split(','))
    actions.each do |action|
      authorize! :update, action.capital_plan
      action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: action, user: current_user).run
    end
    # BUGFIX: join capital_plan_module so the ORDER BY on
    # capital_plan_modules.sequence is valid and undo actions sort correctly.
    undo_actions = CapitalPlanAction.unscoped.joins(:capital_plan_module).where('capital_plan_actions.object_key IN (?) AND capital_plan_actions.completed_at IS NOT NULL', params[:undo_targets].split(',')).order('capital_plan_modules.sequence DESC', 'capital_plan_actions.sequence DESC')
    undo_actions.each do |action|
      authorize! :update, action.capital_plan
      action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: action, user: current_user).run
    end
    respond_to do |format|
      format.html { redirect_to :back }
      format.js { render text: URI(request.referer) }
    end
  end

  # Returns JSON describing which action checkboxes should be disabled,
  # given the currently-checked set in params[:checked_targets].
  def get_checkboxes
    checked = params[:checked_targets].split(',')
    unless checked.any?
      checked = [nil]
    end
    capital_plan = CapitalPlan.find_by(object_key: params[:id])
    checkbox_params = Hash.new
    capital_plan.capital_plan_actions.each do |a|
      checkbox_params[a.object_key] = Hash.new
      if checked.include? a.object_key
        # already checked: disable unless the action may be undone
        if !(a.is_undo_allowed?(checked) && (can? :complete_action, a))
          checkbox_params[a.object_key]['disabled'] = 'disabled'
        end
      else
        # unchecked: disable unless the action may be completed
        if !(a.is_allowed?(checked) && (can? :complete_action, a))
          checkbox_params[a.object_key]['disabled'] = 'disabled'
        end
      end
    end
    respond_to do |format|
      format.json { render json: checkbox_params.to_json }
    end
  end

  protected

  # Loads @capital_plan scoped to the user's organization filter; redirects to
  # /404 or to the first visible plan when the record is outside the filter.
  def get_capital_plan
    @capital_plan = CapitalPlan.find_by(object_key: params[:id], organization_id: @organization_list)
    if @capital_plan.nil?
      if CapitalPlan.find_by(object_key: params[:id], :organization_id => current_user.user_organization_filters.system_filters.first.get_organizations.map{|x| x.id}).nil?
        redirect_to '/404'
      else
        notify_user(:warning, 'This record is outside your filter. Change your filter if you want to access it.')
        org_list = @organization_list.select{|x| Asset.operational.where(organization_id: x).count > 0}
        redirect_to capital_plan_path(CapitalPlan.current_plan(org_list.first))
      end
      return
    end
  end

  # Runs every system-triggered action attached to the given plan.
  def run_system_actions(plan)
    plan.system_actions.each do |sys_action|
      sys_action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: sys_action, user: current_user).run
    end
  end

  private

end
Fix bug where capital_plan_module was not joined, so undo actions could not be sorted by module sequence.
# Controller for viewing capital plans and completing/undoing plan actions.
class CapitalPlansController < OrganizationAwareController

  before_action :get_capital_plan, only: [:show]

  # State-level view: the current capital plan for every organization in the
  # user's filter that has operational assets.
  def index
    # state view
    authorize! :read_all, CapitalPlan
    add_breadcrumb 'Capital Plans', capital_plans_path
    @capital_plans = []
    @organization_list.each do |org|
      if Asset.operational.where(organization_id: org).count > 0
        plan = CapitalPlan.current_plan(org)
        @capital_plans << plan
        run_system_actions(plan)
      end
    end
  end

  # Shows a single plan, with prev/next navigation when the user can see
  # plans for more than one organization.
  def show
    authorize! :read, @capital_plan
    # pagination if needed
    org_list = @organization_list.select{ |o| Asset.operational.where(organization_id: o).count > 0 }
    if org_list.count > 1
      if can? :read_all, CapitalPlan
        add_breadcrumb 'Capital Plans', capital_plans_path
      end
      @total_rows = org_list.count
      org_idx = org_list.index(@capital_plan.organization_id)
      @row_number = org_idx+1
      @prev_record_key = CapitalPlan.current_plan(org_list[org_idx-1]).object_key if org_idx > 0
      @next_record_key = CapitalPlan.current_plan(org_list[org_idx+1]).object_key if org_idx < org_list.count - 1
      @prev_record_path = @prev_record_key.nil? ? "#" : capital_plan_path(@prev_record_key)
      @next_record_path = @next_record_key.nil? ? "#" : capital_plan_path(@next_record_key)
    end
    add_breadcrumb @capital_plan, capital_plan_path(@capital_plan)
    run_system_actions(@capital_plan)
  end

  # Completes the actions named in params[:targets], then undoes the completed
  # actions named in params[:undo_targets] in reverse module/action order.
  def complete_actions
    actions = CapitalPlanAction.where('capital_plan_actions.object_key IN (?) AND capital_plan_actions.completed_at IS NULL', params[:targets].split(','))
    actions.each do |action|
      authorize! :update, action.capital_plan
      action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: action, user: current_user).run
    end
    # The capital_plan_module join is required for the ORDER BY on
    # capital_plan_modules.sequence to be valid.
    undo_actions = CapitalPlanAction.unscoped.joins(:capital_plan_module).where('capital_plan_actions.object_key IN (?) AND capital_plan_actions.completed_at IS NOT NULL', params[:undo_targets].split(',')).order('capital_plan_modules.sequence DESC', 'capital_plan_actions.sequence DESC')
    undo_actions.each do |action|
      authorize! :update, action.capital_plan
      action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: action, user: current_user).run
    end
    respond_to do |format|
      format.html { redirect_to :back }
      format.js { render text: URI(request.referer) }
    end
  end

  # Returns JSON describing which action checkboxes should be disabled,
  # given the currently-checked set in params[:checked_targets].
  def get_checkboxes
    checked = params[:checked_targets].split(',')
    unless checked.any?
      checked = [nil]
    end
    capital_plan = CapitalPlan.find_by(object_key: params[:id])
    checkbox_params = Hash.new
    capital_plan.capital_plan_actions.each do |a|
      checkbox_params[a.object_key] = Hash.new
      if checked.include? a.object_key
        # already checked: disable unless the action may be undone
        if !(a.is_undo_allowed?(checked) && (can? :complete_action, a))
          checkbox_params[a.object_key]['disabled'] = 'disabled'
        end
      else
        # unchecked: disable unless the action may be completed
        if !(a.is_allowed?(checked) && (can? :complete_action, a))
          checkbox_params[a.object_key]['disabled'] = 'disabled'
        end
      end
    end
    respond_to do |format|
      format.json { render json: checkbox_params.to_json }
    end
  end

  protected

  # Loads @capital_plan scoped to the user's organization filter; redirects to
  # /404 or to the first visible plan when the record is outside the filter.
  def get_capital_plan
    @capital_plan = CapitalPlan.find_by(object_key: params[:id], organization_id: @organization_list)
    if @capital_plan.nil?
      if CapitalPlan.find_by(object_key: params[:id], :organization_id => current_user.user_organization_filters.system_filters.first.get_organizations.map{|x| x.id}).nil?
        redirect_to '/404'
      else
        notify_user(:warning, 'This record is outside your filter. Change your filter if you want to access it.')
        org_list = @organization_list.select{|x| Asset.operational.where(organization_id: x).count > 0}
        redirect_to capital_plan_path(CapitalPlan.current_plan(org_list.first))
      end
      return
    end
  end

  # Runs every system-triggered action attached to the given plan.
  def run_system_actions(plan)
    plan.system_actions.each do |sys_action|
      sys_action.capital_plan_action_type.class_name.constantize.new(capital_plan_action: sys_action, user: current_user).run
    end
  end

  private

end
|
# Mailboxer-backed messaging controller for the current member's
# inbox, sentbox and trash.
class ConversationsController < ApplicationController
  respond_to :html

  before_action :authenticate_member!
  before_action :set_box
  before_action :check_current_subject_in_conversation, only: %i(show update destroy)

  # Lists the conversations for the current box (inbox/sent/trash), paginated.
  def index
    @conversations = if @box.eql? "inbox"
                       mailbox.inbox
                     elsif @box.eql? "sent"
                       mailbox.sentbox
                     else
                       mailbox.trash
                     end.paginate(page: params[:page])
    respond_with @conversations
  end

  # Shows a conversation and marks its receipts as read.
  def show
    @receipts = mailbox.receipts_for(@conversation)
    @receipts.mark_as_read
    @participants = @conversation.participants
  end

  # Restores a trashed conversation.
  def update
    @conversation.untrash(current_member)
    redirect_to conversations_path(box: params[:box])
  end

  # Moves a conversation to the trash.
  def destroy
    @conversation = Mailboxer::Conversation.find(params[:id])
    @conversation.move_to_trash(current_member)
    redirect_to conversations_path(box: params[:box])
  end

  private

  def mailbox
    current_member.mailbox
  end

  # Builds per-box totals and normalizes params[:box] to a known box name.
  def set_box
    @boxes = {
      'inbox' => { 'total' => mailbox.inbox.size, 'unread' => 0},
      'sent' => { 'total' => mailbox.sentbox.size, 'unread' => 0},
      'trash' => { 'total' => mailbox.trash.size, 'unread' => 0}
    }
    @box = if params[:box].blank? || !@boxes.keys.include?(params[:box])
             'inbox'
           else
             params[:box]
           end
  end

  # Guard: redirect away unless the current member participates in the conversation.
  def check_current_subject_in_conversation
    @conversation = Mailboxer::Conversation.find_by(id: params[:id])
    # Guard clause: nothing to do when the member is a legitimate participant.
    return if @conversation && @conversation.is_participant?(current_member)
    # NOTE(review): `box` here is a bare method call -- only @box and
    # params[:box] are visible in this controller; confirm a `box` helper
    # exists, otherwise this raises NameError when triggered.
    redirect_to conversations_path(box: box)
  end
end
Use a guard clause in check_current_subject_in_conversation instead of a wrapping conditional.
# Mailboxer-backed messaging controller for the current member's
# inbox, sentbox and trash.
class ConversationsController < ApplicationController
  respond_to :html

  before_action :authenticate_member!
  before_action :set_box
  before_action :check_current_subject_in_conversation, only: %i(show update destroy)

  # Lists the conversations for the current box (inbox/sent/trash), paginated.
  def index
    @conversations = if @box.eql? "inbox"
                       mailbox.inbox
                     elsif @box.eql? "sent"
                       mailbox.sentbox
                     else
                       mailbox.trash
                     end.paginate(page: params[:page])
    respond_with @conversations
  end

  # Shows a conversation and marks its receipts as read.
  def show
    @receipts = mailbox.receipts_for(@conversation)
    @receipts.mark_as_read
    @participants = @conversation.participants
  end

  # Restores a trashed conversation.
  def update
    @conversation.untrash(current_member)
    redirect_to conversations_path(box: params[:box])
  end

  # Moves a conversation to the trash.
  def destroy
    @conversation = Mailboxer::Conversation.find(params[:id])
    @conversation.move_to_trash(current_member)
    redirect_to conversations_path(box: params[:box])
  end

  private

  def mailbox
    current_member.mailbox
  end

  # Builds per-box totals and normalizes params[:box] to a known box name.
  def set_box
    @boxes = {
      'inbox' => { 'total' => mailbox.inbox.size, 'unread' => 0},
      'sent' => { 'total' => mailbox.sentbox.size, 'unread' => 0},
      'trash' => { 'total' => mailbox.trash.size, 'unread' => 0}
    }
    @box = if params[:box].blank? || !@boxes.keys.include?(params[:box])
             'inbox'
           else
             params[:box]
           end
  end

  # Guard: redirect away unless the current member participates in the conversation.
  def check_current_subject_in_conversation
    @conversation = Mailboxer::Conversation.find_by(id: params[:id])
    return unless @conversation.nil? || !@conversation.is_participant?(current_member)
    # NOTE(review): `box` here is a bare method call -- only @box and
    # params[:box] are visible in this controller; confirm a `box` helper
    # exists, otherwise this raises NameError when triggered.
    redirect_to conversations_path(box: box)
    # (redundant trailing `return` removed -- it was the last statement)
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Ladder application namespace.
module Ladder
  # Application release version.
  VERSION = "1.3.2"

  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Auckland'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.enforce_available_locales = true

    # Host used when generating absolute URLs in mailer views.
    config.action_mailer.default_url_options = { :host => 'ladders.pw' }

    # Route exceptions through the application's own routes (custom error pages).
    config.exceptions_app = self.routes

    # Generate minitest spec-style tests without fixtures.
    config.generators do |g|
      g.test_framework :mini_test, :spec => true, :fixture => false
    end
  end
end
Release 1.4.0
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
# Ladder application namespace.
module Ladder
  # Application release version.
  VERSION = "1.4.0"

  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    config.time_zone = 'Auckland'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.enforce_available_locales = true

    # Host used when generating absolute URLs in mailer views.
    config.action_mailer.default_url_options = { :host => 'ladders.pw' }

    # Route exceptions through the application's own routes (custom error pages).
    config.exceptions_app = self.routes

    # Generate minitest spec-style tests without fixtures.
    config.generators do |g|
      g.test_framework :mini_test, :spec => true, :fixture => false
    end
  end
end
|
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# @API Conversations
#
# API for creating, accessing and updating user conversations.
# Controller backing the Conversations (private messaging) API and UI.
# Actions below rely on the before_filters to populate @conversation,
# @conversations_scope, @recipients and @tags before they run.
class ConversationsController < ApplicationController
  include ConversationsHelper
  include SearchHelper
  include Api::V1::Conversation

  # public_feed authenticates via feed code instead of a session.
  before_filter :require_user, :except => [:public_feed]
  before_filter :reject_student_view_student
  # Loads @conversation from params[:id]/[:conversation_id]; 404s if missing.
  before_filter :get_conversation, :only => [:show, :update, :destroy, :add_recipients, :remove_messages]
  # Builds @conversations_scope from params[:scope]/[:filter].
  before_filter :infer_scope, :only => [:index, :show, :create, :update, :add_recipients, :add_message, :remove_messages]
  # Resolves params[:recipients] into the @recipients id=>user hash.
  before_filter :normalize_recipients, :only => [:create, :add_recipients]
  # Builds @tags from the tags/recipients params.
  before_filter :infer_tags, :only => [:create, :add_message, :add_recipients]

  # whether it's a bulk private message, or a big group conversation,
  # batch up all delayed jobs to make this more responsive to the user
  batch_jobs_in_actions :only => :create

  add_crumb(proc { I18n.t 'crumbs.messages', "Conversations" }) { |c| c.send :conversations_url }
# @API List conversations
# Returns the list of conversations for the current user, most recent ones first.
#
# @argument scope [optional, "unread"|"starred"|"archived"]
# When set, only return conversations of the specified type. For example,
# set to "unread" to return only conversations that haven't been read.
# The default behavior is to return all non-archived conversations (i.e.
# read and unread).
#
# @argument filter [optional, course_id|group_id|user_id]
# When set, only return conversations for the specified course, group
# or user. The id should be prefixed with its type, e.g. "user_123" or
# "course_456"
#
# @argument interleave_submissions Boolean, default false. If true, the
# message_count will also include these submission-based messages in the
# total. See the show action for more information.
#
# @argument include_all_conversation_ids Boolean, default false. If true,
# the top-level element of the response will be an object rather than
# an array, and will have the keys "conversations" which will contain the
# paged conversation data, and "conversation_ids" which will contain the
# ids of all conversations under this scope/filter in the same order.
#
# @response_field id The unique identifier for the conversation.
# @response_field workflow_state The current state of the conversation
# (read, unread or archived)
# @response_field last_message A <=100 character preview from the most
# recent message
# @response_field last_message_at The timestamp of the latest message
# @response_field message_count The number of messages in this conversation
# @response_field subscribed Indicates whether the user is actively
# subscribed to the conversation
# @response_field private Indicates whether this is a private conversation
# (i.e. audience of one)
# @response_field starred Whether the conversation is starred
# @response_field properties Additional conversation flags (last_author,
# attachments, media_objects). Each listed property means the flag is
# set to true (i.e. the current user is the most recent author, there
# are attachments, or there are media objects)
# @response_field audience Array of user ids who are involved in the
# conversation, ordered by participation level, then alphabetical.
# Excludes current user, unless this is a monologue.
# @response_field audience_contexts Most relevant shared contexts (courses
# and groups) between current user and other participants. If there is
# only one participant, it will also include that user's enrollment(s)/
# membership type(s) in each course/group
# @response_field avatar_url URL to appropriate icon for this conversation
# (custom, individual or group avatar, depending on audience)
# @response_field participants Array of users (id, name) participating in
# the conversation. Includes current user.
# @response_field visible Boolean, indicates whether the conversation is
# visible under the current scope and filter. This attribute is always
# true in the index API response, and is primarily useful in create/update
# responses so that you can know if the record should be displayed in
# the UI. The default scope is assumed, unless a scope or filter is passed
# to the create/update API call.
#
# @example_response
# [
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00Z",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": ["StudentEnrollment"]}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}],
# "visible": true
# }
# ]
def index
if request.format == :json
conversations = Api.paginate(@conversations_scope, self, api_v1_conversations_url)
# optimize loading the most recent messages for each conversation into a single query
ConversationParticipant.preload_latest_messages(conversations, @current_user.id)
@conversations_json = conversations.map{ |c| conversation_json(c, @current_user, session, :include_participant_avatars => false, :include_participant_contexts => false, :visible => true) }
if params[:include_all_conversation_ids]
@conversations_json = {:conversations => @conversations_json, :conversation_ids => @conversations_scope.conversation_ids}
end
render :json => @conversations_json
else
if @current_user.shard != Shard.current
flash[:notice] = 'Conversations are not yet cross-shard enabled'
return redirect_to dashboard_url
end
return redirect_to conversations_path(:scope => params[:redirect_scope]) if params[:redirect_scope]
load_all_contexts :permissions => [:manage_user_notes]
notes_enabled = @current_user.associated_accounts.any?{|a| a.enable_user_notes }
can_add_notes_for_account = notes_enabled && @current_user.associated_accounts.any?{|a| a.grants_right?(@current_user, nil, :manage_students) }
js_env(:CONVERSATIONS => {
:USER => conversation_users_json([@current_user], @current_user, session, :include_participant_contexts => false).first,
:CONTEXTS => @contexts,
:NOTES_ENABLED => notes_enabled,
:CAN_ADD_NOTES_FOR_ACCOUNT => can_add_notes_for_account,
:SHOW_INTRO => !@current_user.watched_conversations_intro?,
:FOLDER_ID => @current_user.conversation_attachments_folder.id,
:MEDIA_COMMENTS_ENABLED => feature_enabled?(:kaltura),
})
end
end
# @API Create a conversation
# Create a new conversation with one or more recipients. If there is already
# an existing private conversation with the given recipients, it will be
# reused.
#
# @argument recipients[] An array of recipient ids. These may be user ids
# or course/group ids prefixed with "course_" or "group_" respectively,
# e.g. recipients[]=1&recipients[]=2&recipients[]=course_3
# @argument body The message to be sent
# @argument group_conversation [true|false] Ignored if there is just one
# recipient, defaults to false. If true, this will be a group conversation
# (i.e. all recipients will see all messages and replies). If false,
# individual private conversations will be started with each recipient.
# @argument attachment_ids[] An array of attachments ids. These must be
# files that have been previously uploaded to the sender's "conversation
# attachments" folder.
# @argument media_comment_id Media comment id of an audio of video file to
# be associated with this message.
# @argument media_comment_type ["audio"|"video"] Type of the associated
# media file
# @argument mode ["sync"|"async"] Determines whether the messages will be
# created/sent synchronously or asynchronously. Defaults to sync, and this
# option is ignored if this is a group conversation or there is just one
# recipient (i.e. it must be a bulk private message). When sent async, the
# response will be an empty array (batch status can be queried via the
# {api:ConversationsController#batches batches API})
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
def create
return render_error('recipients', 'blank') if params[:recipients].blank?
return render_error('recipients', 'invalid') if @recipients.blank?
return render_error('body', 'blank') if params[:body].blank?
batch_private_messages = !value_to_boolean(params[:group_conversation]) && @recipients.size > 1
recipient_ids = @recipients.keys
message = build_message
if batch_private_messages
mode = params[:mode] == 'async' ? :async : :sync
batch = ConversationBatch.generate(message, recipient_ids, mode, :user_map => @recipients, :tags => @tags)
if mode == :async
headers['X-Conversation-Batch-Id'] = batch.id.to_s
return render :json => [], :status => :accepted
end
# reload and preload stuff
conversations = ConversationParticipant.find(:all, :conditions => {:id => batch.conversations.map(&:id)}, :include => [:conversation], :order => "visible_last_authored_at DESC, last_message_at DESC, id DESC")
Conversation.preload_participants(conversations.map(&:conversation))
ConversationParticipant.preload_latest_messages(conversations, @current_user.id)
visibility_map = infer_visibility(*conversations)
render :json => conversations.map{ |c| conversation_json(c, @current_user, session, :include_participant_avatars => false, :include_participant_contexts => false, :visible => visibility_map[c.conversation_id]) }, :status => :created
else
@conversation = @current_user.initiate_conversation(recipient_ids)
@conversation.add_message(message, :tags => @tags)
render :json => [conversation_json(@conversation.reload, @current_user, session, :include_indirect_participants => true, :messages => [message])], :status => :created
end
end
# @API
# Returns any currently running conversation batches for the current user.
# Conversation batches are created when a bulk private message is sent
# asynchronously (see the mode argument to the {api:ConversationsController#create create API action}).
#
# @example_response
# [
# {
# "id": 1,
# "workflow_state": "created",
# "completion": 0.1234,
# "tags": [],
# "message":
# {
# "id": 1,
# "created_at": "2011-09-02T10:00:00Z",
# "body": "quick reminder, no class tomorrow",
# "author_id": 1,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# }
# ]
def batches
batches = Api.paginate(@current_user.conversation_batches.in_progress,
self,
api_v1_conversations_batches_url,
:order => :id)
render :json => batches.map{ |m| conversation_batch_json(m, @current_user, session) }
end
# @API Get a single conversation
# Returns information for a single conversation. Response includes all
# fields that are present in the list/index action, as well as messages,
# submissions, and extended participant information.
#
# @argument interleave_submissions Boolean, default false. If true,
# submission data will be returned as first class messages interleaved
# with other messages. The submission details (comments, assignment, etc.)
# will be stored as the submission property on the message. Note that if
# set, the message_count will also include these messages in the total.
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument auto_mark_as_read Boolean, default true. If true, unread
# conversations will be automatically marked as read. This will default
# to false in a future API release, so clients should explicitly send
# true if that is the desired behavior.
#
# @response_field participants Array of relevant users. Includes current
# user. If there are forwarded messages in this conversation, the authors
# of those messages will also be included, even if they are not
# participating in this conversation. Fields include:
# @response_field messages Array of messages, newest first. Fields include:
# id:: The unique identifier for the message
# created_at:: The timestamp of the message
# body:: The actual message body
# author_id:: The id of the user who sent the message (see audience, participants)
# generated:: If true, indicates this is a system-generated message (e.g. "Bob added Alice to the conversation")
# media_comment:: Audio/video comment data for this message (if applicable). Fields include: display_name, content-type, media_id, media_type, url
# forwarded_messages:: If this message contains forwarded messages, they will be included here (same format as this list). Note that those messages may have forwarded messages of their own, etc.
# attachments:: Array of attachments for this message. Fields include: display_name, content-type, filename, url
# @response_field submissions Array of assignment submissions having
# comments relevant to this conversation. These should be interleaved with
# the messages when displaying to the user. See the {api:SubmissionsApiController#index Submissions API documentation}
# for details on the fields included. This response includes
# the submission_comments and assignment associations.
#
# @example_response
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}],
# "messages":
# [
# {
# "id": 3,
# "created_at": "2011-09-02T12:00:00Z",
# "body": "sure thing, here's the file",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": [{"id": 1, "display_name": "notes.doc", "uuid": "abcdefabcdefabcdefabcdefabcdef"}]
# },
# {
# "id": 2,
# "created_at": "2011-09-02T11:00:00Z",
# "body": "hey, bob didn't get the notes. do you have a copy i can give him?",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages":
# [
# {
# "id": 1,
# "created_at": "2011-09-02T10:00:00Z",
# "body": "can i get a copy of the notes? i was out",
# "author_id": 3,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ],
# "attachments": []
# }
# ],
# "submissions": []
# }
def show
unless request.xhr? || params[:format] == 'json'
scope = if @conversation.archived?
'archived'
elsif @conversation.visible_last_authored_at && !@conversation.last_message_at
'sent'
else
'default'
end
return redirect_to conversations_path(:scope => scope, :id => @conversation.conversation_id, :message => params[:message])
end
@conversation.update_attribute(:workflow_state, "read") if @conversation.unread? && auto_mark_as_read?
ActiveRecord::Base::ConnectionSpecification.with_environment(:slave) do
messages = @conversation.messages
ConversationMessage.send(:preload_associations, messages, :asset)
submissions = messages.map(&:submission).compact
Submission.send(:preload_associations, submissions, [:assignment, :submission_comments])
if interleave_submissions
submissions = nil
else
messages = messages.select{ |message| message.submission.nil? }
end
render :json => conversation_json(@conversation,
@current_user,
session,
:include_indirect_participants => true,
:messages => messages,
:submissions => submissions)
end
end
# @API Edit a conversation
# Updates attributes for a single conversation.
#
# @argument conversation[workflow_state] ["read"|"unread"|"archived"] Change the state of this conversation
# @argument conversation[subscribed] [true|false] Toggle the current user's subscription to the conversation (only valid for group conversations). If unsubscribed, the user will still have access to the latest messages, but the conversation won't be automatically flagged as unread, nor will it jump to the top of the inbox.
# @argument conversation[starred] [true|false] Toggle the starred state of the current user's view of the conversation.
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}]
# }
def update
if @conversation.update_attributes(params[:conversation])
render :json => conversation_json(@conversation, @current_user, session)
else
render :json => @conversation.errors, :status => :bad_request
end
end
# @API Mark all as read
# Mark all conversations as read.
def mark_all_as_read
@current_user.mark_all_conversations_as_read!
render :json => {}
end
# @API Delete a conversation
# Delete this conversation and its messages. Note that this only deletes
# this user's view of the conversation.
#
# Response includes same fields as UPDATE action
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": null,
# "last_message_at": null,
# "message_count": 0,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": []
# }
def destroy
@conversation.remove_messages(:all)
render :json => conversation_json(@conversation, @current_user, session, :visible => false)
end
# @API Add recipients
# Add recipients to an existing group conversation. Response is similar to
# the GET/show action, except that omits submissions and only includes the
# latest message (e.g. "joe was added to the conversation by bob")
#
# @argument recipients[] An array of recipient ids. These may be user ids
# or course/group ids prefixed with "course_" or "group_" respectively,
# e.g. recipients[]=1&recipients[]=2&recipients[]=course_3
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "let's talk this over with jim",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": false,
# "starred": null,
# "properties": [],
# "audience": [2, 3, 4],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}, {"id": 4, "name": "Jim Admin"}],
# "messages":
# [
# {
# "id": 4,
# "created_at": "2011-09-02T12:10:00Z",
# "body": "Jim was added to the conversation by Joe TA",
# "author_id": 1,
# "generated": true,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ]
# }
#
def add_recipients
if @recipients.present?
@conversation.add_participants(@recipients.keys, :tags => @tags, :root_account_id => @domain_root_account.id)
render :json => conversation_json(@conversation.reload, @current_user, session, :messages => [@conversation.messages.first])
else
render :json => {}, :status => :bad_request
end
end
# @API Add a message
# Add a message to an existing conversation. Response is similar to the
# GET/show action, except that omits submissions and only includes the
# latest message (i.e. what we just sent)
#
# @argument body The message to be sent
# @argument attachment_ids[] An array of attachments ids. These must be
# files that have been previously uploaded to the sender's "conversation
# attachments" folder.
# @argument media_comment_id Media comment id of an audio of video file to
# be associated with this message.
# @argument media_comment_type ["audio"|"video"] Type of the associated
# media file
#
# @example_response
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "let's talk this over with jim",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": false,
# "starred": null,
# "properties": [],
# "audience": [2, 3],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}],
# "messages":
# [
# {
# "id": 3,
# "created_at": "2011-09-02T12:00:00Z",
# "body": "let's talk this over with jim",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ]
# }
#
def add_message
get_conversation(true)
if params[:body].present?
message = build_message
@conversation.add_message message, :tags => @tags
render :json => conversation_json(@conversation.reload, @current_user, session, :messages => [message])
else
render :json => {}, :status => :bad_request
end
end
# @API Delete a message
# Delete messages from this conversation. Note that this only affects this user's view of the conversation.
# If all messages are deleted, the conversation will be as well (equivalent to DELETE)
#
# @argument remove Array of message ids to be deleted
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 1,
# "subscribed": true,
# "private": true,
# "starred": null,
# "properties": ["attachments"]
# }
def remove_messages
if params[:remove]
to_delete = []
@conversation.messages.each do |message|
to_delete << message if params[:remove].include?(message.id.to_s)
end
@conversation.remove_messages(*to_delete)
render :json => conversation_json(@conversation, @current_user, session)
end
end
# @API Find recipients
#
# Deprecated, see the {api:SearchController#recipients Find recipients endpoint} in the Search API
def find_recipients; end
def public_feed
return unless get_feed_context(:only => [:user])
@current_user = @context
load_all_contexts
feed = Atom::Feed.new do |f|
f.title = t('titles.rss_feed', "Conversations Feed")
f.links << Atom::Link.new(:href => conversations_url, :rel => 'self')
f.updated = Time.now
f.id = conversations_url
end
@entries = []
@conversation_contexts = {}
@current_user.conversations.each do |conversation|
@entries.concat(conversation.messages.human)
if @conversation_contexts[conversation.conversation.id].blank?
@conversation_contexts[conversation.conversation.id] = feed_context_content(conversation)
end
end
@entries = @entries.sort_by{|e| e.created_at}.reverse
@entries.each do |entry|
feed.entries << entry.to_atom(:additional_content => @conversation_contexts[entry.conversation.id])
end
respond_to do |format|
format.atom { render :text => feed.to_xml }
end
end
def feed_context_content(conversation)
content = ""
audience = conversation.other_participants
audience_names = audience.map(&:name)
audience_contexts = contexts_for(audience, conversation.context_tags) # will be 0, 1, or 2 contexts
audience_context_names = [:courses, :groups].inject([]) { |ary, context_key|
ary + audience_contexts[context_key].keys.map { |k| @contexts[context_key][k] && @contexts[context_key][k][:name] }
}.reject(&:blank?)
content += "<hr />"
content += "<div>#{t('conversation_context', "From a conversation with")} "
participant_list_cutoff = 2
if audience_names.length <= participant_list_cutoff
content += "#{ERB::Util.h(audience_names.to_sentence)}"
else
others_string = t('other_recipients', {
:one => "and 1 other",
:other => "and %{count} others"
},
:count => audience_names.length - participant_list_cutoff)
content += "#{ERB::Util.h(audience_names[0...participant_list_cutoff].join(", "))} #{others_string}"
end
if !audience_context_names.empty?
content += " (#{ERB::Util.h(audience_context_names.to_sentence)})"
end
content += "</div>"
content
end
def watched_intro
unless @current_user.watched_conversations_intro?
@current_user.watched_conversations_intro
@current_user.save
end
render :json => {}
end
private
def render_error(attribute, message)
render :json => [{
:attribute => attribute,
:message => message,
}],
:status => :bad_request
end
def infer_scope
@conversations_scope = case params[:scope]
when 'unread'
@current_user.conversations.unread
when 'starred'
@current_user.conversations.starred
when 'sent'
@current_user.all_conversations.sent
when 'archived'
@current_user.conversations.archived
else
params[:scope] = 'inbox'
@current_user.conversations.default
end
filters = param_array(:filter)
@conversations_scope = @conversations_scope.for_masquerading_user(@real_current_user) if @real_current_user
@conversations_scope = @conversations_scope.tagged(*filters) if filters.present?
@set_visibility = true
end
def infer_visibility(*conversations)
result = Hash.new(false)
visible_conversations = @conversations_scope.find(:all,
:select => "conversation_id",
:conditions => {:conversation_id => conversations.map(&:conversation_id)}
)
visible_conversations.each { |c| result[c.conversation_id] = true }
if conversations.size == 1
result[conversations.first.conversation_id]
else
result
end
end
def normalize_recipients
if params[:recipients]
recipient_ids = params[:recipients]
if recipient_ids.is_a?(String)
params[:recipients] = recipient_ids = recipient_ids.split(/,/)
end
recipients = @current_user.messageable_users(:ids => recipient_ids.grep(/\A\d+\z/), :conversation_id => params[:from_conversation_id])
recipient_ids.grep(User::MESSAGEABLE_USER_CONTEXT_REGEX).map do |context|
recipients.concat @current_user.messageable_users(:context => context)
end
@recipients = recipients.inject({}){ |hash, user|
hash[user.id] ||= user
hash
}
end
end
def infer_tags
tags = param_array(:tags).concat(param_array(:recipients))
tags = SimpleTags.normalize_tags(tags)
tags += tags.grep(/\Agroup_(\d+)\z/){ g = Group.find_by_id($1.to_i) and g.context.asset_string }.compact
@tags = tags.uniq
end
def get_conversation(allow_deleted = false)
scope = @current_user.all_conversations
scope = scope.scoped(:conditions => "message_count > 0") unless allow_deleted
@conversation = scope.find_by_conversation_id(params[:id] || params[:conversation_id] || 0)
raise ActiveRecord::RecordNotFound unless @conversation
end
def build_message
Conversation.build_message(
@current_user,
params[:body],
:attachment_ids => params[:attachment_ids],
:forwarded_message_ids => params[:forwarded_message_ids],
:root_account_id => @domain_root_account.id,
:media_comment => infer_media_comment,
:generate_user_note => params[:user_note]
)
end
def infer_media_comment
media_id = params[:media_comment_id]
media_type = params[:media_comment_type]
if media_id.present? && media_type.present?
media_comment = MediaObject.by_media_id(media_id).by_media_type(media_type).first
unless media_comment
media_comment ||= MediaObject.new
media_comment.media_type = media_type
media_comment.media_id = media_id
media_comment.root_account_id = @domain_root_account.id
media_comment.user = @current_user
end
media_comment.context = @current_user
media_comment.save
media_comment
end
end
# TODO API v2: default to true, like we do in the UI
def interleave_submissions
value_to_boolean(params[:interleave_submissions]) || !api_request?
end
def include_private_conversation_enrollments
value_to_boolean(params[:include_private_conversation_enrollments]) || api_request?
end
# TODO API v2: default to false, like we do in the UI
def auto_mark_as_read?
params[:auto_mark_as_read] ||= api_request?
value_to_boolean(params[:auto_mark_as_read])
end
# look up the param and cast it to an array. treat empty string same as empty
def param_array(key)
Array(params[key].presence || []).compact
end
end
Don't render JSON inside a slave-database block: rendering can reach a
discussion topic's JSON serialization, which implicitly marks the topic as
read — a write that must not run against the slave connection.
Change-Id: Id4d3a62902852d130fa04aea8e2fa2402f760fd7
Reviewed-on: https://gerrit.instructure.com/15975
Reviewed-by: Brian Palmer <3cb4e1df5ec4da2c7c4af7c52cec8cf340a55a10@instructure.com>
QA-Review: Brian Palmer <3cb4e1df5ec4da2c7c4af7c52cec8cf340a55a10@instructure.com>
Tested-by: Brian Palmer <3cb4e1df5ec4da2c7c4af7c52cec8cf340a55a10@instructure.com>
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
# @API Conversations
#
# API for creating, accessing and updating user conversations.
class ConversationsController < ApplicationController
include ConversationsHelper
include SearchHelper
include Api::V1::Conversation
before_filter :require_user, :except => [:public_feed]
before_filter :reject_student_view_student
before_filter :get_conversation, :only => [:show, :update, :destroy, :add_recipients, :remove_messages]
before_filter :infer_scope, :only => [:index, :show, :create, :update, :add_recipients, :add_message, :remove_messages]
before_filter :normalize_recipients, :only => [:create, :add_recipients]
before_filter :infer_tags, :only => [:create, :add_message, :add_recipients]
# whether it's a bulk private message, or a big group conversation,
# batch up all delayed jobs to make this more responsive to the user
batch_jobs_in_actions :only => :create
add_crumb(proc { I18n.t 'crumbs.messages', "Conversations" }) { |c| c.send :conversations_url }
# @API List conversations
# Returns the list of conversations for the current user, most recent ones first.
#
# @argument scope [optional, "unread"|"starred"|"archived"]
# When set, only return conversations of the specified type. For example,
# set to "unread" to return only conversations that haven't been read.
# The default behavior is to return all non-archived conversations (i.e.
# read and unread).
#
# @argument filter [optional, course_id|group_id|user_id]
# When set, only return conversations for the specified course, group
# or user. The id should be prefixed with its type, e.g. "user_123" or
# "course_456"
#
# @argument interleave_submissions Boolean, default false. If true, the
# message_count will also include these submission-based messages in the
# total. See the show action for more information.
#
# @argument include_all_conversation_ids Boolean, default false. If true,
# the top-level element of the response will be an object rather than
# an array, and will have the keys "conversations" which will contain the
# paged conversation data, and "conversation_ids" which will contain the
# ids of all conversations under this scope/filter in the same order.
#
# @response_field id The unique identifier for the conversation.
# @response_field workflow_state The current state of the conversation
# (read, unread or archived)
# @response_field last_message A <=100 character preview from the most
# recent message
# @response_field last_message_at The timestamp of the latest message
# @response_field message_count The number of messages in this conversation
# @response_field subscribed Indicates whether the user is actively
# subscribed to the conversation
# @response_field private Indicates whether this is a private conversation
# (i.e. audience of one)
# @response_field starred Whether the conversation is starred
# @response_field properties Additional conversation flags (last_author,
# attachments, media_objects). Each listed property means the flag is
# set to true (i.e. the current user is the most recent author, there
# are attachments, or there are media objects)
# @response_field audience Array of user ids who are involved in the
# conversation, ordered by participation level, then alphabetical.
# Excludes current user, unless this is a monologue.
# @response_field audience_contexts Most relevant shared contexts (courses
# and groups) between current user and other participants. If there is
# only one participant, it will also include that user's enrollment(s)/
# membership type(s) in each course/group
# @response_field avatar_url URL to appropriate icon for this conversation
# (custom, individual or group avatar, depending on audience)
# @response_field participants Array of users (id, name) participating in
# the conversation. Includes current user.
# @response_field visible Boolean, indicates whether the conversation is
# visible under the current scope and filter. This attribute is always
# true in the index API response, and is primarily useful in create/update
# responses so that you can know if the record should be displayed in
# the UI. The default scope is assumed, unless a scope or filter is passed
# to the create/update API call.
#
# @example_response
# [
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00Z",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": ["StudentEnrollment"]}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}],
# "visible": true
# }
# ]
# GET index. JSON requests return a paginated list of the current user's
# conversations under @conversations_scope (optionally wrapped with all
# conversation ids); HTML requests bootstrap the conversations UI via js_env.
def index
if request.format == :json
conversations = Api.paginate(@conversations_scope, self, api_v1_conversations_url)
# optimize loading the most recent messages for each conversation into a single query
ConversationParticipant.preload_latest_messages(conversations, @current_user.id)
@conversations_json = conversations.map{ |c| conversation_json(c, @current_user, session, :include_participant_avatars => false, :include_participant_contexts => false, :visible => true) }
if params[:include_all_conversation_ids]
# wrap the page in an object that also carries every conversation id
# under the current scope/filter, in the same order
@conversations_json = {:conversations => @conversations_json, :conversation_ids => @conversations_scope.conversation_ids}
end
render :json => @conversations_json
else
# the conversations UI does not yet support users on another shard
if @current_user.shard != Shard.current
flash[:notice] = 'Conversations are not yet cross-shard enabled'
return redirect_to dashboard_url
end
return redirect_to conversations_path(:scope => params[:redirect_scope]) if params[:redirect_scope]
load_all_contexts :permissions => [:manage_user_notes]
notes_enabled = @current_user.associated_accounts.any?{|a| a.enable_user_notes }
can_add_notes_for_account = notes_enabled && @current_user.associated_accounts.any?{|a| a.grants_right?(@current_user, nil, :manage_students) }
# everything the client-side conversations app needs at boot
js_env(:CONVERSATIONS => {
:USER => conversation_users_json([@current_user], @current_user, session, :include_participant_contexts => false).first,
:CONTEXTS => @contexts,
:NOTES_ENABLED => notes_enabled,
:CAN_ADD_NOTES_FOR_ACCOUNT => can_add_notes_for_account,
:SHOW_INTRO => !@current_user.watched_conversations_intro?,
:FOLDER_ID => @current_user.conversation_attachments_folder.id,
:MEDIA_COMMENTS_ENABLED => feature_enabled?(:kaltura),
})
end
end
# @API Create a conversation
# Create a new conversation with one or more recipients. If there is already
# an existing private conversation with the given recipients, it will be
# reused.
#
# @argument recipients[] An array of recipient ids. These may be user ids
# or course/group ids prefixed with "course_" or "group_" respectively,
# e.g. recipients[]=1&recipients[]=2&recipients[]=course_3
# @argument body The message to be sent
# @argument group_conversation [true|false] Ignored if there is just one
# recipient, defaults to false. If true, this will be a group conversation
# (i.e. all recipients will see all messages and replies). If false,
# individual private conversations will be started with each recipient.
# @argument attachment_ids[] An array of attachments ids. These must be
# files that have been previously uploaded to the sender's "conversation
# attachments" folder.
# @argument media_comment_id Media comment id of an audio of video file to
# be associated with this message.
# @argument media_comment_type ["audio"|"video"] Type of the associated
# media file
# @argument mode ["sync"|"async"] Determines whether the messages will be
# created/sent synchronously or asynchronously. Defaults to sync, and this
# option is ignored if this is a group conversation or there is just one
# recipient (i.e. it must be a bulk private message). When sent async, the
# response will be an empty array (batch status can be queried via the
# {api:ConversationsController#batches batches API})
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# POST create. Validates recipients/body, then either generates a batch of
# individual private conversations (one per recipient, sync or async) or
# starts/reuses a single conversation with all recipients.
def create
return render_error('recipients', 'blank') if params[:recipients].blank?
return render_error('recipients', 'invalid') if @recipients.blank?
return render_error('body', 'blank') if params[:body].blank?
# multiple recipients without group_conversation => one private conversation each
batch_private_messages = !value_to_boolean(params[:group_conversation]) && @recipients.size > 1
recipient_ids = @recipients.keys
message = build_message
if batch_private_messages
mode = params[:mode] == 'async' ? :async : :sync
batch = ConversationBatch.generate(message, recipient_ids, mode, :user_map => @recipients, :tags => @tags)
if mode == :async
# async returns immediately; progress is polled via the batches action
headers['X-Conversation-Batch-Id'] = batch.id.to_s
return render :json => [], :status => :accepted
end
# reload and preload stuff
conversations = ConversationParticipant.find(:all, :conditions => {:id => batch.conversations.map(&:id)}, :include => [:conversation], :order => "visible_last_authored_at DESC, last_message_at DESC, id DESC")
Conversation.preload_participants(conversations.map(&:conversation))
ConversationParticipant.preload_latest_messages(conversations, @current_user.id)
visibility_map = infer_visibility(*conversations)
render :json => conversations.map{ |c| conversation_json(c, @current_user, session, :include_participant_avatars => false, :include_participant_contexts => false, :visible => visibility_map[c.conversation_id]) }, :status => :created
else
@conversation = @current_user.initiate_conversation(recipient_ids)
@conversation.add_message(message, :tags => @tags)
render :json => [conversation_json(@conversation.reload, @current_user, session, :include_indirect_participants => true, :messages => [message])], :status => :created
end
end
# @API
# Returns any currently running conversation batches for the current user.
# Conversation batches are created when a bulk private message is sent
# asynchronously (see the mode argument to the {api:ConversationsController#create create API action}).
#
# @example_response
# [
# {
# "id": 1,
# "workflow_state": "created",
# "completion": 0.1234,
# "tags": [],
# "message":
# {
# "id": 1,
# "created_at": "2011-09-02T10:00:00Z",
# "body": "quick reminder, no class tomorrow",
# "author_id": 1,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# }
# ]
# Paginated list of the current user's in-progress conversation batches,
# rendered with the standard batch JSON serializer.
def batches
in_progress = @current_user.conversation_batches.in_progress
page = Api.paginate(in_progress, self, api_v1_conversations_batches_url, :order => :id)
render :json => page.map { |batch| conversation_batch_json(batch, @current_user, session) }
end
# @API Get a single conversation
# Returns information for a single conversation. Response includes all
# fields that are present in the list/index action, as well as messages,
# submissions, and extended participant information.
#
# @argument interleave_submissions Boolean, default false. If true,
# submission data will be returned as first class messages interleaved
# with other messages. The submission details (comments, assignment, etc.)
# will be stored as the submission property on the message. Note that if
# set, the message_count will also include these messages in the total.
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument auto_mark_as_read Boolean, default true. If true, unread
# conversations will be automatically marked as read. This will default
# to false in a future API release, so clients should explicitly send
# true if that is the desired behavior.
#
# @response_field participants Array of relevant users. Includes current
# user. If there are forwarded messages in this conversation, the authors
# of those messages will also be included, even if they are not
# participating in this conversation. Fields include:
# @response_field messages Array of messages, newest first. Fields include:
# id:: The unique identifier for the message
# created_at:: The timestamp of the message
# body:: The actual message body
# author_id:: The id of the user who sent the message (see audience, participants)
# generated:: If true, indicates this is a system-generated message (e.g. "Bob added Alice to the conversation")
# media_comment:: Audio/video comment data for this message (if applicable). Fields include: display_name, content-type, media_id, media_type, url
# forwarded_messages:: If this message contains forwarded messages, they will be included here (same format as this list). Note that those messages may have forwarded messages of their own, etc.
# attachments:: Array of attachments for this message. Fields include: display_name, content-type, filename, url
# @response_field submissions Array of assignment submissions having
# comments relevant to this conversation. These should be interleaved with
# the messages when displaying to the user. See the {api:SubmissionsApiController#index Submissions API documentation}
# for details on the fields included. This response includes
# the submission_comments and assignment associations.
#
# @example_response
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}],
# "messages":
# [
# {
# "id": 3,
# "created_at": "2011-09-02T12:00:00Z",
# "body": "sure thing, here's the file",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": [{"id": 1, "display_name": "notes.doc", "uuid": "abcdefabcdefabcdefabcdefabcdef"}]
# },
# {
# "id": 2,
# "created_at": "2011-09-02T11:00:00Z",
# "body": "hey, bob didn't get the notes. do you have a copy i can give him?",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages":
# [
# {
# "id": 1,
# "created_at": "2011-09-02T10:00:00Z",
# "body": "can i get a copy of the notes? i was out",
# "author_id": 3,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ],
# "attachments": []
# }
# ],
# "submissions": []
# }
# GET show. Non-JSON requests redirect to the conversations UI with the
# scope the conversation belongs in; JSON requests optionally mark the
# conversation read and render it with messages and submissions.
def show
unless request.xhr? || params[:format] == 'json'
scope = if @conversation.archived?
'archived'
elsif @conversation.visible_last_authored_at && !@conversation.last_message_at
# authored messages but nothing received => the "sent" view
'sent'
else
'default'
end
return redirect_to conversations_path(:scope => scope, :id => @conversation.conversation_id, :message => params[:message])
end
@conversation.update_attribute(:workflow_state, "read") if @conversation.unread? && auto_mark_as_read?
messages = submissions = nil
# load from the slave but render OUTSIDE this block: rendering can touch
# json (e.g. discussion topics) that implicitly performs writes
ActiveRecord::Base::ConnectionSpecification.with_environment(:slave) do
messages = @conversation.messages
ConversationMessage.send(:preload_associations, messages, :asset)
submissions = messages.map(&:submission).compact
Submission.send(:preload_associations, submissions, [:assignment, :submission_comments])
if interleave_submissions
submissions = nil
else
# keep submission-backed messages out of the plain message list
messages = messages.select{ |message| message.submission.nil? }
end
end
render :json => conversation_json(@conversation,
@current_user,
session,
:include_indirect_participants => true,
:messages => messages,
:submissions => submissions)
end
# @API Edit a conversation
# Updates attributes for a single conversation.
#
# @argument conversation[workflow_state] ["read"|"unread"|"archived"] Change the state of this conversation
# @argument conversation[subscribed] [true|false] Toggle the current user's subscription to the conversation (only valid for group conversations). If unsubscribed, the user will still have access to the latest messages, but the conversation won't be automatically flagged as unread, nor will it jump to the top of the inbox.
# @argument conversation[starred] [true|false] Toggle the starred state of the current user's view of the conversation.
# @argument scope [optional, "unread"|"starred"|"archived"]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
# @argument filter [optional, course_id|group_id|user_id]
# Used when generating "visible" in the API response. See the explanation
# under the {api:ConversationsController#index index API action}
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": ["attachments"],
# "audience": [2],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}]
# }
# Apply the submitted conversation attributes; render the updated
# conversation on success, the validation errors (400) otherwise.
def update
updated = @conversation.update_attributes(params[:conversation])
if updated
render :json => conversation_json(@conversation, @current_user, session)
else
render :json => @conversation.errors, :status => :bad_request
end
end
# @API Mark all as read
# Mark all conversations as read.
# Marks every conversation of the current user as read; responds with an
# empty JSON object.
def mark_all_as_read
@current_user.mark_all_conversations_as_read!
render :json => {}
end
# @API Delete a conversation
# Delete this conversation and its messages. Note that this only deletes
# this user's view of the conversation.
#
# Response includes same fields as UPDATE action
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": null,
# "last_message_at": null,
# "message_count": 0,
# "subscribed": true,
# "private": true,
# "starred": false,
# "properties": []
# }
# Deletes the current user's view of the conversation by removing all of
# its messages; the rendered JSON is flagged :visible => false.
def destroy
@conversation.remove_messages(:all)
render :json => conversation_json(@conversation, @current_user, session, :visible => false)
end
# @API Add recipients
# Add recipients to an existing group conversation. Response is similar to
# the GET/show action, except that omits submissions and only includes the
# latest message (e.g. "joe was added to the conversation by bob")
#
# @argument recipients[] An array of recipient ids. These may be user ids
# or course/group ids prefixed with "course_" or "group_" respectively,
# e.g. recipients[]=1&recipients[]=2&recipients[]=course_3
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "let's talk this over with jim",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": false,
# "starred": null,
# "properties": [],
# "audience": [2, 3, 4],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}, {"id": 4, "name": "Jim Admin"}],
# "messages":
# [
# {
# "id": 4,
# "created_at": "2011-09-02T12:10:00Z",
# "body": "Jim was added to the conversation by Joe TA",
# "author_id": 1,
# "generated": true,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ]
# }
#
# Add the normalized @recipients to the conversation and render it with
# just its first (most recent) message; 400 when no valid recipients.
def add_recipients
return render(:json => {}, :status => :bad_request) if @recipients.blank?
@conversation.add_participants(@recipients.keys, :tags => @tags, :root_account_id => @domain_root_account.id)
latest = @conversation.messages.first
render :json => conversation_json(@conversation.reload, @current_user, session, :messages => [latest])
end
# @API Add a message
# Add a message to an existing conversation. Response is similar to the
# GET/show action, except that omits submissions and only includes the
# latest message (i.e. what we just sent)
#
# @argument body The message to be sent
# @argument attachment_ids[] An array of attachments ids. These must be
# files that have been previously uploaded to the sender's "conversation
# attachments" folder.
# @argument media_comment_id Media comment id of an audio of video file to
# be associated with this message.
# @argument media_comment_type ["audio"|"video"] Type of the associated
# media file
#
# @example_response
# {
# "id": 2,
# "workflow_state": "unread",
# "last_message": "let's talk this over with jim",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 2,
# "subscribed": true,
# "private": false,
# "starred": null,
# "properties": [],
# "audience": [2, 3],
# "audience_contexts": {"courses": {"1": []}, "groups": {}},
# "avatar_url": "https://canvas.instructure.com/images/messages/avatar-group-50.png",
# "participants": [{"id": 1, "name": "Joe TA"}, {"id": 2, "name": "Jane Teacher"}, {"id": 3, "name": "Bob Student"}],
# "messages":
# [
# {
# "id": 3,
# "created_at": "2011-09-02T12:00:00Z",
# "body": "let's talk this over with jim",
# "author_id": 2,
# "generated": false,
# "media_comment": null,
# "forwarded_messages": [],
# "attachments": []
# }
# ]
# }
#
# Append a message (built from request params) to the conversation, even
# if the user previously deleted their view of it; 400 when body is blank.
def add_message
get_conversation(true)
return render(:json => {}, :status => :bad_request) if params[:body].blank?
message = build_message
@conversation.add_message message, :tags => @tags
render :json => conversation_json(@conversation.reload, @current_user, session, :messages => [message])
end
# @API Delete a message
# Delete messages from this conversation. Note that this only affects this user's view of the conversation.
# If all messages are deleted, the conversation will be as well (equivalent to DELETE)
#
# @argument remove Array of message ids to be deleted
#
# @example_response
# {
# "id": 2,
# "workflow_state": "read",
# "last_message": "sure thing, here's the file",
# "last_message_at": "2011-09-02T12:00:00-06:00",
# "message_count": 1,
# "subscribed": true,
# "private": true,
# "starred": null,
# "properties": ["attachments"]
# }
# Remove the messages whose ids appear in params[:remove] from this
# user's view of the conversation, then render the updated conversation.
def remove_messages
return unless params[:remove]
ids_to_remove = params[:remove]
doomed = @conversation.messages.select { |message| ids_to_remove.include?(message.id.to_s) }
@conversation.remove_messages(*doomed)
render :json => conversation_json(@conversation, @current_user, session)
end
# @API Find recipients
#
# Deprecated, see the {api:SearchController#recipients Find recipients endpoint} in the Search API
def find_recipients; end
# Atom feed of the user's conversation messages, newest first, with each
# entry annotated by the audience/context blurb for its conversation.
def public_feed
return unless get_feed_context(:only => [:user])
@current_user = @context
load_all_contexts
feed = Atom::Feed.new do |f|
f.title = t('titles.rss_feed', "Conversations Feed")
f.links << Atom::Link.new(:href => conversations_url, :rel => 'self')
f.updated = Time.now
f.id = conversations_url
end
@entries = []
@conversation_contexts = {}
@current_user.conversations.each do |conversation|
# .human scope — presumably filters out generated/system messages; verify in ConversationMessage
@entries.concat(conversation.messages.human)
if @conversation_contexts[conversation.conversation.id].blank?
# compute the context blurb once per conversation
@conversation_contexts[conversation.conversation.id] = feed_context_content(conversation)
end
end
@entries = @entries.sort_by{|e| e.created_at}.reverse
@entries.each do |entry|
feed.entries << entry.to_atom(:additional_content => @conversation_contexts[entry.conversation.id])
end
respond_to do |format|
format.atom { render :text => feed.to_xml }
end
end
# Build the HTML blurb describing who a conversation is with (and, when
# known, in which courses/groups) for inclusion in feed entries.
def feed_context_content(conversation)
audience = conversation.other_participants
audience_names = audience.map(&:name)
audience_contexts = contexts_for(audience, conversation.context_tags) # will be 0, 1, or 2 contexts
audience_context_names = [:courses, :groups].inject([]) do |names, context_key|
names + audience_contexts[context_key].keys.map { |k| @contexts[context_key][k] && @contexts[context_key][k][:name] }
end.reject(&:blank?)
pieces = []
pieces << "<hr />"
pieces << "<div>#{t('conversation_context', "From a conversation with")} "
participant_list_cutoff = 2
if audience_names.length <= participant_list_cutoff
pieces << "#{ERB::Util.h(audience_names.to_sentence)}"
else
others_string = t('other_recipients', {
:one => "and 1 other",
:other => "and %{count} others"
},
:count => audience_names.length - participant_list_cutoff)
pieces << "#{ERB::Util.h(audience_names[0...participant_list_cutoff].join(", "))} #{others_string}"
end
pieces << " (#{ERB::Util.h(audience_context_names.to_sentence)})" unless audience_context_names.empty?
pieces << "</div>"
pieces.join
end
# Record that the user has dismissed the conversations intro (no-op if
# already watched); always responds with an empty JSON object.
def watched_intro
if !@current_user.watched_conversations_intro?
@current_user.watched_conversations_intro
@current_user.save
end
render :json => {}
end
private
# Render a single field-level validation error in the array-of-hashes
# shape the client expects, with a 400 status.
def render_error(attribute, message)
error = { :attribute => attribute, :message => message }
render :json => [error], :status => :bad_request
end
# Resolve params[:scope] and params[:filter] into @conversations_scope.
# Unrecognized scopes fall back to the default view and normalize
# params[:scope] to 'inbox'.
def infer_scope
@conversations_scope =
case params[:scope]
when 'unread' then @current_user.conversations.unread
when 'starred' then @current_user.conversations.starred
when 'sent' then @current_user.all_conversations.sent
when 'archived' then @current_user.conversations.archived
else
params[:scope] = 'inbox'
@current_user.conversations.default
end
filters = param_array(:filter)
@conversations_scope = @conversations_scope.for_masquerading_user(@real_current_user) if @real_current_user
@conversations_scope = @conversations_scope.tagged(*filters) if filters.present?
@set_visibility = true
end
# Determine which of the given conversations are visible under the
# current @conversations_scope.
#
# NOTE the dual return shape: a Hash of conversation_id => true/false
# (default false) when called with multiple conversations, but a bare
# boolean when called with exactly one. Callers must handle both.
def infer_visibility(*conversations)
result = Hash.new(false)
visible_conversations = @conversations_scope.find(:all,
:select => "conversation_id",
:conditions => {:conversation_id => conversations.map(&:conversation_id)}
)
visible_conversations.each { |c| result[c.conversation_id] = true }
if conversations.size == 1
result[conversations.first.conversation_id]
else
result
end
end
# Expand params[:recipients] (comma-delimited string or array of numeric
# user ids and context strings like "course_123") into @recipients, a
# hash of user id => messageable user. Ids the current user cannot
# message are silently dropped.
def normalize_recipients
if params[:recipients]
recipient_ids = params[:recipients]
if recipient_ids.is_a?(String)
params[:recipients] = recipient_ids = recipient_ids.split(/,/)
end
# plain numeric ids resolve directly to messageable users...
recipients = @current_user.messageable_users(:ids => recipient_ids.grep(/\A\d+\z/), :conversation_id => params[:from_conversation_id])
# ...while context ids (course_x/group_x/...) expand to their members
recipient_ids.grep(User::MESSAGEABLE_USER_CONTEXT_REGEX).map do |context|
recipients.concat @current_user.messageable_users(:context => context)
end
# de-dupe by user id, keeping the first occurrence
@recipients = recipients.inject({}){ |hash, user|
hash[user.id] ||= user
hash
}
end
end
# Build @tags from the tags and recipients params: normalize them, then
# for each "group_N" tag also add the asset string of that group's
# context, and de-duplicate.
def infer_tags
tags = param_array(:tags).concat(param_array(:recipients))
tags = SimpleTags.normalize_tags(tags)
# `and` deliberately short-circuits to nil when the group no longer
# exists (find_by_id returns nil); compact drops those entries
tags += tags.grep(/\Agroup_(\d+)\z/){ g = Group.find_by_id($1.to_i) and g.context.asset_string }.compact
@tags = tags.uniq
end
# Look up the current user's view of the requested conversation, raising
# RecordNotFound when absent. Deleted views (message_count == 0) are
# excluded unless allow_deleted is true.
def get_conversation(allow_deleted = false)
scope = @current_user.all_conversations
scope = scope.scoped(:conditions => "message_count > 0") unless allow_deleted
conversation_id = params[:id] || params[:conversation_id] || 0
@conversation = scope.find_by_conversation_id(conversation_id)
raise ActiveRecord::RecordNotFound unless @conversation
end
# Construct (but do not deliver) a conversation message from the request
# params: body, attachments, forwarded messages, optional media comment,
# and an optional user note flag.
def build_message
Conversation.build_message(
@current_user,
params[:body],
:attachment_ids => params[:attachment_ids],
:forwarded_message_ids => params[:forwarded_message_ids],
:root_account_id => @domain_root_account.id,
:media_comment => infer_media_comment,
:generate_user_note => params[:user_note]
)
end
# Find or lazily create the MediaObject referenced by the media comment
# params. Returns nil when either media_comment_id or media_comment_type
# is missing; otherwise returns the (saved) media object, re-homed to the
# current user as its context.
def infer_media_comment
media_id = params[:media_comment_id]
media_type = params[:media_comment_type]
if media_id.present? && media_type.present?
media_comment = MediaObject.by_media_id(media_id).by_media_type(media_type).first
unless media_comment
# media_comment is provably nil here, so plain assignment suffices
# (the original `media_comment ||= MediaObject.new` was redundant)
media_comment = MediaObject.new
media_comment.media_type = media_type
media_comment.media_id = media_id
media_comment.root_account_id = @domain_root_account.id
media_comment.user = @current_user
end
media_comment.context = @current_user
media_comment.save
media_comment
end
end
# TODO API v2: default to true, like we do in the UI
# Whether submission comments should be interleaved with conversation
# messages. UI (non-API) requests always get them; API requests only on
# explicit opt-in via the param.
def interleave_submissions
return true unless api_request?
value_to_boolean(params[:interleave_submissions])
end
# Whether participant enrollment/membership info is included for private
# conversations. API requests always include it; UI requests only on
# explicit opt-in via the param.
def include_private_conversation_enrollments
return true if api_request?
value_to_boolean(params[:include_private_conversation_enrollments])
end
# TODO API v2: default to false, like we do in the UI
# Whether viewing an unread conversation should mark it read. Defaults to
# true for API requests. NOTE: the ||= intentionally writes the default
# back into params before the boolean cast.
def auto_mark_as_read?
params[:auto_mark_as_read] ||= api_request?
value_to_boolean(params[:auto_mark_as_read])
end
# Look up a param and coerce it to an array, treating "" the same as
# absent (presence maps it to nil, which becomes []); nil entries dropped.
def param_array(key)
raw = params[key].presence || []
Array(raw).compact
end
end
|
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# ActiveResource was extracted from Rails core in 4.0; requiring its
# railtie breaks boot after the upgrade, so it is disabled.
# require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Rails application configuration for the EmberRailsExample app. Mostly
# generator defaults; the active customizations are encoding, parameter
# filtering, JSON HTML-escaping, mass-assignment whitelisting, and the
# asset pipeline.
module EmberRailsExample
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
Stop using ActiveResource. See http://stackoverflow.com/questions/16782198/cannot-load-railtie-after-upgrade-to-rails-4-per-ruby-railstutorial-org#answer-17276640
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# ActiveResource is no longer part of Rails 4, so its railtie cannot be loaded.
#require "active_resource/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module EmberRailsExample
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
|
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)
require_relative '../lib/canvas_yaml'
# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
module CanvasRails
class Application < Rails::Application
$LOAD_PATH << config.root.to_s
config.encoding = 'utf-8'
require 'logging_filter'
config.filter_parameters.concat LoggingFilter.filtered_parameters
config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenScopeError'] = 401
config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
config.action_dispatch.rescue_responses['CanvasHttp::CircuitBreakerError'] = 502
config.action_dispatch.default_headers.delete('X-Frame-Options')
config.action_dispatch.default_headers['Referrer-Policy'] = 'no-referrer-when-downgrade'
config.action_controller.forgery_protection_origin_check = true
ActiveSupport.to_time_preserves_timezone = true
config.app_generators do |c|
c.test_framework :rspec
c.integration_tool :rspec
c.performance_tool :rspec
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Make Time.zone default to the specified zone, and make Active Record store time values
# in the database in UTC, and return them converted to the specified local zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
config.time_zone = 'UTC'
log_config = File.exist?(Rails.root + 'config/logging.yml') && Rails.application.config_for(:logging).with_indifferent_access
log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
opts = {}
require 'canvas_logger'
config.log_level = log_config['log_level']
log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
opts[:skip_thread_context] = true if log_config['log_context'] == false
case log_config["logger"]
when "syslog"
require 'syslog_wrapper'
log_config["app_ident"] ||= "canvas-lms"
log_config["daemon_ident"] ||= "canvas-lms-daemon"
facilities = 0
(log_config["facilities"] || []).each do |facility|
facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
end
ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
opts[:include_pid] = true if log_config["include_pid"] == true
config.logger = SyslogWrapper.new(ident, facilities, opts)
config.logger.level = log_level
else
log_path = config.paths['log'].first
if ENV['RUNNING_AS_DAEMON'] == 'true'
log_path = Rails.root+'log/delayed_job.log'
end
config.logger = CanvasLogger.new(log_path, log_level, opts)
end
# Activate observers that should always be running
config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
config.active_record.allow_unsafe_raw_sql = :disabled
config.active_support.encode_big_decimal_as_string = false
config.paths['lib'].eager_load!
config.paths.add('app/middleware', eager_load: true, autoload_once: true)
# prevent directory->module inference in these directories from wreaking
# havoc on the app (e.g. stylesheets/base -> ::Base)
config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
#{Rails.root}/app/stylesheets)
config.middleware.use Rack::Chunked
config.middleware.use Rack::Deflater, if: -> (*) {
::Canvas::DynamicSettings.find(tree: :private)["enable_rack_deflation"]
}
config.middleware.use Rack::Brotli, if: -> (*) {
::Canvas::DynamicSettings.find(tree: :private)["enable_rack_brotli"]
}
config.i18n.load_path << Rails.root.join('config', 'locales', 'locales.yml')
config.to_prepare do
require_dependency 'canvas/plugins/default_plugins'
ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
end
# Patches applied to ActiveRecord's PostgreSQL adapter early in boot.
# Adds simple multi-host failover: the configured `host` may be an array,
# and each host is tried in order until one connects.
module PostgreSQLEarlyExtensions
module ConnectionHandling
# Tries each configured host in turn; re-raises the PG::Error from the
# last host only after every host has failed.
def postgresql_connection(config)
conn_params = config.symbolize_keys
# `host` may be a scalar or an array; [nil] means "default host/socket".
hosts = Array(conn_params[:host]).presence || [nil]
hosts.each_with_index do |host, index|
begin
conn_params[:host] = host
return super(conn_params)
rescue ::PG::Error
raise if index == hosts.length - 1
# else try next host
end
end
end
end
# Defaults prepared_statements to off unless the config sets it explicitly.
def initialize(connection, logger, connection_parameters, config)
unless config.key?(:prepared_statements)
config = config.dup
config[:prepared_statements] = false
end
super(connection, logger, connection_parameters, config)
end
# Same multi-host failover for (re)connects; also enforces the minimum
# supported server version once a connection is established.
def connect
hosts = Array(@connection_parameters[:host]).presence || [nil]
hosts.each_with_index do |host, index|
begin
connection_parameters = @connection_parameters.dup
connection_parameters[:host] = host
@connection = PG::Connection.connect(connection_parameters)
configure_connection
raise "Canvas requires PostgreSQL 9.5 or newer" unless postgresql_version >= 90500
break
rescue ::PG::Error => error
# A missing database gets its own error class so callers (e.g. db:create
# tasks) can react; any other PG::Error falls through to the next host.
if error.message.include?("does not exist")
raise ActiveRecord::NoDatabaseError.new(error.message)
elsif index == hosts.length - 1
raise
end
# else try next host
end
end
end
end
# Narrows the adapter's initial pg_type catalog query to only the type names
# already registered in @store (plus their "_"-prefixed array variants),
# instead of loading the entire pg_type table.
module TypeMapInitializerExtensions
def query_conditions_for_initial_load
# Quote each known name and its array form for interpolation into the SQL.
known_type_names = @store.keys.map { |n| "'#{n}'" } + @store.keys.map { |n| "'_#{n}'" }
<<~SQL % [known_type_names.join(", "),]
WHERE
t.typname IN (%s)
SQL
end
end
Autoextend.hook(:"ActiveRecord::Base",
PostgreSQLEarlyExtensions::ConnectionHandling,
singleton: true)
Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
PostgreSQLEarlyExtensions,
method: :prepend)
Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQL::OID::TypeMapInitializer",
TypeMapInitializerExtensions,
method: :prepend)
SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
# Wraps SafeYAML.load so a `safe_parsing` flag is set for the duration of the
# call; the Psych "ruby/object"/"Class" domain-type hook consults this flag to
# reject unexpected Class references while safe parsing is in effect.
module SafeYAMLWithFlag
def load(*args)
# Save the prior flag value and restore it even if parsing raises.
previous, self.safe_parsing = safe_parsing, true
super
ensure
self.safe_parsing = previous
end
end
SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)
Psych.add_domain_type("ruby/object", "Class") do |_type, val|
if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
raise "Cannot load class #{val} from YAML"
end
val.constantize
end
# Silences a Thor option-validation warning triggered by
# active_model_serializers' generator options (see link below).
module PatchThorWarning
# active_model_serializers should be passing `type: :boolean` here:
# https://github.com/rails-api/active_model_serializers/blob/v0.9.0.alpha1/lib/active_model/serializer/generators/serializer/scaffold_controller_generator.rb#L10
# but we don't really care about the warning, it only affects using the rails
# generator for a resource
#
# Easiest way to avoid the warning for now is to patch thor
def validate_default_type!
# Skip validation only for the --serializer switch; all others still validate.
return if switch_name == "--serializer"
super
end
end
Autoextend.hook(:"Thor::Option", PatchThorWarning, method: :prepend)
# Extend any base classes, even gem classes
Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }
# tell Rails to use the native XML parser instead of REXML
ActiveSupport::XmlMini.backend = 'Nokogiri'
class NotImplemented < StandardError; end
if defined?(PhusionPassenger)
PhusionPassenger.on_event(:after_installing_signal_handlers) do
Canvas::Reloader.trap_signal
end
PhusionPassenger.on_event(:starting_worker_process) do |forked|
if forked
# We're in smart spawning mode.
# Reset imperium because it's possible to accidentally share an open http
# socket between processes shortly after fork.
Imperium::Agent.reset_default_client
Imperium::Catalog.reset_default_client
Imperium::Client.reset_default_client
Imperium::Events.reset_default_client
Imperium::KV.reset_default_client
# it's really important to reset the default clients
# BEFORE letting dynamic setting pull a new one.
# do not change this order.
Canvas::DynamicSettings.on_fork!
else
# We're in direct spawning mode. We don't need to do anything.
end
end
else
config.to_prepare do
Canvas::Reloader.trap_signal
end
end
# Ensure that the automatic redis reconnection on fork works
# This is the default in redis-rb, but for some reason rails overrides it
# See e.g. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22704
ActiveSupport::Cache::RedisCacheStore::DEFAULT_REDIS_OPTIONS[:reconnect_attempts] = 1
# don't wrap fields with errors with a <div class="fieldWithErrors" />,
# since that could leak information (e.g. valid vs invalid username on
# login page)
config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }
# Rack app installed as config.exceptions_app: dispatches any exception that
# bubbles up to the middleware back into ApplicationController's
# rescue_action_dispatch_exception action so Canvas renders its own error page.
class ExceptionsApp
def call(env)
req = ActionDispatch::Request.new(env)
res = ApplicationController.make_response!(req)
ApplicationController.dispatch('rescue_action_dispatch_exception', req, res)
end
end
config.exceptions_app = ExceptionsApp.new
config.before_initialize do
config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
end
if config.action_dispatch.rack_cache != false
config.action_dispatch.rack_cache[:ignore_headers] =
%w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
end
# Overrides Rails' boot-time secret_key_base validation with a no-op.
def validate_secret_key_config!
# no validation; we don't use Rails' CookieStore session middleware, so we
# don't care about secret_key_base
end
initializer "canvas.init_dynamic_settings", before: "canvas.extend_shard" do
settings = ConfigFile.load("consul")
if settings.present?
begin
Canvas::DynamicSettings.config = settings
rescue Imperium::UnableToConnectError
Rails.logger.warn("INITIALIZATION: can't reach consul, attempts to load DynamicSettings will fail")
end
end
end
initializer "canvas.extend_shard", before: "active_record.initialize_database" do
# have to do this before the default shard loads
Switchman::Shard.serialize :settings, Hash
Switchman.cache = -> { MultiCache.cache }
end
# Newer rails has this in rails proper
attr_writer :credentials
initializer "canvas.init_credentials", before: "active_record.initialize_database" do
self.credentials = Canvas::Credentials.new(credentials)
end
# we don't know what middleware to make SessionsTimeout follow until after
# we've loaded config/initializers/session_store.rb
initializer("extend_middleware_stack", after: :load_config_initializers) do |app|
app.config.middleware.insert_before(config.session_store, LoadAccount)
app.config.middleware.swap(ActionDispatch::RequestId, RequestContextGenerator)
app.config.middleware.insert_after(config.session_store, RequestContextSession)
app.config.middleware.insert_before(Rack::Head, RequestThrottle)
app.config.middleware.insert_before(Rack::MethodOverride, PreventNonMultipartParse)
end
end
end
fix secret_key_base non-validation in rails 6
Change-Id: I1ca76f20d80e9941f4a41f837271dbd8da0030ff
Reviewed-on: https://gerrit.instructure.com/c/canvas-lms/+/259140
Reviewed-by: Jacob Burroughs <8ecea6e385af5cf9f53123f5ca17fb5fd6a6d4b2@instructure.com>
Reviewed-by: Rob Orton <7e09c9d3e96378bf549fc283fd6e1e5b7014cc33@instructure.com>
Tested-by: Service Cloud Jenkins <9144042a601061f88f1e1d7a1753ea3e2972119d@instructure.com>
QA-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
Product-Review: Cody Cutrer <c4fe2b1d90ef8f2548c8aeabfa633434316f0305@instructure.com>
#
# Copyright (C) 2013 - present Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
# Put this in config/application.rb
require File.expand_path('../boot', __FILE__)
require_relative '../lib/canvas_yaml'
# Yes, it doesn't seem DRY to list these both in the if and else
# but this used to be "require 'rails/all'" which included sprockets.
# I needed to explicitly opt-out of sprockets but since I'm not sure
# about the other frameworks, I left this so it would be exactly the same
# as "require 'rails/all'" but without sprockets--even though it is a little
# different then the rails 3 else block. If the difference is not intended,
# they can be pulled out of the if/else
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
# require "sprockets/railtie" # Do not enable the Rails Asset Pipeline
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
module CanvasRails
class Application < Rails::Application
$LOAD_PATH << config.root.to_s
config.encoding = 'utf-8'
require 'logging_filter'
config.filter_parameters.concat LoggingFilter.filtered_parameters
config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenError'] = 401
config.action_dispatch.rescue_responses['AuthenticationMethods::AccessTokenScopeError'] = 401
config.action_dispatch.rescue_responses['AuthenticationMethods::LoggedOutError'] = 401
config.action_dispatch.rescue_responses['CanvasHttp::CircuitBreakerError'] = 502
config.action_dispatch.default_headers.delete('X-Frame-Options')
config.action_dispatch.default_headers['Referrer-Policy'] = 'no-referrer-when-downgrade'
config.action_controller.forgery_protection_origin_check = true
ActiveSupport.to_time_preserves_timezone = true
config.app_generators do |c|
c.test_framework :rspec
c.integration_tool :rspec
c.performance_tool :rspec
end
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# See Rails::Configuration for more options.
# Make Time.zone default to the specified zone, and make Active Record store time values
# in the database in UTC, and return them converted to the specified local zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Comment line to use default local time.
config.time_zone = 'UTC'
log_config = File.exist?(Rails.root + 'config/logging.yml') && Rails.application.config_for(:logging).with_indifferent_access
log_config = { 'logger' => 'rails', 'log_level' => 'debug' }.merge(log_config || {})
opts = {}
require 'canvas_logger'
config.log_level = log_config['log_level']
log_level = ActiveSupport::Logger.const_get(config.log_level.to_s.upcase)
opts[:skip_thread_context] = true if log_config['log_context'] == false
case log_config["logger"]
when "syslog"
require 'syslog_wrapper'
log_config["app_ident"] ||= "canvas-lms"
log_config["daemon_ident"] ||= "canvas-lms-daemon"
facilities = 0
(log_config["facilities"] || []).each do |facility|
facilities |= Syslog.const_get "LOG_#{facility.to_s.upcase}"
end
ident = ENV['RUNNING_AS_DAEMON'] == 'true' ? log_config["daemon_ident"] : log_config["app_ident"]
opts[:include_pid] = true if log_config["include_pid"] == true
config.logger = SyslogWrapper.new(ident, facilities, opts)
config.logger.level = log_level
else
log_path = config.paths['log'].first
if ENV['RUNNING_AS_DAEMON'] == 'true'
log_path = Rails.root+'log/delayed_job.log'
end
config.logger = CanvasLogger.new(log_path, log_level, opts)
end
# Activate observers that should always be running
config.active_record.observers = [:cacher, :stream_item_cache, :live_events_observer ]
config.active_record.allow_unsafe_raw_sql = :disabled
config.active_support.encode_big_decimal_as_string = false
config.paths['lib'].eager_load!
config.paths.add('app/middleware', eager_load: true, autoload_once: true)
# prevent directory->module inference in these directories from wreaking
# havoc on the app (e.g. stylesheets/base -> ::Base)
config.eager_load_paths -= %W(#{Rails.root}/app/coffeescripts
#{Rails.root}/app/stylesheets)
config.middleware.use Rack::Chunked
config.middleware.use Rack::Deflater, if: -> (*) {
::Canvas::DynamicSettings.find(tree: :private)["enable_rack_deflation"]
}
config.middleware.use Rack::Brotli, if: -> (*) {
::Canvas::DynamicSettings.find(tree: :private)["enable_rack_brotli"]
}
config.i18n.load_path << Rails.root.join('config', 'locales', 'locales.yml')
config.to_prepare do
require_dependency 'canvas/plugins/default_plugins'
ActiveSupport::JSON::Encoding.escape_html_entities_in_json = true
end
module PostgreSQLEarlyExtensions
module ConnectionHandling
def postgresql_connection(config)
conn_params = config.symbolize_keys
hosts = Array(conn_params[:host]).presence || [nil]
hosts.each_with_index do |host, index|
begin
conn_params[:host] = host
return super(conn_params)
rescue ::PG::Error
raise if index == hosts.length - 1
# else try next host
end
end
end
end
def initialize(connection, logger, connection_parameters, config)
unless config.key?(:prepared_statements)
config = config.dup
config[:prepared_statements] = false
end
super(connection, logger, connection_parameters, config)
end
def connect
hosts = Array(@connection_parameters[:host]).presence || [nil]
hosts.each_with_index do |host, index|
begin
connection_parameters = @connection_parameters.dup
connection_parameters[:host] = host
@connection = PG::Connection.connect(connection_parameters)
configure_connection
raise "Canvas requires PostgreSQL 9.5 or newer" unless postgresql_version >= 90500
break
rescue ::PG::Error => error
if error.message.include?("does not exist")
raise ActiveRecord::NoDatabaseError.new(error.message)
elsif index == hosts.length - 1
raise
end
# else try next host
end
end
end
end
module TypeMapInitializerExtensions
def query_conditions_for_initial_load
known_type_names = @store.keys.map { |n| "'#{n}'" } + @store.keys.map { |n| "'_#{n}'" }
<<~SQL % [known_type_names.join(", "),]
WHERE
t.typname IN (%s)
SQL
end
end
Autoextend.hook(:"ActiveRecord::Base",
PostgreSQLEarlyExtensions::ConnectionHandling,
singleton: true)
Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQLAdapter",
PostgreSQLEarlyExtensions,
method: :prepend)
Autoextend.hook(:"ActiveRecord::ConnectionAdapters::PostgreSQL::OID::TypeMapInitializer",
TypeMapInitializerExtensions,
method: :prepend)
SafeYAML.singleton_class.send(:attr_accessor, :safe_parsing)
module SafeYAMLWithFlag
def load(*args)
previous, self.safe_parsing = safe_parsing, true
super
ensure
self.safe_parsing = previous
end
end
SafeYAML.singleton_class.prepend(SafeYAMLWithFlag)
Psych.add_domain_type("ruby/object", "Class") do |_type, val|
if SafeYAML.safe_parsing && !Canvas::Migration.valid_converter_classes.include?(val)
raise "Cannot load class #{val} from YAML"
end
val.constantize
end
module PatchThorWarning
# active_model_serializers should be passing `type: :boolean` here:
# https://github.com/rails-api/active_model_serializers/blob/v0.9.0.alpha1/lib/active_model/serializer/generators/serializer/scaffold_controller_generator.rb#L10
# but we don't really care about the warning, it only affects using the rails
# generator for a resource
#
# Easiest way to avoid the warning for now is to patch thor
def validate_default_type!
return if switch_name == "--serializer"
super
end
end
Autoextend.hook(:"Thor::Option", PatchThorWarning, method: :prepend)
# Extend any base classes, even gem classes
Dir.glob("#{Rails.root}/lib/ext/**/*.rb").each { |file| require file }
# tell Rails to use the native XML parser instead of REXML
ActiveSupport::XmlMini.backend = 'Nokogiri'
class NotImplemented < StandardError; end
if defined?(PhusionPassenger)
PhusionPassenger.on_event(:after_installing_signal_handlers) do
Canvas::Reloader.trap_signal
end
PhusionPassenger.on_event(:starting_worker_process) do |forked|
if forked
# We're in smart spawning mode.
# Reset imperium because it's possible to accidentally share an open http
# socket between processes shortly after fork.
Imperium::Agent.reset_default_client
Imperium::Catalog.reset_default_client
Imperium::Client.reset_default_client
Imperium::Events.reset_default_client
Imperium::KV.reset_default_client
# it's really important to reset the default clients
# BEFORE letting dynamic setting pull a new one.
# do not change this order.
Canvas::DynamicSettings.on_fork!
else
# We're in direct spawning mode. We don't need to do anything.
end
end
else
config.to_prepare do
Canvas::Reloader.trap_signal
end
end
# Ensure that the automatic redis reconnection on fork works
# This is the default in redis-rb, but for some reason rails overrides it
# See e.g. https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22704
ActiveSupport::Cache::RedisCacheStore::DEFAULT_REDIS_OPTIONS[:reconnect_attempts] = 1
# don't wrap fields with errors with a <div class="fieldWithErrors" />,
# since that could leak information (e.g. valid vs invalid username on
# login page)
config.action_view.field_error_proc = Proc.new { |html_tag, instance| html_tag }
class ExceptionsApp
def call(env)
req = ActionDispatch::Request.new(env)
res = ApplicationController.make_response!(req)
ApplicationController.dispatch('rescue_action_dispatch_exception', req, res)
end
end
config.exceptions_app = ExceptionsApp.new
config.before_initialize do
config.action_controller.asset_host = Canvas::Cdn.method(:asset_host_for)
end
if config.action_dispatch.rack_cache != false
config.action_dispatch.rack_cache[:ignore_headers] =
%w[Set-Cookie X-Request-Context-Id X-Canvas-User-Id X-Canvas-Meta]
end
# Rails 6 replaced validate_secret_key_config! with this hook; override the
# new name as a no-op for the same reason as before.
def validate_secret_key_base(_)
# no validation; we don't use Rails' CookieStore session middleware, so we
# don't care about secret_key_base
end
# Stand-in key generator whose generate_key returns nil; sufficient because
# nothing here derives keys from secret_key_base (see note above).
class DummyKeyGenerator
def self.generate_key(*)
end
end
# Substitute the dummy so Rails never demands a real secret_key_base.
def key_generator
DummyKeyGenerator
end
initializer "canvas.init_dynamic_settings", before: "canvas.extend_shard" do
settings = ConfigFile.load("consul")
if settings.present?
begin
Canvas::DynamicSettings.config = settings
rescue Imperium::UnableToConnectError
Rails.logger.warn("INITIALIZATION: can't reach consul, attempts to load DynamicSettings will fail")
end
end
end
initializer "canvas.extend_shard", before: "active_record.initialize_database" do
# have to do this before the default shard loads
Switchman::Shard.serialize :settings, Hash
Switchman.cache = -> { MultiCache.cache }
end
# Newer rails has this in rails proper
attr_writer :credentials
initializer "canvas.init_credentials", before: "active_record.initialize_database" do
self.credentials = Canvas::Credentials.new(credentials)
end
# we don't know what middleware to make SessionsTimeout follow until after
# we've loaded config/initializers/session_store.rb
initializer("extend_middleware_stack", after: :load_config_initializers) do |app|
app.config.middleware.insert_before(config.session_store, LoadAccount)
app.config.middleware.swap(ActionDispatch::RequestId, RequestContextGenerator)
app.config.middleware.insert_after(config.session_store, RequestContextSession)
app.config.middleware.insert_before(Rack::Head, RequestThrottle)
app.config.middleware.insert_before(Rack::MethodOverride, PreventNonMultipartParse)
end
end
end
|
# Inbox listing and replies for the current user's conversations.
# NOTE(review): appears to target a Mailboxer-style API on current_user
# (#mailbox, #reply_to_conversation) — confirm against the messaging gem in use.
class ConversationsController < ApplicationController
before_action :get_mailbox
before_action :get_conversation, except: [:index]
# GET /conversations — paginated inbox, 10 conversations per page.
def index
@conversations = @mailbox.inbox.paginate(page: params[:page], per_page: 10)
end
# GET /conversations/:id — @conversation is loaded by the before_action.
def show
end
# Sends params[:body] as a reply to the loaded conversation, then redirects
# back to it with a flash notice.
def reply
current_user.reply_to_conversation(@conversation, params[:body])
flash[:success] = 'Reply sent'
redirect_to conversation_path(@conversation)
end
private
# Memoizes the current user's mailbox for this request.
def get_mailbox
@mailbox ||= current_user.mailbox
end
# Looks up the conversation scoped to the user's own mailbox.
def get_conversation
@conversation ||= @mailbox.conversations.find(params[:id])
end
end
Modify index action for sentbox and trash
# Mailbox browsing (inbox / sent / trash) plus replies for the current user's
# conversations. NOTE(review): assumes a Mailboxer-style API on current_user
# (#mailbox, #reply_to_conversation) — confirm against the messaging gem in use.
class ConversationsController < ApplicationController
  # Folders a client may request; anything else falls back to the inbox.
  VALID_BOXES = %w[inbox sent trash].freeze

  before_action :get_mailbox
  before_action :get_conversation, except: [:index]
  before_action :get_box, only: [:index]

  # GET /conversations?box=inbox|sent|trash — paginated listing, 10 per page.
  # @box is normalized by get_box, so the case is exhaustive.
  def index
    @conversations =
      case @box
      when 'sent'  then @mailbox.sentbox
      when 'trash' then @mailbox.trash
      else @mailbox.inbox
      end
    @conversations = @conversations.paginate(page: params[:page], per_page: 10)
  end

  # GET /conversations/:id — @conversation is loaded by the before_action.
  def show
  end

  # Sends params[:body] as a reply to the loaded conversation, then redirects
  # back to it with a flash notice.
  def reply
    current_user.reply_to_conversation(@conversation, params[:body])
    flash[:success] = 'Reply sent'
    redirect_to conversation_path(@conversation)
  end

  private

  # Normalizes params[:box] to a known folder (defaulting to 'inbox') and
  # exposes it as @box. The include? test alone covers nil/blank values, so
  # the previous `blank? or !include?` check collapses to one condition; the
  # `or` keyword is avoided per Ruby precedence conventions.
  def get_box
    params[:box] = 'inbox' unless VALID_BOXES.include?(params[:box])
    @box = params[:box]
  end

  # Memoizes the current user's mailbox for this request.
  def get_mailbox
    @mailbox ||= current_user.mailbox
  end

  # Looks up the conversation scoped to the user's own mailbox.
  def get_conversation
    @conversation ||= @mailbox.conversations.find(params[:id])
  end
end
|
# config/application.rb for GithubRecommender.
# NOTE(review): active_record's railtie is commented out — persistence
# presumably comes from another store; confirm before re-enabling.
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_model/railtie"
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module GithubRecommender
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
end
end
Activate AQL
# config/application.rb for GithubRecommender.
# NOTE(review): active_record's railtie is commented out — persistence
# presumably comes from another store; confirm before re-enabling.
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
require "active_model/railtie"
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module GithubRecommender
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Opt in to Guacamole's experimental AQL query support.
config.guacamole.experimental_features = [:aql_support]
end
end
|
# frozen_string_literal: true
class DigitalGiftsController < ApplicationController
before_action :set_digital_gift, only: %i[show sent]
skip_before_action :authenticate_user!, only: %i[api_create webhook]
skip_before_action :verify_authenticity_token, only: :webhook
# GET /digital_gifts
# GET /digital_gifts.json
def index
# Admins see every digital gift; everyone else sees only gifts belonging to
# members of their own team. Both paths are newest-first and paginated.
if current_user.admin?
@digital_gifts = DigitalGift.order(id: 'desc').includes(:reward).page(params[:page])
else
# NOTE(review): loads full user records just to collect ids — pluck(:id)
# would avoid instantiating them; also confirm .team can never be nil here.
team_ids = current_user.team.users.map(&:id)
@digital_gifts = DigitalGift.where(user_id: team_ids).order(id: 'desc').includes(:reward).page(params[:page])
end
end
# Webhook endpoint for gift-provider status callbacks: looks up the gift by
# the provider's id and records status changes from "gift.*" events.
# Request authenticity is checked via #valid_request? (defined elsewhere).
def webhook
  @digital_gift = DigitalGift.find_by(gift_id: params[:payload][:id])
  # BUG FIX: was `@digtial_gift` (typo) — that variable is always nil, so the
  # guard always failed the request and the update branch was unreachable.
  if @digital_gift.nil? || !valid_request?(request)
    render json: { success: false }
  else
    if params[:event].match? 'gift'
      # BUG FIX: String#delete removes every occurrence of the characters
      # g/i/f/t/. anywhere in the string (e.g. "gift.delivered" would become
      # "delvered"); delete_prefix strips only the leading "gift." namespace.
      @digital_gift.giftrocket_status = params[:event].delete_prefix('gift.')
      @digital_gift.save
    end
    render json: { success: true }
  end
end
# Marks the gift as sent, recording who sent it and when, then responds via
# the JS template. NOTE(review): the save result is ignored — no failure path.
def sent
@digital_gift.sent = true
@digital_gift.sent_by = current_user.id
@digital_gift.sent_at = Time.current
@digital_gift.save
respond_to do |format|
format.js {}
end
end
# GET /digital_gifts/1
# GET /digital_gifts/1.json
def show
# @digital_gift is set by the before_action; prepare an empty comment form
# bound to it for the show view.
@comment = Comment.new commentable: @digital_gift
end
# GET /digital_gifts/new
def new
# Blank gift for the new-record form.
@digital_gift = DigitalGift.new
end
# POST /digital_gifts — manually issue a digital gift (and its matching
# Reward record) for a giftable object such as an Invitation. Validation
# failures set flash[:error] and flip @success, which drives the JS response.
def create
# this is kinda horrific
# Resolve the giftable class from a whitelist hash; .fetch raises KeyError
# on unknown types instead of constantizing user input.
klass = GIFTABLE_TYPES.fetch(dg_params[:giftable_type])
@giftable = klass.find(dg_params[:giftable_id])
@success = true
# NOTE(review): an ActiveRecord-style .find raises RecordNotFound rather
# than returning nil, so this guard may be dead code — confirm klass's API.
if @giftable.nil?
flash[:error] = 'No giftable object present'
@success = false
end
# Invitations must be marked attended before they can be rewarded.
if params[:giftable_type] == 'Invitation' && !@giftable&.attended?
flash[:error] = "#{@giftable.person.full_name} isn't marked as 'attended'."
@success = false
end
# Only one digital gift per invitation.
if params[:giftable_type] == 'Invitation' && @giftable.rewards.find { |r| r.rewardable_type == 'DigitalGift' }.present?
flash[:error] = "#{@giftable.person.full_name} Already has a digital gift"
@success = false
end
# cover fees
# The +2 looks like a flat per-gift fee buffer on top of the amount —
# TODO confirm against the provider's fee schedule.
if params[:amount].to_money + 2.to_money >= current_user.available_budget
flash[:error] = 'Insufficient Team Budget'
@success = false # placeholder for now
end
# so, the APIs are wonky
# if params[:amount].to_money >= DigitalGift.current_budget
# flash[:error] = 'Insufficient Gift Rocket Budget'
# @success = false # placeholder for now
# end
if @success
# Build the gift and its bookkeeping Reward with mirrored attributes.
@dg = DigitalGift.new(user_id: current_user.id,
created_by: current_user.id,
amount: dg_params['amount'],
person_id: dg_params['person_id'],
giftable_type: dg_params['giftable_type'],
giftable_id: dg_params['giftable_id'])
@reward = Reward.new(user_id: current_user.id,
created_by: current_user.id,
person_id: dg_params['person_id'],
amount: dg_params['amount'],
reason: dg_params['reason'],
notes: dg_params['notes'],
giftable_type: dg_params['giftable_type'],
giftable_id: dg_params['giftable_id'],
finance_code: current_user&.team&.finance_code,
team: current_user&.team,
rewardable_type: 'DigitalGift')
if @dg.valid? # if it's not valid, error out
@dg.request_link # do the thing!
if @dg.save
# Link gift and reward in both directions; the reward's save result
# determines overall @success.
@reward.rewardable_id = @dg.id
@success = @reward.save
@dg.reward_id = @reward.id # is this necessary?
@dg.save
end
else
flash[:error] = @dg.errors
@success = false
end
end
respond_to do |format|
format.js {}
end
end
def api_create
# apithis is horrific too
# https://blog.arkency.com/2014/07/4-ways-to-early-return-from-a-rails-controller/
validate_api_args
# https://api.rubyonrails.org/v4.1.4/classes/ActionController/Metal.html#method-i-performed-3F
return if performed?
if @research_session.can_survey? && !@research_session.is_invited?(@person)
@invitation = Invitation.new(aasm_state: 'attended',
person_id: @person.id,
research_session_id: @research_session.id)
@invitation.save
@digital_gift = DigitalGift.new(user_id: @user.id,
created_by: @user.id,
amount: api_params['amount'],
person_id: @person.id,
giftable_type: 'Invitation',
giftable_id: @invitation.id)
@reward = Reward.new(user_id: @user.id,
created_by: @user.id,
person_id: @person.id,
amount: api_params['amount'],
reason: 'survey',
giftable_type: 'Invitation',
giftable_id: @invitation.id,
finance_code: @user&.team&.finance_code,
team: @user&.team,
rewardable_type: 'DigitalGift')
if @digital_gift.valid?
@digital_gift.request_link # do the thing!
if @digital_gift.save
@reward.rewardable_id = @digital_gift.id
@success = @reward.save
@digital_gift.reward_id = @reward.id # is this necessary?
@digital_gift.save
render status: :created, json: { success: true, link: @digital_gift.link, msg: 'Successfully created a gift card for you!' }.to_json
end
else
Airbrake.notify("Can't create Digital Gift #{@digital_gift.attributes}, #{@digital_gift.errors.full_messages.join("\n")}")
render status: :unprocessable_entity, json: { success: false, msg: @digital_gift.errors.full_messages }.to_json
end
else
Airbrake.notify("Can't create Digital Gift #{@digital_gift.attributes}, #{@digital_gift.errors.full_messages.join("\n")}")
render status: :unprocessable_entity, json: { success: false, msg: 'Something has gone wrong. we will be in touch soon!', errors: @digital_gift.errors.full_messages }.to_json
end
end
def validate_api_args
@user = User.find_by(token: request.headers['AUTHORIZATION']) if request.headers['AUTHORIZATION'].present?
render(status: :unauthorized) && return if @user.blank? || !@user.admin?
@research_session = ResearchSession.find(api_params['research_session_id'])
phone = PhonyRails.normalize_number(CGI.unescape(api_params['phone_number']))
@person = Person.active.find_by(phone_number: phone)
if @person.blank? || @research_session.blank?
Airbrake.notify("person: #{@person}, rs: #{@research_session}, params:#{api_params}")
render(status: :not_found) && return
end
# $2 fee possibly
if @user.available_budget + 2.to_money < api_params['amount'].to_money
Airbrake.notify("Can't create Digital Gift, insufficient budget! #{@digital_gift.attributes}, #{@digital_gift.errors.full_messages.join("\n")}")
render(status: :unprocessable_entity, json: { success: false, msg: 'Something has gone wrong, we will be in touch soon.' }.to_json) && return
end
# should check if we've already given a digital gift for this research session
end
# GET /digital_gifts/1/edit
# def edit; end
# we don't create, destroy or update these via controller
# # POST /digital_gifts
# # POST /digital_gifts.json
# def create
# @digital_gift = DigitalGift.new(digital_gift_params)
# respond_to do |format|
# if @digital_gift.save
# format.html { redirect_to @digital_gift, notice: 'DigitalGift was @successfully created.' }
# format.json { render :show, status: :created, location: @digital_gift }
# else
# format.html { render :new }
# format.json { render json: @digital_gift.errors, status: :unprocessable_entity }
# end
# end
# end
# # PATCH/PUT /digital_gifts/1
# # PATCH/PUT /digital_gifts/1.json
# def update
# respond_to do |format|
# if @digital_gift.update(digital_gift_params)
# format.html { redirect_to @digital_gift, notice: 'DigitalGift was @successfully updated.' }
# format.json { render :show, status: :ok, location: @digital_gift }
# else
# format.html { render :edit }
# format.json { render json: @digital_gift.errors, status: :unprocessable_entity }
# end
# end
# end
# DELETE /digital_gifts/1
# DELETE /digital_gifts/1.json
# def destroy
# @digital_gift.destroy
# respond_to do |format|
# format.html { redirect_to digital_gifts_url, notice: 'DigitalGift was @successfully destroyed.' }
# format.json { head :no_content }
# end
# end
private
def webhook_params
params.permit(:payload, :event, :digital_gift)
end
def api_params
params.permit(:person_id,
:api_token,
:research_session_id,
:phone_number,
:amount)
end
def dg_params
params.permit(:person_id,
:user_id,
:notes,
:reason,
:amount,
:giftable_type,
:giftable_id)
end
# Use callbacks to share common setup or constraints between actions.
def set_digital_gift
@digital_gift = DigitalGift.find(params[:id])
end
# Never trust parameters from the scary internet, only allow the white list through.
def digital_gift_params
params.fetch(:digital_gift, {})
end
def valid_request?(request)
signature_header = request.headers['GiftRocket-Webhook-Signature']
algorithm, received_signature = signature_header.split('=', 2)
raise Exception.new('Invalid algorithm') if algorithm != 'sha256'
expected_signature = OpenSSL::HMAC.hexdigest(
OpenSSL::Digest.new(algorithm), ENV['GIFT_ROCKET_WEBHOOK'], request.body.read
)
received_signature == expected_signature
end
end
Add sent tracking for API-created digital gifts
# frozen_string_literal: true
# Manages DigitalGift records (GiftRocket-backed gift cards): listing them,
# creating them from the web UI (#create) or a token-authenticated API
# (#api_create), marking them as sent, and consuming signed GiftRocket
# webhooks that report delivery status. API-created gifts are marked sent
# immediately, since the link is returned to the caller.
class DigitalGiftsController < ApplicationController
  before_action :set_digital_gift, only: %i[show sent]
  # api_create authenticates via the AUTHORIZATION token header and webhook
  # requests are verified by HMAC signature, so neither uses the session.
  skip_before_action :authenticate_user!, only: %i[api_create webhook]
  skip_before_action :verify_authenticity_token, only: :webhook

  # GET /digital_gifts
  # GET /digital_gifts.json
  # Admins see every gift; everyone else sees gifts belonging to their team.
  def index
    if current_user.admin?
      @digital_gifts = DigitalGift.order(id: 'desc').includes(:reward).page(params[:page])
    else
      team_ids = current_user.team.users.map(&:id)
      @digital_gifts = DigitalGift.where(user_id: team_ids).order(id: 'desc').includes(:reward).page(params[:page])
    end
  end

  # GiftRocket status callback. Looks the gift up by the provider's gift id
  # and records the new status when the request signature is valid.
  def webhook
    @digital_gift = DigitalGift.find_by(gift_id: params[:payload][:id])
    # BUGFIX: this previously read `@digtial_gift` (typo), which is always
    # nil, so every webhook was answered with success: false.
    if @digital_gift.nil? || !valid_request?(request)
      render json: { success: false }
    else
      if params[:event].match? 'gift'
        # BUGFIX: String#delete removes every occurrence of the listed
        # characters ("gift.delivered" became "delvered"); sub strips the
        # leading "gift." prefix instead.
        @digital_gift.giftrocket_status = params[:event].sub('gift.', '')
        @digital_gift.save
      end
      render json: { success: true }
    end
  end

  # Marks the gift as manually sent by the current user; responds via JS.
  def sent
    @digital_gift.sent = true
    @digital_gift.sent_by = current_user.id
    @digital_gift.sent_at = Time.current
    @digital_gift.save
    respond_to do |format|
      format.js {}
    end
  end

  # GET /digital_gifts/1
  # GET /digital_gifts/1.json
  def show
    @comment = Comment.new commentable: @digital_gift
  end

  # GET /digital_gifts/new
  def new
    @digital_gift = DigitalGift.new
  end

  # Creates a DigitalGift plus its backing Reward from the web UI.
  # Validates the giftable target, attendance, duplicate gifts and the team
  # budget before requesting a gift link; responds via JS either way.
  def create
    # this is kinda horrific
    klass = GIFTABLE_TYPES.fetch(dg_params[:giftable_type])
    @giftable = klass.find(dg_params[:giftable_id])
    @success = true
    if @giftable.nil?
      flash[:error] = 'No giftable object present'
      @success = false
    end
    if params[:giftable_type] == 'Invitation' && !@giftable&.attended?
      flash[:error] = "#{@giftable.person.full_name} isn't marked as 'attended'."
      @success = false
    end
    if params[:giftable_type] == 'Invitation' && @giftable.rewards.find { |r| r.rewardable_type == 'DigitalGift' }.present?
      flash[:error] = "#{@giftable.person.full_name} Already has a digital gift"
      @success = false
    end
    # cover fees: the $2 surcharge must also fit within the team budget
    if params[:amount].to_money + 2.to_money >= current_user.available_budget
      flash[:error] = 'Insufficient Team Budget'
      @success = false # placeholder for now
    end
    # so, the APIs are wonky
    # if params[:amount].to_money >= DigitalGift.current_budget
    #   flash[:error] = 'Insufficient Gift Rocket Budget'
    #   @success = false # placeholder for now
    # end
    if @success
      @dg = DigitalGift.new(user_id: current_user.id,
                            created_by: current_user.id,
                            amount: dg_params['amount'],
                            person_id: dg_params['person_id'],
                            giftable_type: dg_params['giftable_type'],
                            giftable_id: dg_params['giftable_id'])
      @reward = Reward.new(user_id: current_user.id,
                           created_by: current_user.id,
                           person_id: dg_params['person_id'],
                           amount: dg_params['amount'],
                           reason: dg_params['reason'],
                           notes: dg_params['notes'],
                           giftable_type: dg_params['giftable_type'],
                           giftable_id: dg_params['giftable_id'],
                           finance_code: current_user&.team&.finance_code,
                           team: current_user&.team,
                           rewardable_type: 'DigitalGift')
      if @dg.valid? # if it's not valid, error out
        @dg.request_link # do the thing!
        if @dg.save
          @reward.rewardable_id = @dg.id
          @success = @reward.save
          @dg.reward_id = @reward.id # is this necessary?
          @dg.save
        end
      else
        flash[:error] = @dg.errors
        @success = false
      end
    end
    respond_to do |format|
      format.js {}
    end
  end

  # Token-authenticated API endpoint: invites a person to a research session
  # and issues them a digital gift in one step. The gift is marked sent
  # right away because the link is handed back in the response.
  def api_create
    # apithis is horrific too
    # https://blog.arkency.com/2014/07/4-ways-to-early-return-from-a-rails-controller/
    validate_api_args
    # https://api.rubyonrails.org/v4.1.4/classes/ActionController/Metal.html#method-i-performed-3F
    return if performed?
    if @research_session.can_survey? && !@research_session.is_invited?(@person)
      @invitation = Invitation.new(aasm_state: 'attended',
                                   person_id: @person.id,
                                   research_session_id: @research_session.id)
      @invitation.save
      @digital_gift = DigitalGift.new(user_id: @user.id,
                                      created_by: @user.id,
                                      amount: api_params['amount'],
                                      person_id: @person.id,
                                      giftable_type: 'Invitation',
                                      giftable_id: @invitation.id)
      @reward = Reward.new(user_id: @user.id,
                           created_by: @user.id,
                           person_id: @person.id,
                           amount: api_params['amount'],
                           reason: 'survey',
                           giftable_type: 'Invitation',
                           giftable_id: @invitation.id,
                           finance_code: @user&.team&.finance_code,
                           team: @user&.team,
                           rewardable_type: 'DigitalGift')
      if @digital_gift.valid?
        @digital_gift.request_link # do the thing!
        # NOTE(review): if this save fails, no response is rendered — confirm
        # the intended behavior for that edge case.
        if @digital_gift.save
          @reward.rewardable_id = @digital_gift.id
          @success = @reward.save
          @digital_gift.reward_id = @reward.id # is this necessary?
          # API gifts are delivered via the returned link, so mark as sent now.
          @digital_gift.sent = true
          @digital_gift.sent_at = Time.current
          @digital_gift.sent_by = @user.id
          @digital_gift.save
          render status: :created, json: { success: true, link: @digital_gift.link, msg: 'Successfully created a gift card for you!' }.to_json
        end
      else
        Airbrake.notify("Can't create Digital Gift #{@digital_gift.attributes}, #{@digital_gift.errors.full_messages.join("\n")}")
        render status: :unprocessable_entity, json: { success: false, msg: @digital_gift.errors.full_messages }.to_json
      end
    else
      # BUGFIX: @digital_gift is never built on this path, so interpolating
      # @digital_gift.attributes raised NoMethodError on nil.
      Airbrake.notify("Can't create Digital Gift: research session #{@research_session.id} not surveyable or person #{@person.id} already invited")
      render status: :unprocessable_entity, json: { success: false, msg: 'Something has gone wrong. we will be in touch soon!', errors: [] }.to_json
    end
  end

  # Shared validation for api_create. Sets @user, @research_session and
  # @person; renders an error response (halting the caller via performed?)
  # when auth fails, lookups miss, or the budget is insufficient.
  def validate_api_args
    @user = User.find_by(token: request.headers['AUTHORIZATION']) if request.headers['AUTHORIZATION'].present?
    render(status: :unauthorized) && return if @user.blank? || !@user.admin?
    @research_session = ResearchSession.find(api_params['research_session_id'])
    phone = PhonyRails.normalize_number(CGI.unescape(api_params['phone_number']))
    @person = Person.active.find_by(phone_number: phone)
    if @person.blank? || @research_session.blank?
      Airbrake.notify("person: #{@person}, rs: #{@research_session}, params:#{api_params}")
      render(status: :not_found) && return
    end
    # $2 fee possibly
    if @user.available_budget + 2.to_money < api_params['amount'].to_money
      # BUGFIX: @digital_gift does not exist yet at this point; the old
      # message interpolated @digital_gift.attributes and raised
      # NoMethodError instead of reporting the budget problem.
      Airbrake.notify("Can't create Digital Gift, insufficient budget! user: #{@user.id}, params: #{api_params}")
      render(status: :unprocessable_entity, json: { success: false, msg: 'Something has gone wrong, we will be in touch soon.' }.to_json) && return
    end
    # should check if we've already given a digital gift for this research session
  end

  # GET /digital_gifts/1/edit
  # def edit; end
  # we don't create, destroy or update these via controller
  # # POST /digital_gifts
  # # POST /digital_gifts.json
  # def create
  #   @digital_gift = DigitalGift.new(digital_gift_params)
  #   respond_to do |format|
  #     if @digital_gift.save
  #       format.html { redirect_to @digital_gift, notice: 'DigitalGift was @successfully created.' }
  #       format.json { render :show, status: :created, location: @digital_gift }
  #     else
  #       format.html { render :new }
  #       format.json { render json: @digital_gift.errors, status: :unprocessable_entity }
  #     end
  #   end
  # end
  # # PATCH/PUT /digital_gifts/1
  # # PATCH/PUT /digital_gifts/1.json
  # def update
  #   respond_to do |format|
  #     if @digital_gift.update(digital_gift_params)
  #       format.html { redirect_to @digital_gift, notice: 'DigitalGift was @successfully updated.' }
  #       format.json { render :show, status: :ok, location: @digital_gift }
  #     else
  #       format.html { render :edit }
  #       format.json { render json: @digital_gift.errors, status: :unprocessable_entity }
  #     end
  #   end
  # end
  # DELETE /digital_gifts/1
  # DELETE /digital_gifts/1.json
  # def destroy
  #   @digital_gift.destroy
  #   respond_to do |format|
  #     format.html { redirect_to digital_gifts_url, notice: 'DigitalGift was @successfully destroyed.' }
  #     format.json { head :no_content }
  #   end
  # end

  private

  def webhook_params
    params.permit(:payload, :event, :digital_gift)
  end

  # Strong parameters for the API endpoint.
  def api_params
    params.permit(:person_id,
                  :api_token,
                  :research_session_id,
                  :phone_number,
                  :amount)
  end

  # Strong parameters for web-UI gift creation.
  def dg_params
    params.permit(:person_id,
                  :user_id,
                  :notes,
                  :reason,
                  :amount,
                  :giftable_type,
                  :giftable_id)
  end

  # Use callbacks to share common setup or constraints between actions.
  def set_digital_gift
    @digital_gift = DigitalGift.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  def digital_gift_params
    params.fetch(:digital_gift, {})
  end

  # Verifies the GiftRocket webhook HMAC signature ("sha256=<hexdigest>")
  # against the raw request body using the shared webhook secret.
  def valid_request?(request)
    signature_header = request.headers['GiftRocket-Webhook-Signature']
    algorithm, received_signature = signature_header.split('=', 2)
    raise Exception.new('Invalid algorithm') if algorithm != 'sha256'
    expected_signature = OpenSSL::HMAC.hexdigest(
      OpenSSL::Digest.new(algorithm), ENV['GIFT_ROCKET_WEBHOOK'], request.body.read
    )
    received_signature == expected_signature
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application-wide boot configuration for the Classroom Rails app.
module Classroom
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true
    # Add bower assets to the path (both to Sass imports and the asset pipeline).
    root.join('vendor', 'assets', 'bower_components').to_s.tap do |bower_path|
      config.sass.load_paths << bower_path
      config.assets.paths << bower_path
    end
    # Append directories to autoload paths
    config.autoload_paths += Dir["#{Rails.root}/lib"]
    # Precompile Fonts
    # Compile all font types except octicons-local
    config.assets.precompile << %r(octicons/octicons/octicons+\.(?:svg|eot|woff|ttf)$)
    # Configure the generators
    config.generators do |g|
      g.test_framework :rspec, fixture: false
    end
    # GC Profiler for analytics
    GC::Profiler.enable
    # Use SideKiq for background jobs
    config.active_job.queue_adapter = :sidekiq
    # Health checks endpoint for monitoring. Each check exercises one backing
    # service; a check that raises is reported as failing.
    if ENV['PINGLISH_ENABLED'] == 'true'
      config.middleware.use Pinglish do |ping|
        ping.check :db do
          # Touch the connection; the table count itself is discarded.
          ActiveRecord::Base.connection.tables.size
          'ok'
        end
        ping.check :memcached do
          Rails.cache.dalli.checkout.alive!
          'ok'
        end
        ping.check :redis do
          Sidekiq.redis { |redis| redis.ping }
          'ok'
        end
        ping.check :elasticsearch do
          # NOTE(review): returning false signals failure with no diagnostic;
          # consider raising with the actual cluster status instead.
          Chewy.client.cluster.health['status'] == 'green' ? 'ok' : false
        end
      end
    end
  end
end
Raise meaningful exception instead of returning false when ES is down.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Application-wide boot configuration for the Classroom Rails app.
module Classroom
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true
    # Add bower assets to the path (both to Sass imports and the asset pipeline).
    root.join('vendor', 'assets', 'bower_components').to_s.tap do |bower_path|
      config.sass.load_paths << bower_path
      config.assets.paths << bower_path
    end
    # Append directories to autoload paths
    config.autoload_paths += Dir["#{Rails.root}/lib"]
    # Precompile Fonts
    # Compile all font types except octicons-local
    config.assets.precompile << %r(octicons/octicons/octicons+\.(?:svg|eot|woff|ttf)$)
    # Configure the generators
    config.generators do |g|
      g.test_framework :rspec, fixture: false
    end
    # GC Profiler for analytics
    GC::Profiler.enable
    # Use SideKiq for background jobs
    config.active_job.queue_adapter = :sidekiq
    # Health checks endpoint for monitoring. Each check exercises one backing
    # service; a check that raises is reported as failing.
    if ENV['PINGLISH_ENABLED'] == 'true'
      config.middleware.use Pinglish do |ping|
        ping.check :db do
          # Touch the connection; the table count itself is discarded.
          ActiveRecord::Base.connection.tables.size
          'ok'
        end
        ping.check :memcached do
          Rails.cache.dalli.checkout.alive!
          'ok'
        end
        ping.check :redis do
          Sidekiq.redis { |redis| redis.ping }
          'ok'
        end
        ping.check :elasticsearch do
          # Raise a meaningful error (rather than returning false) when the
          # cluster is anything other than green.
          status = Chewy.client.cluster.health['status'] || 'unavailable'
          if status == 'green'
            'ok'
          else
            raise "Elasticsearch status is #{status}"
          end
        end
      end
    end
  end
end
|
module Effective
  module Providers
    # Checkout provider for zero-total orders: no payment gateway is
    # involved, the order is simply recorded as purchased.
    module Free
      extend ActiveSupport::Concern

      # Processes a free checkout. Orders with a non-zero total are bounced
      # back to the order page with an error; genuinely free orders are
      # marked purchased with placeholder payment details.
      def free
        @order ||= Order.find(params[:id])
        EffectiveOrders.authorized?(self, :update, @order)

        if @order.free?
          order_purchased(
            details: 'free order. no payment required.',
            provider: 'free',
            card: 'none',
            purchased_url: params[:purchased_url],
            declined_url: params[:declined_url],
          )
        else
          flash[:danger] = 'Unable to process free order with a non-zero total'
          redirect_to effective_orders.order_path(@order)
        end
      end

      # Strong parameters for the free checkout form.
      def free_params
        params.require(:effective_order).permit(:purchased_url, :declined_url)
      end
    end
  end
end
Don't send payment receipt when order is free
module Effective
  module Providers
    # Checkout provider for zero-total orders: no payment gateway is
    # involved, the order is simply recorded as purchased.
    module Free
      extend ActiveSupport::Concern

      # Processes a free checkout. Orders with a non-zero total are bounced
      # back to the order page with an error; genuinely free orders are
      # marked purchased with placeholder payment details and no payment
      # receipt email.
      def free
        @order ||= Order.find(params[:id])
        EffectiveOrders.authorized?(self, :update, @order)

        if @order.free?
          order_purchased(
            details: 'free order. no payment required.',
            provider: 'free',
            card: 'none',
            purchased_url: params[:purchased_url],
            declined_url: params[:declined_url],
            email: false
          )
        else
          flash[:danger] = 'Unable to process free order with a non-zero total'
          redirect_to effective_orders.order_path(@order)
        end
      end

      # Strong parameters for the free checkout form.
      def free_params
        params.require(:effective_order).permit(:purchased_url, :declined_url)
      end
    end
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Application-wide boot configuration for The Odin Project Rails app.
module Theodinproject
  class Application < Rails::Application
    # NOTE(review): presumably disabled so assets can precompile without a
    # database/environment boot (common on Heroku) — confirm before removing.
    config.assets.initialize_on_precompile = false
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true
    # Enable the asset pipeline
    config.assets.enabled = true
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
Remove the `config.assets.initialize_on_precompile = false` setting
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
# Application-wide boot configuration for The Odin Project Rails app.
module Theodinproject
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable escaping HTML in JSON.
    config.active_support.escape_html_entities_in_json = true
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    config.active_record.whitelist_attributes = true
    # Enable the asset pipeline
    config.assets.enabled = true
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
|
# Copyright © 2012 The Pennsylvania State University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class GenericFilesController < ApplicationController
include Hydra::Controller::ControllerBehavior
include Hydra::AssetsControllerHelper # for apply_depositor_metadata method
include Hydra::Controller::UploadBehavior # for add_posted_blob_to_asset method
include ScholarSphere::Noid # for normalize_identifier method
rescue_from AbstractController::ActionNotFound, :with => :render_404
# actions: audit, index, create, new, edit, show, update, destroy, permissions, citation
before_filter :authenticate_user!, :except => [:show, :citation]
before_filter :has_access?, :except => [:show]
before_filter :enforce_access_controls
before_filter :find_by_id, :except => [:index, :create, :new]
prepend_before_filter :normalize_identifier, :except => [:index, :create, :new]
# routed to /files/new
# Prepares the upload form: a fresh (unsaved) GenericFile plus a freshly
# minted batch noid used to group files uploaded together.
def new
  @batch_noid = ScholarSphere::Noid.noidify(ScholarSphere::IdService.mint)
  @generic_file = GenericFile.new
end
# routed to /files/:id/edit
# Prepares the edit form: the user's groups (for permission grants) and the
# file's editable descriptive terms.
def edit
  @groups = current_user.groups
  @terms = @generic_file.get_terms
end
# routed to /files/:id
# Returns every GenericFile as a JSON array of jQuery-upload hashes.
def index
  @generic_files = GenericFile.find(:all, :rows => GenericFile.count)
  uploads = @generic_files.map(&:to_jq_upload)
  render :json => uploads.to_json
end
# routed to /files/:id (DELETE)
# Deletes the file, then (best-effort) enqueues a delete event before
# redirecting to the dashboard.
def destroy
  # Capture the noid before deletion so the event job can still reference it.
  pid = @generic_file.noid
  @generic_file.delete
  begin
    Resque.enqueue(ContentDeleteEventJob, pid, current_user.login)
  rescue Redis::CannotConnectError
    # Event logging is best-effort: a Redis outage must not block the delete.
    logger.error "Redis is down!"
  end
  redirect_to dashboard_path, :notice => render_to_string(:partial=>'generic_files/asset_deleted_flash', :locals => { :generic_file => @generic_file })
end
# routed to /files (POST)
# Handles a jQuery-file-upload POST. Validates the presence/size of the
# upload and acceptance of the terms of service, then delegates the actual
# object creation to create_and_save_generic_file. Always responds with a
# JSON array (the format the upload widget expects).
def create
  begin
    # retval captures the render result so it can be returned at the end.
    retval = " "
    # check error condition No files
    if !params.has_key?(:files)
      retval = render :json => [{:error => "Error! No file to save"}].to_json
    # check error condition empty file
    elsif ((params[:files][0].respond_to?(:tempfile)) && (params[:files][0].tempfile.size == 0))
      retval = render :json => [{ :name => params[:files][0].original_filename, :error => "Error! Zero Length File!"}].to_json
    elsif ((params[:files][0].respond_to?(:size)) && (params[:files][0].size == 0))
      retval = render :json => [{ :name => params[:files][0].original_filename, :error => "Error! Zero Length File!"}].to_json
    elsif (params[:terms_of_service] != '1')
      retval = render :json => [{ :name => params[:files][0].original_filename, :error => "You must accept the terms of service!"}].to_json
    # process file
    else
      # Sets @generic_file on success; leaves it nil on failure (e.g. virus).
      create_and_save_generic_file
      if @generic_file
        begin
          Resque.enqueue(ContentDepositEventJob, @generic_file.pid, current_user.login)
        rescue Redis::CannotConnectError
          # Event logging is best-effort; the deposit itself already succeeded.
          logger.error "Redis is down!"
        end
        respond_to do |format|
          format.html {
            # text/html content type keeps older browsers' iframe transport happy.
            retval = render :json => [@generic_file.to_jq_upload].to_json,
                            :content_type => 'text/html',
                            :layout => false
          }
          format.json {
            retval = render :json => [@generic_file.to_jq_upload].to_json
          }
        end
      else
        retval = render :json => [{:error => "Error creating generic file."}].to_json
      end
    end
  rescue => error
    logger.warn "GenericFilesController::create rescued error #{error.backtrace}"
    retval = render :json => [{:error => "Error occurred while creating generic file."}].to_json
  ensure
    # remove the tempfile (only if it is a temp file)
    params[:files][0].tempfile.delete if params[:files][0].respond_to?(:tempfile)
  end
  return retval
end
# routed to /files/:id/citation
def citation
  # Intentionally empty: Rails renders the citation view implicitly.
end
# routed to /files/:id
# Shows a file, exposing the viewer's permissions and recent events to the
# template; also exports EndNote format.
def show
  # Permissions come from the solr document, not the Fedora object.
  perms = permissions_solr_doc_for_id(@generic_file.pid)
  @can_read = can? :read, perms
  @can_edit = can? :edit, perms
  @events = @generic_file.events(100) # up to 100 events — ordering TBD, confirm in GenericFile#events
  respond_to do |format|
    format.html
    format.endnote { render :text => @generic_file.export_as_endnote }
  end
end
# routed to /files/:id/audit (POST)
# Runs a fixity audit on the file's datastreams and returns the result as JSON.
def audit
  render :json=>@generic_file.audit
end
# routed to /files/:id (PUT)
# Updates a file. Three concerns in sequence: (1) optionally restore an
# older content revision, (2) optionally replace the content with a newly
# uploaded (virus-checked) file, (3) apply metadata/visibility changes.
# Exactly one event job is enqueued: a version event suppresses the plain
# update event.
def update
  version_event = false
  # Restore a prior version when a revision id other than the latest was submitted.
  if params.has_key?(:revision) and params[:revision] != @generic_file.content.latest_version.versionID
    revision = @generic_file.content.get_version(params[:revision])
    @generic_file.add_file_datastream(revision.content, :dsid => 'content')
    version_event = true
    begin
      Resque.enqueue(ContentRestoredVersionEventJob, @generic_file.pid, current_user.login, params[:revision])
    rescue Redis::CannotConnectError
      # Event logging is best-effort; the restore itself already happened.
      logger.error "Redis is down!"
    end
  end
  # Replace the content datastream when a new file was uploaded.
  if params.has_key?(:filedata)
    # Abort if the upload fails the virus scan (virus_check sets flash).
    return unless virus_check(params[:filedata]) == 0
    add_posted_blob_to_asset(@generic_file, params[:filedata])
    version_event = true
    begin
      Resque.enqueue(ContentNewVersionEventJob, @generic_file.pid, current_user.login)
    rescue Redis::CannotConnectError
      logger.error "Redis is down!"
    end
  end
  # Strip upload bookkeeping and system-managed fields from mass-assignment.
  @generic_file.update_attributes(params[:generic_file].reject { |k,v| %w{ Filedata Filename revision part_of date_modified date_uploaded format }.include? k})
  @generic_file.set_visibility(params)
  @generic_file.date_modified = Time.now.ctime
  @generic_file.save
  # do not trigger an update event if a version event has already been triggered
  begin
    Resque.enqueue(ContentUpdateEventJob, @generic_file.pid, current_user.login) unless version_event
  rescue Redis::CannotConnectError
    logger.error "Redis is down!"
  end
  record_version_committer(@generic_file, current_user)
  redirect_to edit_generic_file_path(:tab => params[:redirect_tab]), :notice => render_to_string(:partial=>'generic_files/asset_updated_flash', :locals => { :generic_file => @generic_file })
end
# routed to /files/:id/permissions (POST)
# POST /files/:id/permissions — parses and applies permission params to
# the file, persists it, enqueues an update event and redirects back to
# the edit screen with a flash message.
def permissions
  ScholarSphere::GenericFile::Permissions.parse_permissions(params)
  protected_keys = %w{ Filedata Filename revision}
  attributes = params[:generic_file].reject { |key, _value| protected_keys.include? key }
  @generic_file.update_attributes(attributes)
  @generic_file.save
  Resque.enqueue(ContentUpdateEventJob, @generic_file.pid, current_user.login)
  redirect_to edit_generic_file_path, :notice => render_to_string(:partial=>'generic_files/asset_updated_flash', :locals => { :generic_file => @generic_file })
end
protected
# Records which user committed the latest content version so version
# history can show provenance. No-op when the content datastream is
# not (yet?) present.
def record_version_committer(generic_file, user)
  latest = generic_file.content.latest_version
  return if latest.nil?
  VersionCommitter.create(:obj_id => latest.pid,
                          :datastream_id => latest.dsid,
                          :version_id => latest.versionID,
                          :committer_login => user.login)
end
# Before-filter: loads the requested GenericFile into @generic_file.
def find_by_id
  @generic_file = GenericFile.find params[:id]
end
# Scans the uploaded file with ClamAV and returns the scanner status
# (0 means clean). On any other status, surfaces a flash error to the
# user and logs a warning with the full file inspect output.
def virus_check(file)
  stat = ClamAV.instance.scanfile(file.path)
  unless stat == 0
    flash[:error] = "Virus checking did not pass for #{file.original_filename} status = #{stat}"
    logger.warn "Virus checking did not pass for #{file.inspect} status = #{stat}"
  end
  stat
end
# Builds and persists a GenericFile from the first uploaded file in
# params[:files]. Returns the saved GenericFile, or nil when no file was
# posted or the virus scan fails. Saves are retried on transient Solr
# errors (bounded at 3 attempts); zip uploads also enqueue an UnzipJob.
def create_and_save_generic_file
  unless params.has_key?(:files)
    logger.warn "!!!! No Files !!!!"
    return
  end
  file = params[:files][0]
  return nil unless virus_check(file) == 0
  @generic_file = GenericFile.new
  @generic_file.terms_of_service = params[:terms_of_service]
  # if we want to be able to save zero length files then we can use this to make the file 1 byte instead of zero length and fedora will take it
  #if (file.tempfile.size == 0)
  #  logger.warn "Encountered an empty file... Creating a new temp file with on space."
  #  f = Tempfile.new ("emptyfile")
  #  f.write " "
  #  f.rewind
  #  file.tempfile = f
  #end
  add_posted_blob_to_asset(@generic_file, file)
  apply_depositor_metadata(@generic_file)
  @generic_file.date_uploaded = Time.now.ctime
  @generic_file.date_modified = Time.now.ctime
  @generic_file.relative_path = params[:relative_path] if params.has_key?(:relative_path)
  @generic_file.creator = current_user.name
  if params.has_key?(:batch_id)
    batch_id = ScholarSphere::Noid.namespaceize(params[:batch_id])
    @generic_file.add_relationship("isPartOf", "info:fedora/#{batch_id}")
  else
    logger.warn "unable to find batch to attach to"
  end
  save_tries = 0
  begin
    @generic_file.save
  rescue RSolr::Error::Http => error
    logger.warn "GenericFilesController::create_and_save_generic_file Caught RSOLR error #{error.inspect}"
    # BUG FIX: Ruby has no ++ operator — the original `save_tries++` never
    # incremented the counter, so the retry loop could spin forever on
    # persistent Solr errors.
    save_tries += 1
    # fail for good if the tries is greater than 3
    rescue_action_without_handler(error) if save_tries >= 3
    sleep 0.01
    retry
  end
  record_version_committer(@generic_file, current_user)
  begin
    Resque.enqueue(UnzipJob, @generic_file.pid) if file.content_type == 'application/zip'
  rescue Redis::CannotConnectError
    logger.error "Redis is down!"
  end
  return @generic_file
end
end
refs #1485: checkpoint, trying to work around BL 3.7 issue w/ calling add_facet_params outside the BL controller context
# -*- coding: utf-8 -*-
# Copyright © 2012 The Pennsylvania State University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Controller for ScholarSphere GenericFile objects: handles upload,
# display, metadata editing, content versioning, permissions, audit
# and deletion of deposited files.
class GenericFilesController < ApplicationController
  include Hydra::Controller::ControllerBehavior
  include Hydra::AssetsControllerHelper # for apply_depositor_metadata method
  include Hydra::Controller::UploadBehavior # for add_posted_blob_to_asset method
  include Blacklight::Configurable # comply with BL 3.7
  include ScholarSphere::Noid # for normalize_identifier method

  # This is needed as of BL 3.7
  self.copy_blacklight_config_from(CatalogController)

  rescue_from AbstractController::ActionNotFound, :with => :render_404

  # actions: audit, index, create, new, edit, show, update, destroy, permissions, citation
  before_filter :authenticate_user!, :except => [:show, :citation]
  before_filter :has_access?, :except => [:show]
  before_filter :enforce_access_controls
  before_filter :find_by_id, :except => [:index, :create, :new]
  prepend_before_filter :normalize_identifier, :except => [:index, :create, :new]

  # routed to /files/new
  def new
    @generic_file = GenericFile.new
    @batch_noid = ScholarSphere::Noid.noidify(ScholarSphere::IdService.mint)
  end

  # routed to /files/:id/edit
  def edit
    @terms = @generic_file.get_terms
    @groups = current_user.groups
  end

  # routed to /files/:id
  def index
    @generic_files = GenericFile.find(:all, :rows => GenericFile.count)
    render :json => @generic_files.map(&:to_jq_upload).to_json
  end

  # routed to /files/:id (DELETE)
  def destroy
    pid = @generic_file.noid
    @generic_file.delete
    begin
      Resque.enqueue(ContentDeleteEventJob, pid, current_user.login)
    rescue Redis::CannotConnectError
      logger.error "Redis is down!"
    end
    redirect_to dashboard_path, :notice => render_to_string(:partial=>'generic_files/asset_deleted_flash', :locals => { :generic_file => @generic_file })
  end

  # routed to /files (POST)
  # Validates the upload (file present, non-empty, terms of service
  # accepted), then delegates to create_and_save_generic_file and renders
  # a jQuery-upload-compatible JSON response. The tempfile is always
  # cleaned up in the ensure block.
  def create
    begin
      retval = " "
      # check error condition No files
      if !params.has_key?(:files)
        retval = render :json => [{:error => "Error! No file to save"}].to_json
      # check error condition empty file
      elsif ((params[:files][0].respond_to?(:tempfile)) && (params[:files][0].tempfile.size == 0))
        retval = render :json => [{ :name => params[:files][0].original_filename, :error => "Error! Zero Length File!"}].to_json
      elsif ((params[:files][0].respond_to?(:size)) && (params[:files][0].size == 0))
        retval = render :json => [{ :name => params[:files][0].original_filename, :error => "Error! Zero Length File!"}].to_json
      elsif (params[:terms_of_service] != '1')
        retval = render :json => [{ :name => params[:files][0].original_filename, :error => "You must accept the terms of service!"}].to_json
      # process file
      else
        create_and_save_generic_file
        if @generic_file
          begin
            Resque.enqueue(ContentDepositEventJob, @generic_file.pid, current_user.login)
          rescue Redis::CannotConnectError
            logger.error "Redis is down!"
          end
          respond_to do |format|
            format.html {
              retval = render :json => [@generic_file.to_jq_upload].to_json,
                              :content_type => 'text/html',
                              :layout => false
            }
            format.json {
              retval = render :json => [@generic_file.to_jq_upload].to_json
            }
          end
        else
          retval = render :json => [{:error => "Error creating generic file."}].to_json
        end
      end
    rescue => error
      logger.warn "GenericFilesController::create rescued error #{error.backtrace}"
      retval = render :json => [{:error => "Error occurred while creating generic file."}].to_json
    ensure
      # remove the tempfile (only if it is a temp file)
      # BUG FIX: guard on the :files key — when no file was posted,
      # params[:files] is nil and the original `params[:files][0]` raised
      # NoMethodError inside ensure, masking the intended JSON response.
      if params.has_key?(:files) && params[:files][0].respond_to?(:tempfile)
        params[:files][0].tempfile.delete
      end
    end
    return retval
  end

  # routed to /files/:id/citation
  def citation
  end

  # routed to /files/:id
  # Computes @can_read/@can_edit from the solr permissions document and
  # loads the 100 most recent events; supports HTML and EndNote export.
  def show
    perms = permissions_solr_doc_for_id(@generic_file.pid)
    @can_read = can? :read, perms
    @can_edit = can? :edit, perms
    @events = @generic_file.events(100)
    respond_to do |format|
      format.html
      format.endnote { render :text => @generic_file.export_as_endnote }
    end
  end

  # routed to /files/:id/audit (POST)
  def audit
    render :json=>@generic_file.audit
  end

  # routed to /files/:id (PUT)
  # Handles three concerns in order: restoring an older content version
  # (:revision), replacing content (:filedata, virus-checked), and
  # applying metadata/visibility. ContentUpdateEventJob fires only when
  # no version event already did.
  def update
    version_event = false
    if params.has_key?(:revision) and params[:revision] != @generic_file.content.latest_version.versionID
      revision = @generic_file.content.get_version(params[:revision])
      @generic_file.add_file_datastream(revision.content, :dsid => 'content')
      version_event = true
      begin
        Resque.enqueue(ContentRestoredVersionEventJob, @generic_file.pid, current_user.login, params[:revision])
      rescue Redis::CannotConnectError
        logger.error "Redis is down!"
      end
    end
    if params.has_key?(:filedata)
      return unless virus_check(params[:filedata]) == 0
      add_posted_blob_to_asset(@generic_file, params[:filedata])
      version_event = true
      begin
        Resque.enqueue(ContentNewVersionEventJob, @generic_file.pid, current_user.login)
      rescue Redis::CannotConnectError
        logger.error "Redis is down!"
      end
    end
    @generic_file.update_attributes(params[:generic_file].reject { |k,v| %w{ Filedata Filename revision part_of date_modified date_uploaded format }.include? k})
    @generic_file.set_visibility(params)
    @generic_file.date_modified = Time.now.ctime
    @generic_file.save
    # do not trigger an update event if a version event has already been triggered
    begin
      Resque.enqueue(ContentUpdateEventJob, @generic_file.pid, current_user.login) unless version_event
    rescue Redis::CannotConnectError
      logger.error "Redis is down!"
    end
    record_version_committer(@generic_file, current_user)
    redirect_to edit_generic_file_path(:tab => params[:redirect_tab]), :notice => render_to_string(:partial=>'generic_files/asset_updated_flash', :locals => { :generic_file => @generic_file })
  end

  # routed to /files/:id/permissions (POST)
  def permissions
    ScholarSphere::GenericFile::Permissions.parse_permissions(params)
    @generic_file.update_attributes(params[:generic_file].reject { |k,v| %w{ Filedata Filename revision}.include? k})
    @generic_file.save
    Resque.enqueue(ContentUpdateEventJob, @generic_file.pid, current_user.login)
    redirect_to edit_generic_file_path, :notice => render_to_string(:partial=>'generic_files/asset_updated_flash', :locals => { :generic_file => @generic_file })
  end

  protected

  # Records which user committed the latest content version; no-op when
  # the content datastream is not (yet?) present.
  def record_version_committer(generic_file, user)
    version = generic_file.content.latest_version
    return if version.nil?
    VersionCommitter.create(:obj_id => version.pid,
                            :datastream_id => version.dsid,
                            :version_id => version.versionID,
                            :committer_login => user.login)
  end

  # Before-filter: loads the requested GenericFile into @generic_file.
  def find_by_id
    @generic_file = GenericFile.find(params[:id])
  end

  # Scans an upload with ClamAV; 0 means clean. Non-zero statuses are
  # surfaced via flash and the log.
  def virus_check(file)
    stat = ClamAV.instance.scanfile(file.path)
    flash[:error] = "Virus checking did not pass for #{file.original_filename} status = #{stat}" unless stat == 0
    logger.warn "Virus checking did not pass for #{file.inspect} status = #{stat}" unless stat == 0
    return stat
  end

  # Builds and persists a GenericFile from params[:files][0]; returns it,
  # or nil when no file was posted or the virus scan fails.
  def create_and_save_generic_file
    unless params.has_key?(:files)
      logger.warn "!!!! No Files !!!!"
      return
    end
    file = params[:files][0]
    return nil unless virus_check(file) == 0
    @generic_file = GenericFile.new
    @generic_file.terms_of_service = params[:terms_of_service]
    # if we want to be able to save zero length files then we can use this to make the file 1 byte instead of zero length and fedora will take it
    #if (file.tempfile.size == 0)
    #  logger.warn "Encountered an empty file... Creating a new temp file with on space."
    #  f = Tempfile.new ("emptyfile")
    #  f.write " "
    #  f.rewind
    #  file.tempfile = f
    #end
    add_posted_blob_to_asset(@generic_file, file)
    apply_depositor_metadata(@generic_file)
    @generic_file.date_uploaded = Time.now.ctime
    @generic_file.date_modified = Time.now.ctime
    @generic_file.relative_path = params[:relative_path] if params.has_key?(:relative_path)
    @generic_file.creator = current_user.name
    if params.has_key?(:batch_id)
      batch_id = ScholarSphere::Noid.namespaceize(params[:batch_id])
      @generic_file.add_relationship("isPartOf", "info:fedora/#{batch_id}")
    else
      logger.warn "unable to find batch to attach to"
    end
    save_tries = 0
    begin
      @generic_file.save
    rescue RSolr::Error::Http => error
      logger.warn "GenericFilesController::create_and_save_generic_file Caught RSOLR error #{error.inspect}"
      # BUG FIX: Ruby has no ++ operator — `save_tries++` never incremented
      # the counter, so the retry loop could spin forever on persistent
      # Solr errors.
      save_tries += 1
      # fail for good if the tries is greater than 3
      rescue_action_without_handler(error) if save_tries >= 3
      sleep 0.01
      retry
    end
    record_version_committer(@generic_file, current_user)
    begin
      Resque.enqueue(UnzipJob, @generic_file.pid) if file.content_type == 'application/zip'
    rescue Redis::CannotConnectError
      logger.error "Redis is down!"
    end
    return @generic_file
  end
end
|
=begin
Copyright 2012 Shared Learning Collaborative, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'test_helper'
# Unit tests for the App model: listing, construction, admin filtering
# and version validation rules.
class AppTest < ActiveSupport::TestCase
  test "get apps" do
    apps = App.all
    assert_not_nil(apps)
    # FIX: assert_equal takes (expected, actual) — the fixture set has 2 apps.
    assert_equal(2, apps.size)
  end

  test "new app" do
    app = App.new
    assert_not_nil(app)
  end

  test "apps can filter the admin app" do
    full = App.all
    filtered = App.all_but_admin
    # FIX: the original message ("We shouldn't have one less app")
    # contradicted the asserted condition — filtering removes exactly the
    # one admin app, so the filtered list SHOULD have one less entry.
    assert(full.size == filtered.size + 1, "Filtered list should have exactly one less app (the admin app)")
    filtered.each do |app|
      assert(!app.respond_to?(:endpoints), "No app should have endpoints")
    end
  end

  test "app version validation" do
    app = build_app
    app.version = nil
    assert !app.valid?, "App shouldn't be valid with a nil version"
    app.version = ""
    assert !app.valid?, "App shouldn't be valid with an empty string"
    app.version = "123445123412341235123412351512323513413413412351362354134663241"
    assert !app.valid?, "App should be less than 25 characters"
    app.version = "Super duper!"
    assert !app.valid?, "App can't contain special characters"
    app.version = "Waffles"
    assert app.valid?, "App is valid with a string"
  end

  # test "save app" do
  #   app = App.new(@app_fixtures["new"])
  #   assert(app.valid?, "This should be valid")
  #   assert_nothing_raised(Exception) { app.save }
  # end

  private

  # Builds an App populated with placeholder attributes that satisfy the
  # model's validations.
  def build_app
    app = App.new
    app.name = "name"
    app.description = "description"
    app.redirect_uri = "https://derp"
    app.application_url = "https://derp"
    app.vendor = "McDonalds"
    app.version = "1.0"
    app
  end
end
Fixed rails unit test
=begin
Copyright 2012 Shared Learning Collaborative, LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'test_helper'
# Unit tests for the App model: listing, construction, admin filtering
# and version validation rules.
class AppTest < ActiveSupport::TestCase
  test "get apps" do
    apps = App.all
    assert_not_nil(apps)
    # FIX: assert_equal takes (expected, actual) — the fixture set has 2 apps.
    assert_equal(2, apps.size)
  end

  test "new app" do
    app = App.new
    assert_not_nil(app)
  end

  test "apps can filter the admin app" do
    full = App.all
    filtered = App.all_but_admin
    # FIX: the original message ("We shouldn't have one less app")
    # contradicted the asserted condition — filtering removes exactly the
    # one admin app, so the filtered list SHOULD have one less entry.
    assert(full.size == filtered.size + 1, "Filtered list should have exactly one less app (the admin app)")
    filtered.each do |app|
      assert(!app.respond_to?(:endpoints), "No app should have endpoints")
    end
  end

  test "app version validation" do
    app = build_app
    app.version = nil
    assert !app.valid?, "App shouldn't be valid with a nil version"
    app.version = ""
    assert !app.valid?, "App shouldn't be valid with an empty string"
    app.version = "123445123412341235123412351512323513413413412351362354134663241"
    assert !app.valid?, "App should be less than 25 characters"
    app.version = "Super duper!"
    assert !app.valid?, "App can't contain special characters"
    app.version = "Waffles"
    assert app.valid?, "App is valid with a string"
  end

  # test "save app" do
  #   app = App.new(@app_fixtures["new"])
  #   assert(app.valid?, "This should be valid")
  #   assert_nothing_raised(Exception) { app.save }
  # end

  private

  # Builds an App populated with placeholder attributes that satisfy the
  # model's validations.
  def build_app
    app = App.new
    app.name = "name"
    app.description = "description"
    app.redirect_uri = "https://derp"
    app.application_url = "https://derp"
    app.vendor = "McDonalds"
    app.version = "1.0"
    app.administration_url = "https://morederp"
    app.image_url = "https://morederp"
    app
  end
end
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Rails application configuration for Mapotempo: locale enforcement,
# API middleware (Rack::Config / Rack::Cors), lograge request logging,
# and application-specific settings (route optimizer, geocoders, routing
# providers, fleet-tracking integrations).
module Mapotempo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Application config
config.i18n.enforce_available_locales = true
I18n.config.enforce_available_locales = true
config.assets.initialize_on_precompile = true
# Expose the API view root to the Rack env for the mounted API (Tilt templates).
config.middleware.use Rack::Config do |env|
env['api.tilt.root'] = Rails.root.join 'app', 'api', 'views'
end
# Allow cross-origin requests to the API from any origin.
config.middleware.use Rack::Cors do
allow do
origins '*'
# location of your API
resource '/api/*', headers: :any, methods: [:get, :post, :options, :put, :delete, :patch]
end
end
# Single-line request logging; strip noisy keys from the logged params.
config.lograge.enabled = true
config.lograge.custom_options = lambda do |event|
unwanted_keys = %w[format action controller]
customer_id = event.payload[:customer_id]
params = event.payload[:params].reject { |key,_| unwanted_keys.include? key }
{customer_id: customer_id, time: event.time, params: params}
end
# Application config
config.product_name = 'Mapotempo'
config.product_contact = 'frederic@mapotempo.com'
# File-based caches for the route optimizer and geocoders (10-day TTL).
config.optimize_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'optimizer', expires_in: 60*60*24*10)
config.optimize_url = 'http://localhost:4567/0.1/optimize_tsptw'
config.optimize_time = 30000
config.optimize_cluster_size = 5
config.optimize_soft_upper_bound = 3
config.geocode_code_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode', expires_in: 60*60*24*10)
config.geocode_reverse_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_reverse', expires_in: 60*60*24*10)
config.geocode_complete_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_complete', expires_in: 60*60*24*10)
config.geocode_ign_referer = 'localhost'
config.geocode_ign_key = nil
config.geocode_complete = false # Build time setting
# Routing provider request/result caches (1-day TTL).
config.osrm_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'osrm_request', expires_in: 60*60*24*1)
config.osrm_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'osrm_result', expires_in: 60*60*24*1)
config.here_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'here_request', expires_in: 60*60*24*1)
config.here_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'here_result', expires_in: 60*60*24*1)
config.here_api_url = 'https://route.nlp.nokia.com/routing'
config.here_api_app_id = nil
config.here_api_app_code = nil
config.tomtom_api_url = 'https://soap.business.tomtom.com/v1.23'
config.tomtom_api_key = nil
config.masternaut_api_url = 'http://ws.webservices.masternaut.fr/MasterWS/services'
config.alyacom_api_url = 'http://partners.alyacom.fr/ws'
config.alyacom_api_key = nil
config.delayed_job_use = false
config.self_care = true # Allow subscription and resiliation by the user himself
config.welcome_url = nil
config.help_url = nil
# Geocoding confidence thresholds used to flag low-accuracy results.
config.geocoding_accuracy_success = 0.98
config.geocoding_accuracy_warning = 0.9
end
end
# Style fields with validation errors using jQuery UI's error class
# instead of Rails' default field_with_errors wrapper div.
ActionView::Base.field_error_proc = Proc.new do |html_tag, instance|
  existing_class_at = html_tag.index 'class="'
  if existing_class_at.nil?
    # No class attribute yet: add one just before the tag's closing bracket.
    html_tag.insert html_tag.index('>'), ' class="ui-state-error"'
  else
    # Prepend our class inside the existing attribute (7 = 'class="'.length).
    html_tag.insert existing_class_at + 7, 'ui-state-error '
  end
end
# Extends ActiveRecord validations with a validator that "bubbles up"
# full error messages from associated records onto the parent record,
# instead of the generic "is invalid" produced by validates_associated.
module ActiveRecord
module Validations
class AssociatedBubblingValidator < ActiveModel::EachValidator
# Validates each associated record (single object, Enumerable or
# CollectionProxy) and copies its full error messages onto +attribute+.
def validate_each(record, attribute, value)
(value.is_a?(Enumerable) || value.is_a?(ActiveRecord::Associations::CollectionProxy) ? value : [value]).each do |v|
unless v.valid?
v.errors.full_messages.each do |msg|
record.errors.add(attribute, msg, options.merge(:value => value))
end
end
end
end
end
module ClassMethods
# Declares the bubbling validation on the given association attributes.
def validates_associated_bubbling(*attr_names)
validates_with AssociatedBubblingValidator, _merge_attributes(attr_names)
end
end
end
end
# Reopens TwitterBootstrapFormFor's builder to render submit buttons as
# <button> tags with an optional Font Awesome icon.
class TwitterBootstrapFormFor::FormBuilder
  # value: button label (defaults to the form's submit default);
  # options: HTML attributes (also accepted as the sole argument);
  # icon: Font Awesome class, false for the default floppy icon,
  # nil to render no icon at all.
  def submit(value = nil, options = {}, icon = false)
    # Support submit(options_hash) with no explicit label.
    value, options = nil, value if value.is_a?(Hash)
    options[:class] ||= 'btn btn-primary'
    value ||= submit_default_value
    @template.button_tag(options) do
      unless icon.nil?
        icon ||= 'fa-floppy-o'
        @template.concat @template.content_tag('i', nil, class: "fa #{icon} fa-fw")
      end
      @template.concat ' '
      @template.concat value
    end
  end
end
Add missing mailer config for devise
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Rails application configuration for Mapotempo: locale enforcement,
# API middleware (Rack::Config / Rack::Cors), lograge request logging,
# mailer URL options, and application-specific settings (route
# optimizer, geocoders, routing providers, fleet-tracking integrations).
module Mapotempo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Application config
config.i18n.enforce_available_locales = true
I18n.config.enforce_available_locales = true
config.assets.initialize_on_precompile = true
# Expose the API view root to the Rack env for the mounted API (Tilt templates).
config.middleware.use Rack::Config do |env|
env['api.tilt.root'] = Rails.root.join 'app', 'api', 'views'
end
# Allow cross-origin requests to the API from any origin.
config.middleware.use Rack::Cors do
allow do
origins '*'
# location of your API
resource '/api/*', headers: :any, methods: [:get, :post, :options, :put, :delete, :patch]
end
end
# Single-line request logging; strip noisy keys from the logged params.
config.lograge.enabled = true
config.lograge.custom_options = lambda do |event|
unwanted_keys = %w[format action controller]
customer_id = event.payload[:customer_id]
params = event.payload[:params].reject { |key,_| unwanted_keys.include? key }
{customer_id: customer_id, time: event.time, params: params}
end
# Application config
config.product_name = 'Mapotempo'
config.product_contact = 'frederic@mapotempo.com'
# Host used by ActionMailer to build absolute URLs in emails
# (e.g. Devise confirmation/reset links).
config.action_mailer.default_url_options = {host: 'localhost'}
# File-based caches for the route optimizer and geocoders (10-day TTL).
config.optimize_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'optimizer', expires_in: 60*60*24*10)
config.optimize_url = 'http://localhost:4567/0.1/optimize_tsptw'
config.optimize_time = 30000
config.optimize_cluster_size = 5
config.optimize_soft_upper_bound = 3
config.geocode_code_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode', expires_in: 60*60*24*10)
config.geocode_reverse_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_reverse', expires_in: 60*60*24*10)
config.geocode_complete_cache = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'geocode_complete', expires_in: 60*60*24*10)
config.geocode_ign_referer = 'localhost'
config.geocode_ign_key = nil
config.geocode_complete = false # Build time setting
# Routing provider request/result caches (1-day TTL).
config.osrm_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'osrm_request', expires_in: 60*60*24*1)
config.osrm_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'osrm_result', expires_in: 60*60*24*1)
config.here_cache_request = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'here_request', expires_in: 60*60*24*1)
config.here_cache_result = ActiveSupport::Cache::FileStore.new(Dir.tmpdir, namespace: 'here_result', expires_in: 60*60*24*1)
config.here_api_url = 'https://route.nlp.nokia.com/routing'
config.here_api_app_id = nil
config.here_api_app_code = nil
config.tomtom_api_url = 'https://soap.business.tomtom.com/v1.23'
config.tomtom_api_key = nil
config.masternaut_api_url = 'http://ws.webservices.masternaut.fr/MasterWS/services'
config.alyacom_api_url = 'http://partners.alyacom.fr/ws'
config.alyacom_api_key = nil
config.delayed_job_use = false
config.self_care = true # Allow subscription and resiliation by the user himself
config.welcome_url = nil
config.help_url = nil
# Geocoding confidence thresholds used to flag low-accuracy results.
config.geocoding_accuracy_success = 0.98
config.geocoding_accuracy_warning = 0.9
end
end
end
# Style fields with validation errors using jQuery UI's error class
# instead of Rails' default field_with_errors wrapper div.
ActionView::Base.field_error_proc = Proc.new do |html_tag, instance|
  existing_class_at = html_tag.index 'class="'
  if existing_class_at.nil?
    # No class attribute yet: add one just before the tag's closing bracket.
    html_tag.insert html_tag.index('>'), ' class="ui-state-error"'
  else
    # Prepend our class inside the existing attribute (7 = 'class="'.length).
    html_tag.insert existing_class_at + 7, 'ui-state-error '
  end
end
# Extends ActiveRecord validations with a validator that "bubbles up"
# full error messages from associated records onto the parent record,
# instead of the generic "is invalid" produced by validates_associated.
module ActiveRecord
module Validations
class AssociatedBubblingValidator < ActiveModel::EachValidator
# Validates each associated record (single object, Enumerable or
# CollectionProxy) and copies its full error messages onto +attribute+.
def validate_each(record, attribute, value)
(value.is_a?(Enumerable) || value.is_a?(ActiveRecord::Associations::CollectionProxy) ? value : [value]).each do |v|
unless v.valid?
v.errors.full_messages.each do |msg|
record.errors.add(attribute, msg, options.merge(:value => value))
end
end
end
end
end
module ClassMethods
# Declares the bubbling validation on the given association attributes.
def validates_associated_bubbling(*attr_names)
validates_with AssociatedBubblingValidator, _merge_attributes(attr_names)
end
end
end
end
# Reopens TwitterBootstrapFormFor's builder to render submit buttons as
# <button> tags with an optional Font Awesome icon.
class TwitterBootstrapFormFor::FormBuilder
  # value: button label (defaults to the form's submit default);
  # options: HTML attributes (also accepted as the sole argument);
  # icon: Font Awesome class, false for the default floppy icon,
  # nil to render no icon at all.
  def submit(value = nil, options = {}, icon = false)
    # Support submit(options_hash) with no explicit label.
    value, options = nil, value if value.is_a?(Hash)
    options[:class] ||= 'btn btn-primary'
    value ||= submit_default_value
    @template.button_tag(options) do
      unless icon.nil?
        icon ||= 'fa-floppy-o'
        @template.concat @template.content_tag('i', nil, class: "fa #{icon} fa-fw")
      end
      @template.concat ' '
      @template.concat value
    end
  end
end
|
module Georgia
  # CRUD, publication, sorting and search actions for Georgia CMS pages.
  # Single-page actions operate on @page (set by prepare_page); batch
  # actions (destroy/publish/unpublish) operate on pages selected by
  # params[:id] (a single id or an array from table checkboxes).
  class PagesController < ApplicationController
    include Georgia::Concerns::Helpers

    before_filter :prepare_new_page, only: [:search]
    before_filter :prepare_page, only: [:show, :edit, :settings, :update, :copy, :preview, :draft]

    # Showing a page means editing it.
    def show
      authorize @page
      redirect_to [:edit, @page]
    end

    # Edit current revision
    def edit
      authorize @page
      redirect_to [:edit, @page, @page.current_revision]
    end

    # Edit current page
    def settings
      authorize @page
    end

    # Create page, load first current revision and js redirect to revisions#edit
    def create
      @page = model.new(slug: params[:title].try(:parameterize))
      authorize @page
      if @page.save
        @page.revisions.create(template: Georgia.templates.first) do |rev|
          rev.contents << Georgia::Content.new(locale: I18n.default_locale, title: params[:title])
        end
        @page.update_attribute(:current_revision, @page.revisions.first)
        CreateActivity.new(@page, :create, owner: current_user).call
        respond_to do |format|
          format.html { redirect_to edit_page_revision_path(@page, @page.current_revision), notice: "#{@page.title} was successfully created." }
          format.js { render layout: false }
        end
      else
        respond_to do |format|
          format.html { redirect_to :back, alert: "Oups. Something went wrong." }
          format.js { render layout: false }
        end
      end
    end

    # Update page settings
    def update
      authorize @page
      model.update_tree(params[:page_tree]) if params[:page_tree]
      # The TextExt widget posts tag_list as a JSON-encoded string.
      clean_textext_tag_list_format if params[:page][:tag_list].present?
      if @page.update(page_params)
        CreateActivity.new(@page, :update, owner: current_user).call
        respond_to do |format|
          format.html { redirect_to [:settings, @page], notice: "#{@page.title} was successfully updated." }
          format.js { head :ok }
        end
      else
        respond_to do |format|
          format.html { redirect_to [:settings, @page], alert: "Oups. Something went wrong." }
          format.js { head :internal_server_error }
        end
      end
    end

    # Creates a copy of a page and redirects to its revisions#edit
    def copy
      authorize @page
      @copy = @page.copy
      CreateActivity.new(@page, :copy, owner: current_user).call
      redirect_to edit_page_revision_path(@copy, @copy.current_revision), notice: "#{instance_name.humanize} successfully copied. Do not forget to change your url"
    end

    # Destroys page and its revisions from page
    # Also used to destroy multiple pages from table checkboxes
    # (dropped an unused back_url local that was computed but never read)
    def destroy
      @pages = model.where(id: params[:id])
      authorize @pages
      # Capture the count before destroy_all empties the relation.
      pages_count = @pages.length
      if @pages.destroy_all
        render_success("#{instance_name.humanize.pluralize(pages_count)} successfully deleted.")
      else
        render_error("Oups. Something went wrong.")
      end
    end

    # Publishes multiple pages from table checkboxes.
    # A false return from any page's #publish fails the whole batch.
    # (Rewritten from `unless ... else` to a positive `if` for clarity.)
    def publish
      set_pages
      authorize @pages
      if @pages.map(&:publish).include?(false)
        render_error("Oups. Something went wrong.")
      else
        @pages.each do |page|
          CreateActivity.new(page, :publish, owner: current_user).call
        end
        render_success("Successfully published.")
      end
    end

    # Unpublishes multiple pages from table checkboxes.
    # Mirrors #publish; a false return from any page fails the batch.
    def unpublish
      set_pages
      authorize @pages
      if @pages.map(&:unpublish).include?(false)
        render_error("Oups. Something went wrong.")
      else
        @pages.each do |page|
          CreateActivity.new(page, :unpublish, owner: current_user).call
        end
        render_success("Successfully unpublished.")
      end
    end

    # Sorts subpages/children from pages#settings
    # FIXME: This should be taken care of in pages#update
    def sort
      if params[:page]
        params[:page].each_with_index do |id, index|
          model.update_all({position: index + 1}, {id: id})
        end
      end
      render nothing: true
    end

    def index
      authorize Georgia::Page
      redirect_to [:search, model]
    end

    # Full-text search; remembers the query in the session for back-links.
    def search
      authorize Georgia::Page
      session[:search_params] = params
      search_conditions = model.search_conditions(params)
      @search = model.search(search_conditions).page(params[:page])
      @pages = Georgia::PageDecorator.decorate_collection(@search.records)
    end

    private

    def prepare_new_page
      @page = model.new
    end

    def prepare_page
      @page = model.find(params[:id]).decorate
    end

    # Strong parameters for page settings updates.
    def page_params
      params.require(:page).permit(:slug, :parent_id, tag_list: [])
    end

    # Decodes the JSON string the TextExt tag widget posts for tag_list.
    def clean_textext_tag_list_format
      params[:page][:tag_list] = JSON.parse(params[:page][:tag_list])
    end

    def set_pages
      @pages = model.where(id: params[:id])
    end

    # Shared success responder for batch actions (HTML/JS/JSON).
    def render_success success_message
      @status_message = success_message
      @status = :notice
      respond_to do |format|
        format.html { redirect_to :back, notice: @status_message }
        format.js { render layout: false }
        format.json { render json: { ids: @pages.map(&:id), message: @status_message, status: @status } }
      end
    end

    # Shared error responder for batch actions (HTML/JS/JSON).
    def render_error error_message
      @status_message = error_message
      @status = :alert
      respond_to do |format|
        format.html { redirect_to :back, alert: @status_message }
        format.js { render layout: false }
        format.json { render json: { message: @status_message, status: @status } }
      end
    end
  end
end
Add hook for extra permitted params on page settings
module Georgia
class PagesController < ApplicationController
include Georgia::Concerns::Helpers
before_filter :prepare_new_page, only: [:search]
before_filter :prepare_page, only: [:show, :edit, :settings, :update, :copy, :preview, :draft]
# GET /pages/:id — showing a page redirects straight to editing it.
def show
authorize @page
redirect_to [:edit, @page]
end
# Edit current revision
# Redirects to the revision editor for the page's current revision.
def edit
authorize @page
redirect_to [:edit, @page, @page.current_revision]
end
# Edit current page
# Renders the page settings form; @page is loaded by the prepare_page
# before-filter, so this action only authorizes.
def settings
authorize @page
end
# Create page, load first current revision and js redirect to revisions#edit
# POST — creates a page from params[:title], seeds its first revision
# with a Content in the default locale, marks that revision current,
# records a :create activity and responds via HTML redirect or JS.
# On save failure, responds with an alert instead.
def create
  @page = model.new(slug: params[:title].try(:parameterize))
  authorize @page
  unless @page.save
    respond_to do |format|
      format.html { redirect_to :back, alert: "Oups. Something went wrong." }
      format.js { render layout: false }
    end
    return
  end
  @page.revisions.create(template: Georgia.templates.first) do |revision|
    revision.contents << Georgia::Content.new(locale: I18n.default_locale, title: params[:title])
  end
  @page.update_attribute(:current_revision, @page.revisions.first)
  CreateActivity.new(@page, :create, owner: current_user).call
  respond_to do |format|
    format.html { redirect_to edit_page_revision_path(@page, @page.current_revision), notice: "#{@page.title} was successfully created." }
    format.js { render layout: false }
  end
end
# Update page settings
def update
authorize @page
model.update_tree(params[:page_tree]) if params[:page_tree]
clean_textext_tag_list_format if params[:page][:tag_list].present?
if @page.update(page_params)
CreateActivity.new(@page, :update, owner: current_user).call
respond_to do |format|
format.html { redirect_to [:settings, @page], notice: "#{@page.title} was successfully updated." }
format.js { head :ok }
end
else
respond_to do |format|
format.html { redirect_to [:settings, @page], alert: "Oups. Something went wrong." }
format.js { head :internal_server_error }
end
end
end
# Creates a copy of a page and redirects to its revisions#edit
def copy
authorize @page
@copy = @page.copy
CreateActivity.new(@page, :copy, owner: current_user).call
redirect_to edit_page_revision_path(@copy, @copy.current_revision), notice: "#{instance_name.humanize} successfully copied. Do not forget to change your url"
end
# Destroys page and its revisions from page
# Also used to destroy multiple pages from table checkboxes
def destroy
back_url = url_for(controller: controller_name, action: :search)
@pages = model.where(id: params[:id])
authorize @pages
pages_count = @pages.length
if @pages.destroy_all
render_success("#{instance_name.humanize.pluralize(pages_count)} successfully deleted.")
else
render_error("Oups. Something went wrong.")
end
end
# Publishes multiple pages from table checkboxes
def publish
set_pages
authorize @pages
unless @pages.map(&:publish).include?(false)
@pages.each do |page|
CreateActivity.new(page, :publish, owner: current_user).call
end
render_success("Successfully published.")
else
render_error("Oups. Something went wrong.")
end
end
# Unpublishes multiple pages from table checkboxes
def unpublish
set_pages
authorize @pages
unless @pages.map(&:unpublish).include?(false)
@pages.each do |page|
CreateActivity.new(page, :unpublish, owner: current_user).call
end
render_success("Successfully unpublished.")
else
render_error("Oups. Something went wrong.")
end
end
# Sorts subpages/children from pages#settings
# FIXME: This should be taken care of in pages#update
def sort
if params[:page]
params[:page].each_with_index do |id, index|
model.update_all({position: index+1}, {id: id})
end
end
render nothing: true
end
def index
authorize Georgia::Page
redirect_to [:search, model]
end
def search
authorize Georgia::Page
session[:search_params] = params
search_conditions = model.search_conditions(params)
@search = model.search(search_conditions).page(params[:page])
@pages = Georgia::PageDecorator.decorate_collection(@search.records)
end
private
def prepare_new_page
@page = model.new
end
def prepare_page
@page = model.find(params[:id]).decorate
end
def page_params
params.require(:page).permit(permitted_page_params)
end
def permitted_page_params
[:slug, :parent_id, { tag_list: [] }] + permitted_extra_params
end
# Override in subclass when extra fields are present
def permitted_extra_params
[]
end
def clean_textext_tag_list_format
params[:page][:tag_list] = JSON.parse(params[:page][:tag_list])
end
def set_pages
@pages = model.where(id: params[:id])
end
def render_success success_message
@status_message = success_message
@status = :notice
respond_to do |format|
format.html { redirect_to :back, notice: @status_message }
format.js { render layout: false }
format.json { render json: { ids: @pages.map(&:id), message: @status_message, status: @status } }
end
end
def render_error error_message
@status_message = error_message
@status = :alert
respond_to do |format|
format.html { redirect_to :back, alert: @status_message }
format.js { render layout: false }
format.json { render json: { message: @status_message, status: @status } }
end
end
end
end
|
# Rails application definition for the SafiraDemo app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'dam_uploader'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module SafiraDemo
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
Enable serving of static assets from public/.
# Rails application definition for the SafiraDemo app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'dam_uploader'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module SafiraDemo
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Serve files from public/ through the Rails stack.
    # NOTE(review): this setting was renamed serve_static_files in
    # Rails 4.2 — confirm the Rails version this app targets.
    config.serve_static_assets = true
  end
end
|
# Member-facing controller for creating and moderating groups (tog_social).
class Member::GroupsController < Member::BaseController
  before_filter :find_group, :except => [:index, :new, :create]
  before_filter :check_moderator, :except => [:index, :new, :create]

  # Lists the current user's group memberships, split by role.
  def index
    @moderator_memberships = current_user.moderator_memberships
    @plain_memberships = current_user.plain_memberships
  end

  # Creates a group with the current user as author and first active member.
  # Unless the author is an admin or creation moderation is disabled, the
  # group stays pending until a site moderator activates it.
  def create
    @group = Group.new(params[:group])
    @group.author = current_user
    @group.save
    @group.join(current_user, true)
    @group.activate_membership(current_user)
    if @group.errors.empty?
      # FIX: the moderation config value is stored as a boolean, so it must
      # be compared against true, not the string 'true' — the string
      # comparison always bypassed moderation.
      if current_user.admin == true || Tog::Config['plugins.tog_social.group.moderation.creation'] != true
        @group.activate!
        flash[:ok] = I18n.t("tog_social.groups.member.created")
        redirect_to group_path(@group)
      else
        GroupMailer.deliver_activation_request(@group)
        flash[:warning] = I18n.t("tog_social.groups.member.pending")
        redirect_to groups_path
      end
    else
      render :action => 'new'
    end
  end

  # Updates group attributes and its tag list.
  def update
    @group.update_attributes!(params[:group])
    @group.tag_list = params[:group][:tag_list]
    @group.save
    flash[:ok] = I18n.t("tog_social.groups.member.updated", :name => @group.name)
    redirect_to group_path(@group)
  end

  # Rejects a pending join request and notifies the applicant by mail.
  def reject_member
    user = User.find(params[:user_id])
    # NOTE(review): User.find raises RecordNotFound rather than returning
    # nil, so this guard is likely dead code, and `pending_members_paths`
    # looks like a typo for a *_path route helper — verify against routes.
    if !user
      flash[:error] = I18n.t("tog_social.groups.member.user_doesnot_exists")
      redirect_to pending_members_paths(@group)
      return
    end
    @group.leave(user)
    if @group.membership_of(user)
      GroupMailer.deliver_reject_join_request(@group, current_user, user)
      flash[:ok] = I18n.t("tog_social.groups.member.user_rejected", :name => user.profile.full_name)
    else
      flash[:error] = I18n.t("tog_social.groups.member.error")
    end
    redirect_to member_group_pending_members_url(@group)
  end

  # Accepts a pending join request and notifies the applicant by mail.
  def accept_member
    user = User.find(params[:user_id])
    # NOTE(review): same dead-guard / route-helper concerns as reject_member.
    if !user
      flash[:error] = I18n.t("tog_social.groups.member.user_doesnot_exists")
      redirect_to pending_members_paths(@group)
      return
    end
    @group.activate_membership(user)
    if @group.members.include? user
      GroupMailer.deliver_accept_join_request(@group, current_user, user)
      flash[:ok] = I18n.t("tog_social.groups.member.user_accepted", :name => user.profile.full_name)
    else
      flash[:error] = I18n.t("tog_social.groups.member.error")
    end
    redirect_to member_group_pending_members_url(@group)
  end

  protected

  # Loads the group for member-specific actions.
  def find_group
    @group = Group.find(params[:id]) if params[:id]
  end

  # Blocks non-moderators from moderation actions.
  def check_moderator
    unless @group.moderators.include? current_user
      flash[:error] = I18n.t("tog_social.groups.member.not_moderator")
      redirect_to groups_path(@group)
    end
  end
end
Fix bug in moderated group creation: compare the config property as a boolean, not a string.
Signed-off-by: tog - extensible open source social network platform <a6699d8e1fe3bc0b367b46652c29410a5c13c11f@linkingpaths.com>
# Member-facing controller for creating and moderating groups (tog_social).
class Member::GroupsController < Member::BaseController
  before_filter :find_group, :except => [:index, :new, :create]
  before_filter :check_moderator, :except => [:index, :new, :create]

  # Lists the current user's group memberships, split by role.
  def index
    @moderator_memberships = current_user.moderator_memberships
    @plain_memberships = current_user.plain_memberships
  end

  # Creates a group with the current user as author and first active member.
  # Unless the author is an admin or creation moderation is disabled, the
  # group stays pending until a site moderator activates it.
  def create
    @group = Group.new(params[:group])
    @group.author = current_user
    @group.save
    @group.join(current_user, true)
    @group.activate_membership(current_user)
    if @group.errors.empty?
      # Config value is a boolean (previously compared to the string
      # 'true', which disabled moderation entirely).
      if current_user.admin == true || Tog::Config['plugins.tog_social.group.moderation.creation'] != true
        @group.activate!
        flash[:ok] = I18n.t("tog_social.groups.member.created")
        redirect_to group_path(@group)
      else
        GroupMailer.deliver_activation_request(@group)
        flash[:warning] = I18n.t("tog_social.groups.member.pending")
        redirect_to groups_path
      end
    else
      render :action => 'new'
    end
  end

  # Updates group attributes and its tag list.
  def update
    @group.update_attributes!(params[:group])
    @group.tag_list = params[:group][:tag_list]
    @group.save
    flash[:ok] = I18n.t("tog_social.groups.member.updated", :name => @group.name)
    redirect_to group_path(@group)
  end

  # Rejects a pending join request and notifies the applicant by mail.
  def reject_member
    user = User.find(params[:user_id])
    # NOTE(review): User.find raises RecordNotFound rather than returning
    # nil, so this guard is likely dead code, and `pending_members_paths`
    # looks like a typo for a *_path route helper — verify against routes.
    if !user
      flash[:error] = I18n.t("tog_social.groups.member.user_doesnot_exists")
      redirect_to pending_members_paths(@group)
      return
    end
    @group.leave(user)
    if @group.membership_of(user)
      GroupMailer.deliver_reject_join_request(@group, current_user, user)
      flash[:ok] = I18n.t("tog_social.groups.member.user_rejected", :name => user.profile.full_name)
    else
      flash[:error] = I18n.t("tog_social.groups.member.error")
    end
    redirect_to member_group_pending_members_url(@group)
  end

  # Accepts a pending join request and notifies the applicant by mail.
  def accept_member
    user = User.find(params[:user_id])
    # NOTE(review): same dead-guard / route-helper concerns as reject_member.
    if !user
      flash[:error] = I18n.t("tog_social.groups.member.user_doesnot_exists")
      redirect_to pending_members_paths(@group)
      return
    end
    @group.activate_membership(user)
    if @group.members.include? user
      GroupMailer.deliver_accept_join_request(@group, current_user, user)
      flash[:ok] = I18n.t("tog_social.groups.member.user_accepted", :name => user.profile.full_name)
    else
      flash[:error] = I18n.t("tog_social.groups.member.error")
    end
    redirect_to member_group_pending_members_url(@group)
  end

  protected

  # Loads the group for member-specific actions.
  def find_group
    @group = Group.find(params[:id]) if params[:id]
  end

  # Blocks non-moderators from moderation actions.
  def check_moderator
    unless @group.moderators.include? current_user
      flash[:error] = I18n.t("tog_social.groups.member.not_moderator")
      redirect_to groups_path(@group)
    end
  end
end |
# Rails application configuration for the Hadean app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
#require 'ripple/railtie'
#require 'riak-sessions'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)

module Hadean
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # "Pacific Time (US & Canada)"
    # 'Central Time (US & Canada)'
    config.time_zone = 'Eastern Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure generators values
    config.generators do |g|
      g.test_framework :rspec, :fixture => true
      g.fixture_replacement :factory_girl , :dir=>"spec/factories"
    end

    #config.session_store = ::Ripple::SessionStore

    # Configure sensitive parameters which will be filtered from the log file.
    # Includes the payment-card fields so card data never reaches the logs.
    config.filter_parameters += [:password,
      :password_confirmation,
      :number,
      :cc_number,
      :card_number,
      :verification_value]
  end
end
Whitespace cleanup.
# Rails application configuration for the Hadean app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
#require 'ripple/railtie'
#require 'riak-sessions'
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)

module Hadean
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # "Pacific Time (US & Canada)"
    # 'Central Time (US & Canada)'
    config.time_zone = 'Eastern Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure generators values
    config.generators do |g|
      g.test_framework :rspec, :fixture => true
      g.fixture_replacement :factory_girl , :dir=>"spec/factories"
    end

    #config.session_store = ::Ripple::SessionStore

    # Configure sensitive parameters which will be filtered from the log file.
    # Includes the payment-card fields so card data never reaches the logs.
    config.filter_parameters += [:password,
      :password_confirmation,
      :number,
      :cc_number,
      :card_number,
      :verification_value]
  end
end
|
# Rails application configuration for the GOV.UK Contacts app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'gds_api/worldwide'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

# Application-wide slug constants used by routing/validation.
APP_SLUG = 'contact'
SLUG_FORMAT = /[A-Za-z0-9\-_]+/

module Contacts
  class Application < Rails::Application
    require 'contacts'

    config.autoload_paths += %W(
      #{config.root}/app/models/website
    )

    # Extra entry-point assets (admin and frontend bundles, plus IE
    # fallback stylesheets) to precompile alongside application.*.
    config.assets.precompile += %w(
      admin.css
      admin.js
      frontend.css
      frontend.js
      frontend/base-ie6.css
      frontend/base-ie7.css
      frontend/base-ie8.css
    )

    # Generators
    config.generators do |g|
      g.orm :active_record
      g.template_engine :erb
      g.test_framework :rspec, fixture: false
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
    end

    # Wire up the worldwide API client once the app is fully initialized
    # (Plek resolves the whitehall-admin host for the current environment).
    config.after_initialize do
      Contacts.worldwide_api = GdsApi::Worldwide.new(Plek.current.find('whitehall-admin'))
    end
  end
end
Change assets prefix to contacts-assets
Matches changes made in puppet config
# Rails application configuration for the GOV.UK Contacts app.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
require 'gds_api/worldwide'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)

# Application-wide slug constants used by routing/validation.
APP_SLUG = 'contact'
SLUG_FORMAT = /[A-Za-z0-9\-_]+/

module Contacts
  class Application < Rails::Application
    require 'contacts'

    config.autoload_paths += %W(
      #{config.root}/app/models/website
    )

    # Extra entry-point assets (admin and frontend bundles, plus IE
    # fallback stylesheets) to precompile alongside application.*.
    config.assets.precompile += %w(
      admin.css
      admin.js
      frontend.css
      frontend.js
      frontend/base-ie6.css
      frontend/base-ie7.css
      frontend/base-ie8.css
    )

    # Serve assets under /contacts-assets (matches the puppet/router config).
    config.assets.prefix = '/contacts-assets'

    # Generators
    config.generators do |g|
      g.orm :active_record
      g.template_engine :erb
      g.test_framework :rspec, fixture: false
      g.fixture_replacement :factory_girl, dir: 'spec/factories'
    end

    # Wire up the worldwide API client once the app is fully initialized
    # (Plek resolves the whitehall-admin host for the current environment).
    config.after_initialize do
      Contacts.worldwide_api = GdsApi::Worldwide.new(Plek.current.find('whitehall-admin'))
    end
  end
end
|
# Twilio SMS webhook controller for the pickup-game "Shufflebot":
# collects in/out replies for the next game and handles admin commands.
class NotificationsController < ApplicationController
  include Webhookable
  # Twilio posts to these endpoints, so CSRF tokens are unavailable.
  skip_before_action :verify_authenticity_token

  # One-off test action: texts a fixed message (with a GIF) to Chad.
  def notify
    client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    message = client.messages.create from: ENV['TWILIO_PHONE'], to: ENV['CHAD_PHONE'], body: 'Learning to send SMS you are. This is great.', media_url: 'http://linode.rabasa.com/yoda.gif'
    render plain: message.status
  end

  # Webhook for inbound SMS. Records in/out replies against the open game
  # and dispatches admin slash-commands sent from Chad's number.
  def incoming
    phone_number = params[:From]
    user = User.find_by(phone: phone_number)
    @body = params[:Body].downcase
    @in_array = ['in', 'i', 'y', 'yes']
    @out_array = ['out', 'o', 'n', 'no']
    # split message into array of words
    @message_array = @body.split
    if user && Game.last.status == "open"
      game = Game.last
      # Skip recording for user 1 (the admin) unless the text is an
      # actual in/out reply.
      unless user.id == 1 && !@in_array.include?(@message_array[0]) && !@out_array.include?(@message_array[0])
        output = process_game_reply(user, game)
      end
      # elsif user && Game.last.status == "closed"
      # output = "Sorry, this game invite is expired. A new one should be created soon."
    end
    #if I am the sender...
    # FIXME: user is nil for unknown senders, so user.phone raises
    # NoMethodError here — guard with `user &&`.
    # FIXME: for known non-admin senders this else branch overwrites the
    # output produced by process_game_reply above.
    if user.phone == "+19179684122"
      @message_body = process_chad_text(@body)
      #let's add some commands (refactor later)
      if @command == "quote"
        output = "This is a random quote."
      elsif @command == "lane"
        output = "This is for lane. "
        send_me("Test of a second message")
      elsif @command == "charles"
        output = "This is for Charles."
      elsif @command == "compliment"
        output = "You are very handsome."
      elsif @command == "shuffle"
        send_team("Are you in this Tuesday?")
      elsif @command == "shufflenew"
        if Date.today.wday == 2
          next_game = Game.create(game_date: Date.today, result: "TBD", status: "open", season_id: Season.last.id)
          send_team("Next game is tonight.\n")
        else
          next_tuesday = Date.commercial(Date.today.year, 1+Date.today.cweek, 2)
          # FIXME: next_game is never assigned in this branch, so the
          # strftime call below raises NoMethodError on nil.
          format_date = next_game.game_date.strftime('%a, %b %d')
          send_team("Next game is on #{format_date}.\n")
        end
      elsif @command == "shufflestatus"
        # FIXME: empty branch — the command is recognized but does nothing.
      else
        # output = "No command found. Hi #{user.first_name}. You still rock."
      end
    else
      output = "This is not the droid you are looking for."
    end
    #checking if output is nil just for me now
    # if !output && user.id == 1
    # output = "output is nil chad"
    # end
    respond(output)
  end

  # Parses admin text markup out of the body: @group, [count], #tag and
  # /command. Sets @group_name/@num_invited/@tag/@command as side effects
  # and returns the body with those tokens removed, downcased.
  def process_chad_text(body)
    if body.match(/\@[a-zA-Z0-9]+/)
      group_sign = body.match(/\@[a-zA-Z0-9]+/)[0]
      @group_name = group_sign[1..-1]
    end
    if body.match(/\[[\d]+\]/)
      num_invited_sign = body.match(/\[[\d]+\]/)[0]
      @num_invited = num_invited_sign[1..-2].to_i
    end
    if body.match(/\#[a-zA-Z]+/)
      tag_sign = body.match(/\#[a-zA-Z]+/)[0]
      @tag = tag_sign[1..-1]
    end
    if body.match(/\/[a-zA-Z]+/)
      command_sign = body.match(/\/[a-zA-Z]+/)[0]
      @command = command_sign[1..-1]
    end
    message_array = body.split
    sanitized_body = message_array.reject do |word|
      word == group_sign || word == num_invited_sign || word == tag_sign || word == command_sign
    end
    sanitized_body.join(' ').downcase
    #output original body without any coded text
  end

  # Records user u's in/out reply for game g and returns the reply text.
  # When the fifth reply arrives, closes out the game (picking a random
  # sub when five players said "in").
  def process_game_reply(u, g)
    # define valid responses (positive and negative)
    # output = "Beginning of process game reply method."
    # create bench or playerobject based on response
    # (maybe check if there is a response already?)
    if @in_array.include?(@message_array[0])
      GamePlayer.create(user_id: u.id, game_id: g.id)
    elsif @out_array.include?(@message_array[0])
      GameBench.create(user_id: u.id, game_id: g.id)
    end
    total_replies = g.game_players.count + g.game_benches.count
    if total_replies <= 3
      output = "Thanks #{u.first_name} for your response. \nYou were #{total_replies.ordinalize} to get back to me. \nStill waiting for #{5 - total_replies} more replies. Please standby."
      send_me("For Chad: #{u.first_name} just replied with #{@message_array[0]}.")
      #add current_season_total_played || current_season_total_benched on the user model
    elsif total_replies == 4
      output = "Thanks #{u.first_name} for your response. \nYou were #{total_replies.ordinalize} to get back to me. \nStill waiting for #{5 - total_replies} more reply. Please standby."
      send_me("For Chad: #{u.first_name} just replied with #{@message_array[0]}.")
    elsif total_replies == 5
      # output = "Another output. Total replies: #{total_replies}"
      if g.game_players.count == 4 && g.game_benches.count == 1
        #ideal case scenario
        message = "Well, that worked out. \n Playing: #{g.game_players.all.first.user.first_name}, #{g.game_players.all.second.user.first_name}, #{g.game_players.all.third.user.first_name}, #{g.game_players.all.fourth.user.first_name}. \n Bench: #{g.game_benches.all.first.user.first_name}\n Don't forget the tangs! Go Ethel!"
        send_whole_team(message)
        g.status = "closed"
        g.save
      elsif g.game_benches.count > 1 && g.game_benches.count <=5
        message = "Yikes. We have #{g.game_benches.count} players sitting out. Please initiate the substitue protocol. Human intervention required! Text Mike, Courtney, Loren.. anyone. Just get to four!"
        send_whole_team(message)
      elsif g.game_players.count == 5
        player_hash = {}
        User.all.each do |user|
          player_hash[user.id] = user.season_benched
        end
        #this sorts the hash by value which is number of season benched
        # FIXME: sort_by returns a new array that is discarded here, so
        # player_hash is NOT sorted and values[0] below is insertion
        # order, not the minimum — should be player_hash.values.min.
        player_hash.sort_by{|k, v| v}
        #this get the first aka lowest value from the hash
        lowest_benched_total = player_hash.values[0]
        bench_array = []
        player_hash.each do |player, count|
          if count == lowest_benched_total
            bench_array << player
          end
        end
        last_game_id = Game.last.id - 1
        if last_game_id > 0
          if GameBench.find_by(game_id: last_game_id)
            last_sub_id = GameBench.find_by(game_id: last_game_id).user_id
          end
        end
        #exclude the last player who subbed from the array
        if last_sub_id
          bench_array.delete(last_sub_id)
        end
        #for test
        # sub = User.find(1)
        # FIXME: if bench_array became empty above, User.find(nil) raises.
        sub = User.find(bench_array.sample)
        GamePlayer.find_by(user_id: sub.id, game_id: g.id).destroy
        GameBench.create(user_id: sub.id, game_id: g.id)
        message = "Randomly selected sub from #{player_hash.count} eligible players is #{sub.first_name}.\n In the game: #{g.game_players.all.first.user.first_name}, #{g.game_players.all.second.user.first_name}, #{g.game_players.all.third.user.first_name}, #{g.game_players.all.fourth.user.first_name}. \n Bench: #{g.game_benches.all.first.user.first_name} \n Have a great game and don't forget the tangs!"
        send_whole_team(message)
        g.status = "closed"
        g.save
      end
    end
    output
  end

  # Texts every user an in/out invitation containing `message`.
  def send_team(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    User.all.each do |friend|
      @client.account.messages.create(
        :from => from,
        :to => friend.phone,
        :body => "From Shufflebot:\nHey #{friend.first_name}, #{message} [In] or [Out]?"
      )
    end
  end

  # Texts every user a plain broadcast containing `message`.
  def send_whole_team(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    User.all.each do |friend|
      @client.account.messages.create(
        :from => from,
        :to => friend.phone,
        :body => "From Shufflebot: #{message}"
      )
    end
  end

  # Texts `message` to the hard-coded admin number.
  def send_me(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    @client.account.messages.create(
      :from => from,
      :to => "+19179684122",
      :body => "#{message}"
    )
  end

  # Renders `message` back to Twilio as a TwiML <Message> reply.
  def respond(message)
    response = Twilio::TwiML::Response.new do |r|
      r.Message message
    end
    render text: response.text
  end
end
Add /shufflestatus command that texts the current game's player and bench lists to the admin.
# Twilio SMS webhook controller for the pickup-game "Shufflebot":
# collects in/out replies for the next game and handles admin commands.
class NotificationsController < ApplicationController
  include Webhookable
  # Twilio posts to these endpoints, so CSRF tokens are unavailable.
  skip_before_action :verify_authenticity_token

  # One-off test action: texts a fixed message (with a GIF) to Chad.
  def notify
    client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    message = client.messages.create from: ENV['TWILIO_PHONE'], to: ENV['CHAD_PHONE'], body: 'Learning to send SMS you are. This is great.', media_url: 'http://linode.rabasa.com/yoda.gif'
    render plain: message.status
  end

  # Webhook for inbound SMS. Records in/out replies against the open game
  # and dispatches admin slash-commands sent from Chad's number.
  def incoming
    phone_number = params[:From]
    user = User.find_by(phone: phone_number)
    @body = params[:Body].downcase
    @in_array = ['in', 'i', 'y', 'yes']
    @out_array = ['out', 'o', 'n', 'no']
    # split message into array of words
    @message_array = @body.split
    if user && Game.last.status == "open"
      game = Game.last
      # Skip recording for user 1 (the admin) unless the text is an
      # actual in/out reply.
      unless user.id == 1 && !@in_array.include?(@message_array[0]) && !@out_array.include?(@message_array[0])
        output = process_game_reply(user, game)
      end
    end
    # Admin commands are only handled for Chad's number.
    # FIX: guard on `user` — previously user.phone raised NoMethodError
    # for senders not found in the users table.
    if user && user.phone == "+19179684122"
      @message_body = process_chad_text(@body)
      #let's add some commands (refactor later)
      if @command == "quote"
        output = "This is a random quote."
      elsif @command == "lane"
        output = "This is for lane. "
        send_me("Test of a second message")
      elsif @command == "charles"
        output = "This is for Charles."
      elsif @command == "compliment"
        output = "You are very handsome."
      elsif @command == "shuffle"
        send_team("Are you in this Tuesday?")
      elsif @command == "shufflestatus"
        # Text the admin the current game's player/bench rosters.
        g = Game.last
        player_names = []
        bench_names = []
        g.game_players.each do |gp|
          player_names << gp.user.first_name
        end
        g.game_benches.each do |gb|
          bench_names << gb.user.first_name
        end
        message = "Playing: #{player_names}\n Bench: #{bench_names}"
        send_me(message)
      elsif @command == "shufflenew"
        if Date.today.wday == 2
          next_game = Game.create(game_date: Date.today, result: "TBD", status: "open", season_id: Season.last.id)
          send_team("Next game is tonight.\n")
        else
          next_tuesday = Date.commercial(Date.today.year, 1+Date.today.cweek, 2)
          # FIX: next_game was never created in this branch, so the
          # strftime call below raised NoMethodError on nil.
          next_game = Game.create(game_date: next_tuesday, result: "TBD", status: "open", season_id: Season.last.id)
          format_date = next_game.game_date.strftime('%a, %b %d')
          send_team("Next game is on #{format_date}.\n")
        end
      end
      # (a dead duplicate empty `elsif @command == "shufflestatus"` branch
      # was removed here)
    else
      # FIX: use ||= so a game-reply acknowledgement from above is not
      # clobbered for known, non-admin senders.
      output ||= "This is not the droid you are looking for."
    end
    respond(output)
  end

  # Parses admin text markup out of the body: @group, [count], #tag and
  # /command. Sets @group_name/@num_invited/@tag/@command as side effects
  # and returns the body with those tokens removed, downcased.
  def process_chad_text(body)
    if body.match(/\@[a-zA-Z0-9]+/)
      group_sign = body.match(/\@[a-zA-Z0-9]+/)[0]
      @group_name = group_sign[1..-1]
    end
    if body.match(/\[[\d]+\]/)
      num_invited_sign = body.match(/\[[\d]+\]/)[0]
      @num_invited = num_invited_sign[1..-2].to_i
    end
    if body.match(/\#[a-zA-Z]+/)
      tag_sign = body.match(/\#[a-zA-Z]+/)[0]
      @tag = tag_sign[1..-1]
    end
    if body.match(/\/[a-zA-Z]+/)
      command_sign = body.match(/\/[a-zA-Z]+/)[0]
      @command = command_sign[1..-1]
    end
    message_array = body.split
    sanitized_body = message_array.reject do |word|
      word == group_sign || word == num_invited_sign || word == tag_sign || word == command_sign
    end
    sanitized_body.join(' ').downcase
    #output original body without any coded text
  end

  # Records user u's in/out reply for game g and returns the reply text.
  # When the fifth reply arrives, closes out the game (picking a random
  # sub when five players said "in").
  def process_game_reply(u, g)
    if @in_array.include?(@message_array[0])
      GamePlayer.create(user_id: u.id, game_id: g.id)
    elsif @out_array.include?(@message_array[0])
      GameBench.create(user_id: u.id, game_id: g.id)
    end
    total_replies = g.game_players.count + g.game_benches.count
    if total_replies <= 3
      output = "Thanks #{u.first_name} for your response. \nYou were #{total_replies.ordinalize} to get back to me. \nStill waiting for #{5 - total_replies} more replies. Please standby."
      send_me("For Chad: #{u.first_name} just replied with #{@message_array[0]}.")
      #add current_season_total_played || current_season_total_benched on the user model
    elsif total_replies == 4
      output = "Thanks #{u.first_name} for your response. \nYou were #{total_replies.ordinalize} to get back to me. \nStill waiting for #{5 - total_replies} more reply. Please standby."
      send_me("For Chad: #{u.first_name} just replied with #{@message_array[0]}.")
    elsif total_replies == 5
      if g.game_players.count == 4 && g.game_benches.count == 1
        #ideal case scenario
        message = "Well, that worked out. \n Playing: #{g.game_players.all.first.user.first_name}, #{g.game_players.all.second.user.first_name}, #{g.game_players.all.third.user.first_name}, #{g.game_players.all.fourth.user.first_name}. \n Bench: #{g.game_benches.all.first.user.first_name}\n Don't forget the tangs! Go Ethel!"
        send_whole_team(message)
        g.status = "closed"
        g.save
      elsif g.game_benches.count > 1 && g.game_benches.count <=5
        message = "Yikes. We have #{g.game_benches.count} players sitting out. Please initiate the substitue protocol. Human intervention required! Text Mike, Courtney, Loren.. anyone. Just get to four!"
        send_whole_team(message)
      elsif g.game_players.count == 5
        # Everyone said "in": bench the player with the fewest benches
        # this season (random among ties), excluding last game's sub.
        player_hash = {}
        User.all.each do |user|
          player_hash[user.id] = user.season_benched
        end
        # FIX: the previous code called sort_by and discarded its return
        # value, then read values[0] — which is insertion order, not the
        # minimum. Take the true minimum instead.
        lowest_benched_total = player_hash.values.min
        bench_array = []
        player_hash.each do |player, count|
          if count == lowest_benched_total
            bench_array << player
          end
        end
        last_game_id = Game.last.id - 1
        if last_game_id > 0
          if GameBench.find_by(game_id: last_game_id)
            last_sub_id = GameBench.find_by(game_id: last_game_id).user_id
          end
        end
        #exclude the last player who subbed from the array
        if last_sub_id
          bench_array.delete(last_sub_id)
        end
        # FIXME: if the last sub was the only player at the minimum,
        # bench_array is now empty and User.find(nil) will raise.
        sub = User.find(bench_array.sample)
        GamePlayer.find_by(user_id: sub.id, game_id: g.id).destroy
        GameBench.create(user_id: sub.id, game_id: g.id)
        message = "Randomly selected sub from #{player_hash.count} eligible players is #{sub.first_name}.\n In the game: #{g.game_players.all.first.user.first_name}, #{g.game_players.all.second.user.first_name}, #{g.game_players.all.third.user.first_name}, #{g.game_players.all.fourth.user.first_name}. \n Bench: #{g.game_benches.all.first.user.first_name} \n Have a great game and don't forget the tangs!"
        send_whole_team(message)
        g.status = "closed"
        g.save
      end
    end
    output
  end

  # Texts every user an in/out invitation containing `message`.
  def send_team(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    User.all.each do |friend|
      @client.account.messages.create(
        :from => from,
        :to => friend.phone,
        :body => "From Shufflebot:\nHey #{friend.first_name}, #{message} [In] or [Out]?"
      )
    end
  end

  # Texts every user a plain broadcast containing `message`.
  def send_whole_team(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    User.all.each do |friend|
      @client.account.messages.create(
        :from => from,
        :to => friend.phone,
        :body => "From Shufflebot: #{message}"
      )
    end
  end

  # Texts `message` to the hard-coded admin number.
  def send_me(message)
    @client = Twilio::REST::Client.new ENV['TWILIO_ACCOUNT_SID'], ENV['TWILIO_AUTH_TOKEN']
    from = ENV['TWILIO_PHONE']
    @client.account.messages.create(
      :from => from,
      :to => "+19179684122",
      :body => "#{message}"
    )
  end

  # Renders `message` back to Twilio as a TwiML <Message> reply.
  def respond(message)
    response = Twilio::TwiML::Response.new do |r|
      r.Message message
    end
    render text: response.text
  end
end
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
require "sprockets/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Publisher
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Enable the asset pipeline
config.assets.enabled = true
config.assets.version = '1.0'
config.assets.prefix = '/assets'
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib #{config.root}/app/presenters)
# NOTE(review): Dir.glob returns absolute filesystem paths, while
# config.assets.precompile normally expects logical asset names (or
# regexps/procs) — verify that sprockets actually matches these entries.
config.assets.precompile += Dir.glob(Rails.root.join('app', 'assets', 'stylesheets', '*.css'))
config.assets.precompile += Dir.glob(Rails.root.join('app', 'assets', 'javascripts', '*.js'))
config.assets.precompile += Dir.glob(Rails.root.join('app', 'assets', 'images', '*'))
# Generator defaults: Mongoid ORM, ERB templates, Test::Unit without fixtures.
config.generators do |g|
g.orm :mongoid
g.template_engine :erb # this could be :haml or whatever
g.test_framework :test_unit, :fixture => false # this could be :rpsec or whatever
end
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
require 'open-uri'
require 'builder'
Hard-code the full list of asset names for precompilation: globbing the asset
directories produced absolute filesystem paths, which is not what the Rails
asset pipeline expects.
require File.expand_path('../boot', __FILE__)
# Pick the frameworks you want:
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "active_resource/railtie"
require "rails/test_unit/railtie"
require "sprockets/railtie"
# If you have a Gemfile, require the gems listed there, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module Publisher
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Enable the asset pipeline
config.assets.enabled = true
config.assets.version = '1.0'
config.assets.prefix = '/assets'
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib #{config.root}/app/presenters)
# Explicit whitelist of logical asset names to precompile (globbing the
# asset directories produced paths the pipeline did not match).
config.assets.precompile += %W(application.css bootstrap-responsive.min.css application.js
bootstrap.min.js form-tools.js jquery.min.js jquery.mustache.js multi-part.js
publications.js glyphicons-halflings-white.png glyphicons-halflings.png
header-crown.png icon-answer.png icon-guide.png icon-localtransaction.png
icon-place.png icon-programme.png icon-service.png icon-transaction.png)
# Generator defaults: Mongoid ORM, ERB templates, Test::Unit without fixtures.
config.generators do |g|
g.orm :mongoid
g.template_engine :erb # this could be :haml or whatever
g.test_framework :test_unit, :fixture => false # this could be :rpsec or whatever
end
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# JavaScript files you want as :defaults (application.js is always included).
# config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
end
end
require 'open-uri'
require 'builder'
|
# Provides web and API actions relating to user notifications.
class NotificationsController < ApplicationController
  before_action :authenticate_user!, :only => [:index]

  # Authenticated web/API action. Retrieves the current user's unread
  # notifications, newest first, 100 per page.
  def index
    @notifications = Notification.where(:user => current_user, :is_read => false).paginate(:page => params[:page], :per_page => 100).order('created_at DESC')
    respond_to do |format|
      format.html { render :index }
      format.json { render :json => @notifications }
    end
  end

  # Authenticated web/API action. Marks a single notification as read.
  def read
    @notification = Notification.find params[:id]
    @notification.is_read = true
    if @notification.save
      respond_to do |format|
        format.html {
          flash[:notice] = "Marked as read."
          render :index
        }
        format.json { render :json => { :status => 'success' } }
      end
    else
      respond_to do |format|
        format.html {
          flash[:error] = "Failed to mark read."
          render :index
        }
        format.json { render :json => { :status => 'failed' } }
      end
    end
  end

  # Authenticated web/API action. Marks all the current user's unread
  # notifications as read.
  def read_all
    @notifications = Notification.where(:user => current_user, :is_read => false)
    # Bug fix: update_all previously wrote is_read: false, which re-marked
    # every notification as unread instead of read.
    if @notifications.update_all(is_read: true)
      respond_to do |format|
        format.html {
          flash[:notice] = "Marked all as read."
          render :index
        }
        format.json { render :json => { :status => 'success' } }
      end
    else
      respond_to do |format|
        format.html {
          flash[:error] = "Failed to mark all read."
          render :index
        }
        format.json { render :json => { :status => 'failed' } }
      end
    end
  end
end
Correct the read_all notifications update: set is_read to true (mark as read)
instead of false, which was re-marking every notification as unread.
# Provides web and API actions relating to user notifications.
class NotificationsController < ApplicationController
  before_action :authenticate_user!, only: [:index]

  # Authenticated web/API action. Retrieves the current user's unread
  # notifications, newest first, 100 per page.
  def index
    unread = Notification.where(user: current_user, is_read: false)
    @notifications = unread.paginate(page: params[:page], per_page: 100).order('created_at DESC')
    respond_to do |format|
      format.html { render :index }
      format.json { render json: @notifications }
    end
  end

  # Authenticated web/API action. Marks a single notification as read.
  def read
    @notification = Notification.find(params[:id])
    @notification.is_read = true
    saved = @notification.save
    respond_to do |format|
      format.html {
        if saved
          flash[:notice] = "Marked as read."
        else
          flash[:error] = "Failed to mark read."
        end
        render :index
      }
      format.json { render json: { status: saved ? 'success' : 'failed' } }
    end
  end

  # Authenticated web/API action. Marks all the current user's unread
  # notifications as read.
  def read_all
    @notifications = Notification.where(user: current_user, is_read: false)
    updated = @notifications.update_all(is_read: true)
    respond_to do |format|
      format.html {
        if updated
          flash[:notice] = "Marked all as read."
        else
          flash[:error] = "Failed to mark all read."
        end
        render :index
      }
      format.json { render json: { status: updated ? 'success' : 'failed' } }
    end
  end
end
|
# coding: utf-8
require File.expand_path('../boot', __FILE__)
require "action_controller/railtie"
#require "sequel-rails/railtie"
require "action_mailer/railtie"
require "active_record"
require_relative '../lib/carto/configuration'
require_relative '../lib/carto/carto_gears_support'
if defined?(Bundler)
Bundler.require(:default, :assets, Rails.env)
end
module CartoDB
  class Application < Rails::Application
    include Carto::Configuration
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Filter out connector connection credentials. We'd rather filter just 'connector.connection',
    # but version 3.x of Rails doesn't support nested parameter filtering.
    config.filter_parameters += [:connection]
    ::Sequel.extension(:pagination)
    ::Sequel.extension(:connection_validator)
    # Enable the asset pipeline
    config.assets.enabled = false
    # Make sure the log directory exists before anything tries to write to it.
    FileUtils.mkdir_p(log_dir_path) unless File.directory?(log_dir_path)
    config.paths['public'] = [public_uploads_path]
    config.assets.paths << Rails.root.join('bower_components')
    # Default setting is [/\w+\.(?!js|css).+/, /application.(css|js)$/]
    # Fix: "public_dashboard.js" was listed twice; the duplicate entry has
    # been removed (precompiling the same asset twice is redundant).
    config.assets.precompile = %w(
      config.js
      app.js
      cdb.js
      embed.js
      dashboard_deps.js
      dashboard.js
      dashboard_templates.js
      public_dashboard_deps.js
      public_dashboard.js
      data_library_deps.js
      data_library.js
      public_map.js
      public_map_deps.js
      editor.js
      account_templates.js
      account_deps.js
      account.js
      keys_templates.js
      keys_deps.js
      keys.js
      models.js
      organization_templates.js
      organization_deps.js
      organization.js
      table.js
      public_like.js
      tangram.min.js
      old_common.js
      old_common_without_core.js
      templates.js
      templates_mustache.js
      specs.js
      sessions.js
      signup.js
      confirmation_templates.js
      confirmation.js
      new_public_table.js
      mobile_apps.js
      mobile_apps_templates.js
      mobile_apps_deps.js
      explore_deps.js
      explore.js
      user_feed_deps.js
      user_feed.js
      tipsy.js
      modernizr.js
      statsc.js
      builder.js
      builder_vendor.js
      builder_embed.js
      builder_embed_vendor.js
      dataset.js
      dataset_vendor.js
      common.js
      deep_insights.css
      cdb.css
      cdb/themes/css/cartodb.css
      cdb/themes/css/cartodb.ie.css
      common.css
      old_common.css
      dashboard.css
      cartodb.css
      front.css
      editor.css
      common_editor3.css
      editor3.css
      public_editor3.css
      table.css
      leaflet.css
      map.css
      map/leaflet.ie.css
      keys.css
      organization.css
      password_protected.css
      public_dashboard.css
      public_map.css
      data_library.css
      public_table.css
      sessions.css
      user_feed.css
      explore.css
      mobile_apps.css
      api_keys.css
      plugins/tipsy.css
      *.jpg
      *.ico
      *.gif
      *.png
      *.eot
      *.otf
      *.svg
      *.woff
      *.ttf
      *.swf
    )
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    config.action_controller.relative_url_root = "/assets/#{frontend_version}"
    # Let customized view paths take precedence over the defaults.
    custom_app_views_paths.reverse.each do |custom_views_path|
      config.paths['app/views'].unshift(custom_views_path)
    end
  end
end
require 'csv'
require 'state_machine'
require 'cartodb/controller_flows/public/content'
require 'cartodb/controller_flows/public/datasets'
require 'cartodb/controller_flows/public/maps'
require 'cartodb/errors'
require 'cartodb/logger'
require 'cartodb/connection_pool'
require 'cartodb/pagination'
require 'cartodb/mini_sequel'
require 'cartodb/central'
# require 'importer/lib/cartodb-importer'
require 'importer/lib/cartodb-migrator'
require 'varnish/lib/cartodb-varnish'
$pool = CartoDB::ConnectionPool.new
Carto::CartoGearsSupport.new.gears.each do |gear|
require gear.full_path.join('lib', gear.name)
end
Add the new stylesheet (tileo.css) to the asset precompile list.
# coding: utf-8
require File.expand_path('../boot', __FILE__)
require "action_controller/railtie"
#require "sequel-rails/railtie"
require "action_mailer/railtie"
require "active_record"
require_relative '../lib/carto/configuration'
require_relative '../lib/carto/carto_gears_support'
if defined?(Bundler)
Bundler.require(:default, :assets, Rails.env)
end
module CartoDB
  class Application < Rails::Application
    include Carto::Configuration
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Filter out connector connection credentials. We'd rather filter just 'connector.connection',
    # but version 3.x of Rails doesn't support nested parameter filtering.
    config.filter_parameters += [:connection]
    ::Sequel.extension(:pagination)
    ::Sequel.extension(:connection_validator)
    # Enable the asset pipeline
    config.assets.enabled = false
    # Make sure the log directory exists before anything tries to write to it.
    FileUtils.mkdir_p(log_dir_path) unless File.directory?(log_dir_path)
    config.paths['public'] = [public_uploads_path]
    config.assets.paths << Rails.root.join('bower_components')
    # Default setting is [/\w+\.(?!js|css).+/, /application.(css|js)$/]
    # Fix: "public_dashboard.js" was listed twice; the duplicate entry has
    # been removed (precompiling the same asset twice is redundant).
    config.assets.precompile = %w(
      config.js
      app.js
      cdb.js
      embed.js
      dashboard_deps.js
      dashboard.js
      dashboard_templates.js
      public_dashboard_deps.js
      public_dashboard.js
      data_library_deps.js
      data_library.js
      public_map.js
      public_map_deps.js
      editor.js
      account_templates.js
      account_deps.js
      account.js
      keys_templates.js
      keys_deps.js
      keys.js
      models.js
      organization_templates.js
      organization_deps.js
      organization.js
      table.js
      public_like.js
      tangram.min.js
      old_common.js
      old_common_without_core.js
      templates.js
      templates_mustache.js
      specs.js
      sessions.js
      signup.js
      confirmation_templates.js
      confirmation.js
      new_public_table.js
      mobile_apps.js
      mobile_apps_templates.js
      mobile_apps_deps.js
      explore_deps.js
      explore.js
      user_feed_deps.js
      user_feed.js
      tipsy.js
      modernizr.js
      statsc.js
      builder.js
      builder_vendor.js
      builder_embed.js
      builder_embed_vendor.js
      dataset.js
      dataset_vendor.js
      common.js
      tileo.css
      deep_insights.css
      cdb.css
      cdb/themes/css/cartodb.css
      cdb/themes/css/cartodb.ie.css
      common.css
      old_common.css
      dashboard.css
      cartodb.css
      front.css
      editor.css
      common_editor3.css
      editor3.css
      public_editor3.css
      table.css
      leaflet.css
      map.css
      map/leaflet.ie.css
      keys.css
      organization.css
      password_protected.css
      public_dashboard.css
      public_map.css
      data_library.css
      public_table.css
      sessions.css
      user_feed.css
      explore.css
      mobile_apps.css
      api_keys.css
      plugins/tipsy.css
      *.jpg
      *.ico
      *.gif
      *.png
      *.eot
      *.otf
      *.svg
      *.woff
      *.ttf
      *.swf
    )
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
    config.action_controller.relative_url_root = "/assets/#{frontend_version}"
    # Let customized view paths take precedence over the defaults.
    custom_app_views_paths.reverse.each do |custom_views_path|
      config.paths['app/views'].unshift(custom_views_path)
    end
  end
end
require 'csv'
require 'state_machine'
require 'cartodb/controller_flows/public/content'
require 'cartodb/controller_flows/public/datasets'
require 'cartodb/controller_flows/public/maps'
require 'cartodb/errors'
require 'cartodb/logger'
require 'cartodb/connection_pool'
require 'cartodb/pagination'
require 'cartodb/mini_sequel'
require 'cartodb/central'
# require 'importer/lib/cartodb-importer'
require 'importer/lib/cartodb-migrator'
require 'varnish/lib/cartodb-varnish'
$pool = CartoDB::ConnectionPool.new
Carto::CartoGearsSupport.new.gears.each do |gear|
require gear.full_path.join('lib', gear.name)
end
|
# CRUD actions for user notifications.
class NotificationsController < ApplicationController
  before_action :set_notification, only: [:destroy]

  # Lists the current user's notifications.
  def index
    @notifications = Notification.where(user_id: current_user.id)
  end

  # Renders the form for a new notification.
  def new
    @notification = Notification.new
  end

  # Creates a notification from the permitted parameters.
  def create
    @notification = Notification.new(notification_params)
    respond_to do |format|
      if @notification.save
        format.html { redirect_to @notification, notice: I18n.t('crud.saved') }
        format.json { render :show, status: :created, location: @notification }
      else
        format.html { render :new }
        format.json { render json: @notification.errors, status: :unprocessable_entity }
      end
    end
  end

  # Destroys the notification loaded by set_notification.
  def destroy
    @notification.destroy
    respond_to do |format|
      # Bug fix: the flash text was passed as a bare positional argument,
      # but redirect_to expects an options hash — it must be the :notice key.
      format.html { redirect_to notifications_url, notice: I18n.t('crud.destroyed') }
      format.json { head :no_content }
    end
  end

  private

  # Loads the notification addressed by params[:id].
  def set_notification
    @notification = Notification.find(params[:id])
  end

  # Strong-parameters whitelist for create.
  def notification_params
    params.require(:notification).permit(:title, :description, :read, :limit, :user_id)
  end
end
Notifications: add a notification_menu action that returns the latest five
unread notifications.
# CRUD actions (plus a menu feed) for user notifications.
class NotificationsController < ApplicationController
  before_action :set_notification, only: [:destroy]

  # Lists the current user's notifications.
  def index
    @notifications = Notification.where(user_id: current_user.id)
  end

  # Renders the form for a new notification.
  def new
    @notification = Notification.new
  end

  # Creates a notification from the permitted parameters.
  def create
    @notification = Notification.new(notification_params)
    respond_to do |format|
      if @notification.save
        format.html { redirect_to @notification, notice: I18n.t('crud.saved') }
        format.json { render :show, status: :created, location: @notification }
      else
        format.html { render :new }
        format.json { render json: @notification.errors, status: :unprocessable_entity }
      end
    end
  end

  # Destroys the notification loaded by set_notification.
  def destroy
    @notification.destroy
    respond_to do |format|
      # Bug fix: the flash text was passed as a bare positional argument,
      # but redirect_to expects an options hash — it must be the :notice key.
      format.html { redirect_to notifications_url, notice: I18n.t('crud.destroyed') }
      format.json { head :no_content }
    end
  end

  # Returns the last five notifications that are unread and unassigned.
  # NOTE(review): user_id: nil looks suspicious — should this be scoped to
  # current_user instead? Verify against the menu partial's expectations.
  def notification_menu
    @notification_menu = Notification.where(read: nil, user_id: nil).last(5)
  end

  private

  # Loads the notification addressed by params[:id].
  def set_notification
    @notification = Notification.find(params[:id])
  end

  # Strong-parameters whitelist for create.
  def notification_params
    params.require(:notification).permit(:title, :description, :read, :limit, :user_id)
  end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
module Rnplay
class Application < Rails::Application
# Opt in to the Rails 4.2 transactional-callback behaviour flag.
config.active_record.raise_in_transactional_callbacks = true
# Outgoing mail is delivered through Postmark, keyed by POSTMARK_KEY.
config.action_mailer.delivery_method = :postmark
config.action_mailer.postmark_settings = { :api_token => ENV['POSTMARK_KEY'] }
# Background jobs run on Sidekiq.
config.active_job.queue_adapter = :sidekiq
# Include the React add-ons bundle in the asset build.
config.react.addons = true
# If we do not use this then render json: .. will escape & in urls and break
# the app qr code action
config.active_support.escape_html_entities_in_json = false
# CORS: /cors accepts credentialed POSTs from any origin; every other
# resource accepts the common verbs without credentials.
config.middleware.insert_before 0, "Rack::Cors", :debug => true, :logger => (-> { Rails.logger }) do
allow do
origins '*'
resource '/cors',
:headers => :any,
:methods => [:post],
:credentials => true,
:max_age => 0
resource '*',
:headers => :any,
:methods => [:get, :post, :delete, :put, :options, :head],
:max_age => 0
end
end
end
end
Enable Harmony (ES6) support for the react-rails JSX transform.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
Bundler.require(*Rails.groups)
module Rnplay
class Application < Rails::Application
# Opt in to the Rails 4.2 transactional-callback behaviour flag.
config.active_record.raise_in_transactional_callbacks = true
# Outgoing mail is delivered through Postmark, keyed by POSTMARK_KEY.
config.action_mailer.delivery_method = :postmark
config.action_mailer.postmark_settings = { :api_token => ENV['POSTMARK_KEY'] }
# Background jobs run on Sidekiq.
config.active_job.queue_adapter = :sidekiq
# Include the React add-ons bundle in the asset build.
config.react.addons = true
# Enable Harmony (ES6) syntax support in the JSX transform.
config.react.jsx_transform_options = {
harmony: true
}
# If we do not use this then render json: .. will escape & in urls and break
# the app qr code action
config.active_support.escape_html_entities_in_json = false
# CORS: /cors accepts credentialed POSTs from any origin; every other
# resource accepts the common verbs without credentials.
config.middleware.insert_before 0, "Rack::Cors", :debug => true, :logger => (-> { Rails.logger }) do
allow do
origins '*'
resource '/cors',
:headers => :any,
:methods => [:post],
:credentials => true,
:max_age => 0
resource '*',
:headers => :any,
:methods => [:get, :post, :delete, :put, :options, :head],
:max_age => 0
end
end
end
end
|
class OrganizationsController < ApplicationController
  # NOTE: the page_cache_directory / caches_page statements were removed —
  # `caches_page` is not part of Rails core (it needs the
  # actionpack-page_caching gem) and the cached index pages went stale.

  # GET /organizations
  # GET /organizations.json
  # Lists every organization for the index view.
  def index
    @organizations = Organization.all
  end
end
Remove the erroneous page-caching statements (page_cache_directory and
caches_page :index) from OrganizationsController.
class OrganizationsController < ApplicationController
# GET /organizations
# GET /organizations.json
# Web/API action: lists every organization for the index view.
def index
@organizations = Organization.all
end
end |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Rschedule
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Default display format for both Time and Date values.
# NOTE(review): '%m-%d-%Y' contains no hour/minute directives, so Time
# values render date-only — confirm this is intended.
my_date_formats = { :default => '%m-%d-%Y' }
Time::DATE_FORMATS.merge!(my_date_formats)
Date::DATE_FORMATS.merge!(my_date_formats)
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
# config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
end
end
Fix the default time format: render Time values as %H:%M instead of the
date-only %m-%d-%Y pattern.
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module Rschedule
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Default display formats: times as HH:MM, dates as MM-DD-YYYY.
    # (Removed the dangling `my_date_formats =` assignment that silently
    # captured the first merge! result and was never used.)
    Time::DATE_FORMATS.merge!({ :default => '%H:%M' })
    Date::DATE_FORMATS.merge!({ :default => '%m-%d-%Y' })
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Use SQL instead of Active Record's schema dumper when creating the database.
    # This is necessary if your schema can't be completely dumped by the schema dumper,
    # like if you have constraints or database-specific column types
    # config.active_record.schema_format = :sql
    # Enforce whitelist mode for mass assignment.
    # This will create an empty whitelist of attributes available for mass-assignment for all models
    # in your app. As such, your models will need to explicitly whitelist or blacklist accessible
    # parameters by using an attr_accessible or attr_protected declaration.
    # config.active_record.whitelist_attributes = true
    # Enable the asset pipeline
    config.assets.enabled = true
    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1.0'
  end
end
|
#encoding: utf-8
#############################################################################
#
# Estimancy, Open Source project estimation web application
# Copyright (c) 2014-2015 Estimancy (http://www.estimancy.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# ===================================================================
#
# ProjEstimate, Open Source project estimation web application
# Copyright (c) 2012-2013 Spirula (http://www.spirula.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
class OrganizationsController < ApplicationController
load_resource
require 'rubygems'
require 'securerandom'
include ProjectsHelper
# Exports the organization's (non-model) projects that match the submitted
# report filters as a CSV attachment.
#
# Filters: every non-blank entry of params[:report] becomes an equality
# condition; params[:title] is matched with SQL LIKE; when both report_date
# bounds are present, start_date is restricted to that range.
def generate_report
  conditions = Hash.new
  params[:report].each do |i|
    unless i.last.blank? or i.last.nil?
      conditions[i.first] = i.last
    end
  end
  @organization = @current_organization
  if params[:report_date][:start_date].blank? || params[:report_date][:end_date].blank?
    @projects = @organization.projects.where(is_model: false).where(conditions).where("title like ?", "%#{params[:title]}%").all
  else
    # Bug fix: the LIKE pattern used to be the literal string '%?%' — the
    # placeholder was inside quotes, so it was never bound and the title
    # filter matched nothing sensible. Bind it like the branch above does.
    @projects = @organization.projects.where(is_model: false).where(conditions).where(:start_date => Time.parse(params[:report_date][:start_date])..Time.parse(params[:report_date][:end_date])).where("title like ?", "%#{params[:title]}%").all
  end
  csv_string = CSV.generate(:col_sep => I18n.t(:general_csv_separator)) do |csv|
    # Optional header row: fixed columns followed by the organization's
    # custom field names.
    if params[:with_header] == "checked"
      csv << [
        I18n.t(:project),
        I18n.t(:label_project_version),
        I18n.t(:label_product_name),
        I18n.t(:description),
        I18n.t(:start_date),
        I18n.t(:platform_category),
        I18n.t(:project_category),
        I18n.t(:acquisition_category),
        I18n.t(:project_area),
        I18n.t(:state),
        I18n.t(:creator),
      ] + @organization.fields.map(&:name)
    end
    tmp = Array.new
    @projects.each do |project|
      tmp = [
        project.title,
        project.version,
        project.product_name,
        ActionView::Base.full_sanitizer.sanitize(project.description),
        project.start_date,
        project.platform_category,
        project.project_category,
        project.acquisition_category,
        project.project_area,
        project.estimation_status,
        project.creator
      ]
      @organization.fields.each do |field|
        pf = ProjectField.where(field_id: field.id, project_id: project.id).first
        # '-' when the project has no value for this custom field.
        tmp = tmp + [ pf.nil? ? '-' : convert_with_precision(pf.value.to_f / field.coefficient.to_f, user_number_precision) ]
      end
      csv << tmp
    end
  end
  send_data(csv_string, :type => 'text/csv; header=present', :disposition => "attachment; filename=Rapport-#{Time.now}.csv")
end
# Renders the report filter form for the given organization.
def report
@organization = Organization.find(params[:organization_id])
end
# Loads everything the authorization screen needs: the organization's
# groups, the defined permissions bucketed by object type, and the unique
# sorted category list for each bucket.
def authorization
@organization = Organization.find(params[:organization_id])
set_breadcrumbs "Organizations" => "/organizationals_params", @organization.to_s => ""
@groups = @organization.groups
# Permission buckets, one per object_type (plus master permissions).
@organization_permissions = Permission.order('name').defined.select{ |i| i.object_type == "organization_super_admin_objects" }
@global_permissions = Permission.order('name').defined.select{ |i| i.object_type == "general_objects" }
@permission_projects = Permission.order('name').defined.select{ |i| i.object_type == "project_dependencies_objects" }
@modules_permissions = Permission.order('name').defined.select{ |i| i.object_type == "module_objects" }
@master_permissions = Permission.order('name').defined.select{ |i| i.is_master_permission }
# Distinct, sorted category names per bucket, for grouping in the view.
@permissions_classes_organization = @organization_permissions.map(&:category).uniq.sort
@permissions_classes_globals = @global_permissions.map(&:category).uniq.sort
@permissions_classes_projects = @permission_projects.map(&:category).uniq.sort
@permissions_classes_masters = @master_permissions.map(&:category).uniq.sort
@permissions_classes_modules = @modules_permissions.map(&:category).uniq.sort
@project_security_levels = @organization.project_security_levels
end
# Loads the organization-level settings screen: technologies, custom
# fields, work element types, profiles, groups and estimation models.
def setting
  organization = Organization.find(params[:organization_id])
  @organization = organization
  set_breadcrumbs "Organizations" => "/organizationals_params", organization.to_s => ""

  @technologies          = organization.organization_technologies
  @fields                = organization.fields
  @work_element_types    = organization.work_element_types
  @organization_profiles = organization.organization_profiles
  @organization_group    = organization.groups
  @estimation_models     = organization.projects.where(:is_model => true)
end
# Loads the estimation-modules configuration screen (GUW models, WBS
# activities, size units/types, technologies and AMOA models).
def module_estimation
  organization = Organization.find(params[:organization_id])
  @organization = organization
  set_breadcrumbs "Organizations" => "/organizationals_params", organization.to_s => ""

  @guw_models      = organization.guw_models
  @wbs_activities  = organization.wbs_activities
  @size_units      = SizeUnit.all
  @technologies    = organization.organization_technologies
  @size_unit_types = organization.size_unit_types
  @amoa_models     = organization.amoa_models
end
# Displays the organization's user-management page.
def users
  organization = Organization.find(params[:organization_id])
  @organization = organization
  set_breadcrumbs "Organizations" => "/organizationals_params", organization.to_s => ""
end
# Lists the (non-model) estimations of an organization.
# Image organizations are templates: their estimations must not be
# browsed, so access is refused with a flash error and a redirect.
def estimations
  @organization = Organization.find(params[:organization_id])
  # Idiomatic truthiness check instead of `== true` (boolean column).
  if @organization.is_image_organization
    redirect_to("/organizationals_params", flash: { error: "Vous ne pouvez pas accéder aux estimations d'une organization image"}) and return
  end
  set_breadcrumbs "Organizations" => "/organizationals_params", @organization.to_s => ""
  @projects = @organization.projects.where(is_model: false).all
end
# New organization from image
# Renders the "new organization from image" form only; the actual work
# happens in #create_organization_from_image when the form is submitted.
def new_organization_from_image
end
# Method that execute the duplication: duplicate estimation model for organization
#
# Deep-copies the project +project_id+ (via the amoeba gem) into the
# organization +new_organization_id+, then remaps every duplicated
# association from the source organization's records onto the target
# organization's own copies. Matching is done through the +copy_id+
# column that the duplication leaves on each copied row.
#
# Remapped data: estimation status, PBS component tree (ancestry),
# project securities (groups and users), module_project associations,
# views and widgets, GUW groups/units of work, UOW inputs, WBS activity
# inputs and per-component estimation values.
#
# Returns the newly created Project, or nil when the initial save fails.
def execute_duplication(project_id, new_organization_id)
  #begin
  old_prj = Project.find(project_id)
  new_organization = Organization.find(new_organization_id)
  new_prj = old_prj.amoeba_dup #amoeba gem is configured in Project class model
  new_prj.organization_id = new_organization_id
  new_prj.title = old_prj.title
  new_prj.description = old_prj.description
  # Point the copy at the target organization's equivalent estimation status.
  new_estimation_status = new_organization.estimation_statuses.where(copy_id: new_prj.estimation_status_id).first
  new_estimation_status_id = new_estimation_status.nil? ? nil : new_estimation_status.id
  new_prj.estimation_status_id = new_estimation_status_id
  if old_prj.is_model
    new_prj.is_model = true
  else
    new_prj.is_model = false
  end
  if new_prj.save
    old_prj.save #Original project copy number will be incremented to 1
    #Managing the component tree : PBS
    pe_wbs_product = new_prj.pe_wbs_projects.products_wbs.first
    # For PBS
    new_prj_components = pe_wbs_product.pbs_project_elements
    # Rebuild each copied component's ancestry string: its ancestor ids
    # still reference the source tree, so resolve each one to its copy.
    new_prj_components.each do |new_c|
      new_ancestor_ids_list = []
      new_c.ancestor_ids.each do |ancestor_id|
        ancestor_id = PbsProjectElement.find_by_pe_wbs_project_id_and_copy_id(new_c.pe_wbs_project_id, ancestor_id).id
        new_ancestor_ids_list.push(ancestor_id)
      end
      new_c.ancestry = new_ancestor_ids_list.join('/')
      new_c.save
    end
    #Update the project securities for the current user who create the estimation from model
    #if params[:action_name] == "create_project_from_template"
    if old_prj.is_model
      creator_securities = old_prj.creator.project_securities_for_select(new_prj.id)
      unless creator_securities.nil?
        creator_securities.update_attribute(:user_id, current_user.id)
      end
    end
    #Other project securities for groups
    new_prj.project_securities.where('group_id IS NOT NULL').each do |project_security|
      new_security_level = new_organization.project_security_levels.where(copy_id: project_security.project_security_level_id).first
      new_group = new_organization.groups.where(copy_id: project_security.group_id).first
      # Drop securities that cannot be remapped into the new organization.
      if new_security_level.nil? || new_group.nil?
        project_security.destroy
      else
        project_security.update_attributes(project_security_level_id: new_security_level.id, group_id: new_group.id)
      end
    end
    #Other project securities for users
    new_prj.project_securities.where('user_id IS NOT NULL').each do |project_security|
      new_security_level = new_organization.project_security_levels.where(copy_id: project_security.project_security_level_id).first
      if new_security_level.nil?
        project_security.destroy
      else
        project_security.update_attributes(project_security_level_id: new_security_level.id)
      end
    end
    # For ModuleProject associations
    old_prj.module_projects.group(:id).each do |old_mp|
      new_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, old_mp.id)
      # ModuleProject Associations for the new project
      old_mp.associated_module_projects.each do |associated_mp|
        new_associated_mp = ModuleProject.where('project_id = ? AND copy_id = ?', new_prj.id, associated_mp.id).first
        new_mp.associated_module_projects << new_associated_mp
      end
      # if the module_project view is nil
      #if new_mp.view.nil?
      #  default_view = new_organization.views.where('pemodule_id = ? AND is_default_view = ?', new_mp.pemodule_id, true).first
      #  if default_view.nil?
      #    default_view = View.create(name: "#{new_mp} view", description: "", pemodule_id: new_mp.pemodule_id, organization_id: new_organization_id)
      #  end
      #  new_mp.update_attribute(:view_id, default_view.id)
      #end
      #Recreate view for all moduleproject as the projects are not is the same organization
      #Copy the views and widgets for the new project
      #mp_default_view =
      #if old_mp.view.nil?
      #
      #else
      #
      #end
      new_view = View.create(organization_id: new_organization_id, name: "#{new_prj.to_s} : view for #{new_mp.to_s}", description: "Please rename the view's name and description if needed.")
      # We have to copy all the selected view's widgets in a new view for the current module_project
      if old_mp.view
        old_mp_view_widgets = old_mp.view.views_widgets.all
        old_mp_view_widgets.each do |view_widget|
          new_view_widget_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, view_widget.module_project_id)
          new_view_widget_mp_id = new_view_widget_mp.nil? ? nil : new_view_widget_mp.id
          widget_est_val = view_widget.estimation_value
          unless widget_est_val.nil?
            in_out = widget_est_val.in_out
            widget_pe_attribute_id = widget_est_val.pe_attribute_id
            unless new_view_widget_mp.nil?
              # Find the copied estimation value matching the widget's attribute/direction.
              new_estimation_value = new_view_widget_mp.estimation_values.where('pe_attribute_id = ? AND in_out=?', widget_pe_attribute_id, in_out).last
              estimation_value_id = new_estimation_value.nil? ? nil : new_estimation_value.id
              widget_copy = ViewsWidget.create(view_id: new_view.id, module_project_id: new_view_widget_mp_id, estimation_value_id: estimation_value_id, name: view_widget.name, show_name: view_widget.show_name,
                                               icon_class: view_widget.icon_class, color: view_widget.color, show_min_max: view_widget.show_min_max, widget_type: view_widget.widget_type,
                                               width: view_widget.width, height: view_widget.height, position: view_widget.position, position_x: view_widget.position_x, position_y: view_widget.position_y)
              # Re-link any project field bound to the old widget onto the copy.
              pf = ProjectField.where(project_id: new_prj.id, views_widget_id: view_widget.id).first
              unless pf.nil?
                new_field = new_organization.fields.where(copy_id: pf.field_id).first
                pf.views_widget_id = widget_copy.id
                pf.field_id = new_field.nil? ? nil : new_field.id
                pf.save
              end
            end
          end
        end
      end
      #update the new module_project view
      new_mp.update_attribute(:view_id, new_view.id)
      ###end
      #Update the Unit of works's groups
      new_mp.guw_unit_of_work_groups.each do |guw_group|
        new_pbs_project_element = new_prj_components.find_by_copy_id(guw_group.pbs_project_element_id)
        new_pbs_project_element_id = new_pbs_project_element.nil? ? nil : new_pbs_project_element.id
        #technology
        new_technology = new_organization.organization_technologies.where(copy_id: guw_group.organization_technology_id).first
        new_technology_id = new_technology.nil? ? nil : new_technology.id
        guw_group.update_attributes(pbs_project_element_id: new_pbs_project_element_id, organization_technology_id: new_technology_id)
        # Update the group unit of works and attributes
        guw_group.guw_unit_of_works.each do |guw_uow|
          new_uow_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, guw_uow.module_project_id)
          new_uow_mp_id = new_uow_mp.nil? ? nil : new_uow_mp.id
          #PBS
          new_pbs = new_prj_components.find_by_copy_id(guw_uow.pbs_project_element_id)
          new_pbs_id = new_pbs.nil? ? nil : new_pbs.id
          # GuwModel
          new_guw_model = new_organization.guw_models.where(copy_id: guw_uow.guw_model_id).first
          new_guw_model_id = new_guw_model.nil? ? nil : new_guw_model.id
          # guw_work_unit
          # Work unit, type and complexity can only be resolved through the
          # copied GUW model; without it they are cleared to nil.
          if !new_guw_model.nil?
            new_guw_work_unit = new_guw_model.guw_work_units.where(copy_id: guw_uow.guw_work_unit_id).first
            new_guw_work_unit_id = new_guw_work_unit.nil? ? nil : new_guw_work_unit.id
            #Type
            new_guw_type = new_guw_model.guw_types.where(copy_id: guw_uow.guw_type_id).first
            new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
            #Complexity
            if !guw_uow.guw_complexity_id.nil? && !new_guw_type.nil?
              new_complexity = new_guw_type.guw_complexities.where(copy_id: guw_uow.guw_complexity_id).first
              new_complexity_id = new_complexity.nil? ? nil : new_complexity.id
            else
              new_complexity_id = nil
            end
          else
            new_guw_work_unit_id = nil
            new_guw_type_id = nil
            new_complexity_id = nil
          end
          #Technology
          uow_new_technology = new_organization.organization_technologies.where(copy_id: guw_uow.organization_technology_id).first
          uow_new_technology_id = uow_new_technology.nil? ? nil : uow_new_technology.id
          guw_uow.update_attributes(module_project_id: new_uow_mp_id, pbs_project_element_id: new_pbs_id, guw_model_id: new_guw_model_id,
                                    guw_type_id: new_guw_type_id, guw_work_unit_id: new_guw_work_unit_id, guw_complexity_id: new_complexity_id,
                                    organization_technology_id: uow_new_technology_id)
        end
      end
      # UOW-INPUTS
      new_mp.uow_inputs.each do |uo|
        new_pbs_project_element = new_prj_components.find_by_copy_id(uo.pbs_project_element_id)
        new_pbs_project_element_id = new_pbs_project_element.nil? ? nil : new_pbs_project_element.id
        uo.update_attribute(:pbs_project_element_id, new_pbs_project_element_id)
      end
      #WBS-ACTIVITY-INPUTS
      new_mp.wbs_activity_inputs.each do |activity_input|
        new_wbs_activity = new_organization.wbs_activities.where(copy_id: activity_input.wbs_activity_id).first
        unless new_wbs_activity.nil?
          new_wbs_activity_ratio = new_wbs_activity.wbs_activity_ratios.where(copy_id: activity_input.wbs_activity_ratio_id).first
          unless new_wbs_activity_ratio.nil?
            activity_input.update_attributes(wbs_activity_id: new_wbs_activity.id, wbs_activity_ratio_id: new_wbs_activity_ratio.id)
          end
        end
      end
      # Re-key the per-component estimation value hashes: entries indexed
      # by old component ids are moved to the new component ids.
      ["input", "output"].each do |io|
        new_mp.pemodule.pe_attributes.each do |attr|
          old_prj.pbs_project_elements.each do |old_component|
            new_prj_components.each do |new_component|
              ev = new_mp.estimation_values.where(pe_attribute_id: attr.id, in_out: io).first
              unless ev.nil?
                ev.string_data_low[new_component.id.to_i] = ev.string_data_low.delete old_component.id
                ev.string_data_most_likely[new_component.id.to_i] = ev.string_data_most_likely.delete old_component.id
                ev.string_data_high[new_component.id.to_i] = ev.string_data_high.delete old_component.id
                ev.string_data_probable[new_component.id.to_i] = ev.string_data_probable.delete old_component.id
                ev.save
              end
            end
          end
        end
      end
    end
  else
    new_prj = nil
  end
  #rescue
  #new_prj = nil
  #end
  new_prj
end
# Create New organization from selected image organization
# Or duplicate current selected organization
#
# Entry point for two flows selected by params[:action_name]:
#  * "copy_organization"           — duplicate the current organization;
#  * "new_organization_from_image" — create a new organization from an
#    image (template) organization, together with an admin user.
# The image organization is deep-copied (amoeba), then the statuses
# workflow, group roles, WBS activities, projects and module model
# references are remapped onto the copies through their copy_id, in the
# same way execute_duplication does for a single project.
# Always redirects back, reporting success or failure through flash.
def create_organization_from_image
  authorize! :manage, Organization
  case params[:action_name]
  #Duplicate organization
  when "copy_organization"
    organization_image = Organization.find(params[:organization_id])
  #Create the organization from image organization
  when "new_organization_from_image"
    organization_image_id = params[:organization_image]
    if organization_image_id.nil?
      flash[:warning] = "Veuillez sélectionner une organisation image pour continuer"
    else
      organization_image = Organization.find(organization_image_id)
      @organization_name = params[:organization_name]
      @firstname = params[:firstname]
      @lastname = params[:lastname]
      @email = params[:email]
      @login_name = params[:identifiant]
      @password = params[:password]
      # Generate a random password when none was provided in the form.
      if @password.empty?
        @password = SecureRandom.hex(8)
      end
      change_password_required = params[:change_password_required]
    end
  else
    flash[:error] = "Aucune organization sélectionnée"
    redirect_to :back and return
  end
  if organization_image.nil?
    flash[:warning] = "Veuillez sélectionner une organisation pour continuer"
  else
    new_organization = organization_image.amoeba_dup
    if params[:action_name] == "new_organization_from_image"
      new_organization.name = @organization_name
    elsif params[:action_name] == "copy_organization"
      # For a plain copy, record the provenance in the description.
      new_organization.description << "\n \n Cette organisation est une copie de l'organisation #{organization_image.name}."
      new_organization.description << "\n #{I18n.l(Time.now)} : #{I18n.t(:organization_copied_by, username: current_user.name)}"
    end
    new_organization.is_image_organization = false
    if new_organization.save
      organization_image.save #Original organization copy number will be incremented to 1
      #Copy the organization estimation_statuses workflow and groups/roles
      new_estimation_statuses = new_organization.estimation_statuses
      new_estimation_statuses.each do |estimation_status|
        copied_status = EstimationStatus.find(estimation_status.copy_id)
        #Get the to_transitions for the Statuses Workflow
        copied_status.to_transition_statuses.each do |to_transition|
          new_to_transition = new_estimation_statuses.where(copy_id: to_transition.id).first
          unless new_to_transition.nil?
            StatusTransition.create(from_transition_status_id: estimation_status.id, to_transition_status_id: new_to_transition.id)
          end
        end
      end
      #Get the estimation_statuses role / by group
      new_organization.project_security_levels.each do |project_security_level|
        project_security_level.estimation_status_group_roles.each do |group_role|
          new_group = new_organization.groups.where(copy_id: group_role.group_id).first
          estimation_status = new_organization.estimation_statuses.where(copy_id: group_role.estimation_status_id).first
          unless estimation_status.nil?
            # NOTE(review): new_group may be nil here; `new_group.id` would
            # then raise NoMethodError — confirm every role's group is copied.
            group_role.update_attributes(organization_id: new_organization.id, estimation_status_id: estimation_status.id, group_id: new_group.id)
          end
        end
      end
      #Then copy the image organization estimation models
      if params[:action_name] == "new_organization_from_image"
        # Create a user in the Admin group of the new organization
        admin_user = User.new(first_name: @firstname, last_name: @lastname, login_name: @login_name, email: @email, password: @password, password_confirmation: @password, super_admin: false)
        # Add the user to the created organization
        admin_group = new_organization.groups.where(name: '*USER').first #first_or_create(name: "*USER", organization_id: new_organization.id, description: "Groupe créé par défaut dans l'organisation pour la gestion des administrateurs")
        unless admin_group.nil?
          admin_user.groups << admin_group
          admin_user.save
        end
      elsif params[:action_name] == "copy_organization"
        # add users to groups
        organization_image.groups.each do |group|
          new_group = new_organization.groups.where(copy_id: group.id).first
          unless new_group.nil?
            new_group.users = group.users
            new_group.save
          end
        end
      end
      # Copy the WBS-Activities modules's Models instances
      organization_image.wbs_activities.each do |old_wbs_activity|
        new_wbs_activity = old_wbs_activity.amoeba_dup #amoeba gem is configured in WbsActivity class model
        new_wbs_activity.organization_id = new_organization.id
        new_wbs_activity.transaction do
          if new_wbs_activity.save
            old_wbs_activity.save #Original WbsActivity copy number will be incremented to 1
            #we also have to save to wbs_activity_ratio
            old_wbs_activity.wbs_activity_ratios.each do |ratio|
              ratio.save
            end
            #get new WBS Ratio elements
            new_wbs_activity_ratio_elts = []
            new_wbs_activity.wbs_activity_ratios.each do |ratio|
              ratio.wbs_activity_ratio_elements.each do |ratio_elt|
                new_wbs_activity_ratio_elts << ratio_elt
                #Update ratio elements profiles
                ratio_elt.wbs_activity_ratio_profiles.each do |activity_ratio_profile|
                  new_organization_profile = new_organization.organization_profiles.where(copy_id: activity_ratio_profile.organization_profile_id).first
                  unless new_organization_profile.nil?
                    activity_ratio_profile.update_attribute(:organization_profile_id, new_organization_profile.id)
                  end
                end
              end
            end
            #Managing the component tree
            # Parents are processed before children (ancestry_depth asc) so
            # each element's ancestors already exist when it is remapped.
            old_wbs_activity_elements = old_wbs_activity.wbs_activity_elements.order('ancestry_depth asc')
            old_wbs_activity_elements.each do |old_elt|
              new_elt = old_elt.amoeba_dup
              new_elt.wbs_activity_id = new_wbs_activity.id
              new_elt.save#(:validate => false)
              unless new_elt.is_root?
                new_ancestor_ids_list = []
                new_elt.ancestor_ids.each do |ancestor_id|
                  ancestor = WbsActivityElement.find_by_wbs_activity_id_and_copy_id(new_elt.wbs_activity_id, ancestor_id)
                  unless ancestor.nil?
                    ancestor_id = ancestor.id
                    new_ancestor_ids_list.push(ancestor_id)
                  end
                end
                new_elt.ancestry = new_ancestor_ids_list.join('/')
                corresponding_ratio_elts = new_wbs_activity_ratio_elts.select { |ratio_elt| ratio_elt.wbs_activity_element_id == new_elt.copy_id}.each do |ratio_elt|
                  ratio_elt.update_attribute('wbs_activity_element_id', new_elt.id)
                end
                new_elt.save(:validate => false)
              end
            end
          else
            flash[:error] = "#{new_wbs_activity.errors.full_messages.to_sentence}"
          end
        end
        # Update all the new organization module_project's guw_model with the current guw_model
        wbs_activity_copy_id = old_wbs_activity.id
        new_organization.module_projects.where(wbs_activity_id: wbs_activity_copy_id).update_all(wbs_activity_id: new_wbs_activity.id)
      end
      # copy the organization's projects
      organization_image.projects.all.each do |est_model|
        #DuplicateWorker.perform(est_model.id, new_organization.id)
        new_template = execute_duplication(est_model.id, new_organization.id)
        unless new_template.nil?
          new_template.is_model = est_model.is_model
          #new_template.original_model_id = nil
          new_template.save
        end
      end
      #update the project's ancestry
      new_organization.projects.all.each do |project|
        unless project.original_model_id.nil?
          new_original_model = new_organization.projects.where(copy_id: project.original_model_id).first
          new_original_model_id = new_original_model.nil? ? nil : new_original_model.id
          project.original_model_id = new_original_model_id
          project.save
        end
        unless project.ancestry.nil?
          new_ancestor_ids_list = []
          project.ancestor_ids.each do |ancestor_id|
            ancestor = new_organization.projects.where(copy_id: ancestor_id).first
            unless ancestor.nil?
              #ancestor_id = ancestor.id
              new_ancestor_ids_list.push(ancestor.id)
            end
          end
          project.ancestry = new_ancestor_ids_list.join('/')
          project.save
        end
      end
      # Update the Expert Judgement modules's Models instances
      new_organization.expert_judgement_instances.each do |expert_judgment|
        # Update all the new organization module_project's guw_model with the current guw_model
        expert_judgment_copy_id = expert_judgment.copy_id
        new_organization.module_projects.where(expert_judgement_instance_id: expert_judgment_copy_id).update_all(expert_judgement_instance_id: expert_judgment.id)
      end
      # Update the modules's GE Models instances
      new_organization.ge_models.each do |ge_model|
        # Update all the new organization module_project's guw_model with the current guw_model
        ge_copy_id = ge_model.copy_id
        new_organization.module_projects.where(ge_model_id: ge_copy_id).update_all(ge_model_id: ge_model.id)
      end
      # Copy the modules's GUW Models instances
      new_organization.guw_models.each do |guw_model|
        # Update all the new organization module_project's guw_model with the current guw_model
        copy_id = guw_model.copy_id
        new_organization.module_projects.where(guw_model_id: copy_id).update_all(guw_model_id: guw_model.id)
        guw_model.guw_types.each do |guw_type|
          # Copy the complexities technologies
          guw_type.guw_complexities.each do |guw_complexity|
            # Copy the complexities technologie
            guw_complexity.guw_complexity_technologies.each do |guw_complexity_technology|
              new_organization_technology = new_organization.organization_technologies.where(copy_id: guw_complexity_technology.organization_technology_id).first
              unless new_organization_technology.nil?
                guw_complexity_technology.update_attribute(:organization_technology_id, new_organization_technology.id)
              end
            end
            # Copy the complexities units of works
            guw_complexity.guw_complexity_work_units.each do |guw_complexity_work_unit|
              new_guw_work_unit = guw_model.guw_work_units.where(copy_id: guw_complexity_work_unit.guw_work_unit_id).first
              unless new_guw_work_unit.nil?
                guw_complexity_work_unit.update_attribute(:guw_work_unit_id, new_guw_work_unit.id)
              end
            end
          end
          #Guw UnitOfWorkAttributes
          guw_type.guw_unit_of_works.each do |guw_unit_of_work|
            guw_unit_of_work.guw_unit_of_work_attributes.each do |guw_uow_attr|
              new_guw_type = guw_model.guw_types.where(copy_id: guw_uow_attr.guw_type_id).first
              new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
              new_guw_attribute = guw_model.guw_attributes.where(copy_id: guw_uow_attr.guw_attribute_id).first
              new_guw_attribute_id = new_guw_attribute.nil? ? nil : new_guw_attribute.id
              guw_uow_attr.update_attributes(guw_type_id: new_guw_type_id, guw_attribute_id: new_guw_attribute_id)
            end
          end
          # Copy the GUW-attribute-complexity
          #guw_type.guw_type_complexities.each do |guw_type_complexity|
          #  guw_type_complexity.guw_attribute_complexities.each do |guw_attr_complexity|
          #
          #    new_guw_attribute = guw_model.guw_attributes.where(copy_id: guw_attr_complexity.guw_attribute_id).first
          #    new_guw_attribute_id = new_guw_attribute.nil? ? nil : new_guw_attribute.id
          #
          #    new_guw_type = guw_model.guw_types.where(copy_id: guw_type_complexity.guw_type_id).first
          #    new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
          #
          #    guw_attr_complexity.update_attributes(guw_type_id: new_guw_type_id, guw_attribute_id: new_guw_attribute_id)
          #  end
          #end
        end
        guw_model.guw_attributes.each do |guw_attribute|
          guw_attribute.guw_attribute_complexities.each do |guw_attr_complexity|
            new_guw_type = guw_model.guw_types.where(copy_id: guw_attr_complexity.guw_type_id).first
            new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
            # NOTE(review): new_guw_type may be nil here, in which case the
            # next line raises NoMethodError — confirm guw_type_id always resolves.
            new_guw_type_complexity = new_guw_type.guw_type_complexities.where(copy_id: guw_attr_complexity.guw_type_complexity_id).first
            new_guw_type_complexity_id = new_guw_type_complexity.nil? ? nil : new_guw_type_complexity.id
            guw_attr_complexity.update_attributes(guw_type_id: new_guw_type_id, guw_type_complexity_id: new_guw_type_complexity_id )
          end
        end
      end
      flash[:notice] = I18n.t(:notice_organization_successful_created)
    else
      flash[:error] = I18n.t('errors.messages.not_saved')
    end
  end
  redirect_to :back
end
# Renders the creation form for a brand new organization.
def new
  authorize! :create_organizations, Organization
  set_page_title 'Organizations'
  organization = Organization.new
  @organization = organization
  @groups = organization.groups
end
# Displays the organization edit screen together with every collection
# the form needs (attributes, complexities, technologies, users, ...).
def edit
  authorize! :edit_organizations, Organization
  set_page_title 'Organizations'

  organization = Organization.find(params[:id])
  @organization = organization
  set_breadcrumbs "Organizations" => "/organizationals_params", organization.to_s => ""

  @attributes = PeAttribute.defined.all
  @attribute_settings = AttributeOrganization.all(:conditions => {:organization_id => organization.id})
  @factors = Factor.order("factor_type")

  @complexities          = organization.organization_uow_complexities
  @ot                    = organization.organization_technologies.first
  @unitofworks           = organization.unit_of_works
  @users                 = organization.users
  @fields                = organization.fields
  @organization_profiles = organization.organization_profiles
  @work_element_types    = organization.work_element_types
end
# AJAX refresh: loads the chosen size unit plus the technologies list.
def refresh_value_elements
  size_unit_id = params[:size_unit_id]
  @size_unit = SizeUnit.find(size_unit_id)
  @technologies = OrganizationTechnology.all
end
# POST /organizations — creates an organization and seeds all of its
# default reference data: size unit types, units of work, complexities,
# technologies, profiles, estimation statuses and a default widget view.
def create
  authorize! :create_organizations, Organization
  @organization = Organization.new(params[:organization])
  # Organization's projects selected columns
  @organization.project_selected_columns = Project.default_selected_columns
  # Add current_user to the organization
  @organization.users << current_user
  # On save, seed the organization's default data (original comment said
  # "we create subcontractors" — appears stale).
  if @organization.save
    #Create default the size unit type's
    # NOTE(review): all three aliases are 'new' — looks like a copy/paste
    # slip ('modified'/'reused' were probably intended); confirm before changing.
    size_unit_types = [
      ['New', 'new', ""],
      ['Modified', 'new', ""],
      ['Reused', 'new', ""],
    ]
    size_unit_types.each do |i|
      sut = SizeUnitType.create(:name => i[0], :alias => i[1], :description => i[2], :organization_id => @organization.id)
      # Cross-link every technology with every size unit for this type.
      @organization.organization_technologies.each do |ot|
        SizeUnit.all.each do |su|
          TechnologySizeType.create(organization_id: sut.organization_id, organization_technology_id: ot.id, size_unit_id: su.id, size_unit_type_id: sut.id, value: 1)
        end
      end
    end
    # Default units of work: [name, alias, description].
    uow = [
      ['Données', 'data', "Création, modification, suppression, duplication de composants d'une base de données (tables, fichiers). Une UO doit être comptée pour chaque entité métier. Seules les entités métier sont comptabilisées."],
      ['Traitement', 'traitement', 'Création, modification, suppression, duplication de composants de visualisation, gestion de données, activation de fonctionnalités avec une interface de type Caractère (terminal passif).'],
      ['Batch', 'batch', "Création, modification, suppression, duplication de composants d'extraction ou de MAJ de données d'une source de données persistante. Par convention, cette UO ne couvre pas les interfaces. Cette UO couvre le nettoyage et la purge des tables."],
      ['Interfaces', 'interface', "Création, modification, suppression, duplication de composants d'interface de type : Médiation, Conversion, Transcodification, Transformation (les transformations sont implémentées en langage de programmation). Les 'Historisation avec clés techniques générée' sont à comptabiliser en 'Règle de gestion'"]
    ]
    uow.each do |i|
      @organization.unit_of_works.create(:name => i[0], :alias => i[1], :description => i[2], :state => 'defined')
    end
    # When the organization is created, factor complexities are copied from
    # the master "defined" ones (the defined ones have organization_id => nil).
    OrganizationUowComplexity.where(organization_id: nil).each do |o|
      ouc = OrganizationUowComplexity.new(name: o.name , organization_id: @organization.id, description: o.description, value: o.value, factor_id: o.factor_id, is_default: o.is_default, :state => 'defined')
      ouc.save(validate: false)
    end
    # Then the default unit-of-work complexity levels are created:
    # [name, alias, description, value, state].
    levels = [
      ['Simple', 'simple', "Simple", 1, "defined"],
      ['Moyen', 'moyen', "Moyen", 2, "defined"],
      ['Complexe', 'complexe', "Complexe", 4, "defined"]
    ]
    levels.each do |i|
      @organization.unit_of_works.each do |uow|
        ouc = OrganizationUowComplexity.new(:name => i[0], :alias => i[1], :description => i[2], :state => i[4], :unit_of_work_id => uow.id, :organization_id => @organization.id)
        ouc.save(validate: false)
        @organization.size_unit_types.each do |sut|
          SizeUnitTypeComplexity.create(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id, value: i[3])
        end
      end
    end
    # On save, master technologies are copied into the organization.
    Technology.all.each do |technology|
      ot = OrganizationTechnology.new(name: technology.name, alias: technology.name, description: technology.description, organization_id: @organization.id)
      ot.save(validate: false)
    end
    # Add MasterData Profiles to Organization
    Profile.all.each do |profile|
      op = OrganizationProfile.new(organization_id: @organization.id, name: profile.name, description: profile.description, cost_per_hour: profile.cost_per_hour)
      op.save
    end
    # Add some Estimations statuses in organization:
    # [number, alias, name, color, description].
    estimation_statuses = [
      ['0', 'preliminary', "Préliminaire", "999999", "Statut initial lors de la création de l'estimation"],
      ['1', 'in_progress', "En cours", "3a87ad", "En cours de modification"],
      ['2', 'in_review', "Relecture", "f89406", "En relecture"],
      ['3', 'checkpoint', "Contrôle", "b94a48", "En phase de contrôle"],
      ['4', 'released', "Confirmé", "468847", "Phase finale d'une estimation qui arrive à terme et qui sera retenue comme une version majeure"],
      ['5', 'rejected', "Rejeté", "333333", "L'estimation dans ce statut est rejetée et ne sera pas poursuivi"]
    ]
    estimation_statuses.each do |i|
      status = EstimationStatus.create(organization_id: @organization.id, status_number: i[0], status_alias: i[1], name: i[2], status_color: i[3], description: i[4])
    end
    #Add a default view for widgets
    view = View.create(:name => "Default view",
                       :description => "Default widgets's default view. If no view is selected for module project, this view will be automatically selected.",
                       :organization_id => @organization.id)
    redirect_to redirect_apply(edit_organization_path(@organization)), notice: "#{I18n.t(:notice_organization_successful_created)}"
  else
    render action: 'new'
  end
end
# PATCH/PUT /organizations/:id — updates an organization's attributes.
# On success, backfills any missing SizeUnitTypeComplexity join row so
# every complexity is linked to each of the organization's size unit
# types, then redirects. On failure, reloads everything the edit screen
# needs and re-renders it.
def update
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:id])
  if @organization.update_attributes(params[:organization])
    # Ensure each (complexity, size unit type) pair has a join record.
    OrganizationUowComplexity.where(organization_id: @organization.id).each do |ouc|
      @organization.size_unit_types.each do |sut|
        sutc = SizeUnitTypeComplexity.where(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id).first
        if sutc.nil?
          SizeUnitTypeComplexity.create(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id)
        end
      end
    end
    # Fixed: removed the space between `I18n.t` and its argument list.
    flash[:notice] = I18n.t(:notice_organization_successful_updated)
    redirect_to redirect_apply(edit_organization_path(@organization), nil, '/organizationals_params')
  else
    # Validation failed: rebuild the collections the edit template expects.
    @attributes = PeAttribute.defined.all
    @attribute_settings = AttributeOrganization.all(:conditions => {:organization_id => @organization.id})
    @complexities = @organization.organization_uow_complexities
    @ot = @organization.organization_technologies.first
    @unitofworks = @organization.unit_of_works
    @factors = Factor.order("factor_type")
    @technologies = OrganizationTechnology.all
    @size_unit_types = SizeUnitType.all
    @organization_profiles = @organization.organization_profiles
    @groups = @organization.groups
    @organization_group = @organization.groups
    @wbs_activities = @organization.wbs_activities
    @projects = @organization.projects
    @fields = @organization.fields
    @size_units = SizeUnit.all
    @guw_models = @organization.guw_models
    render action: 'edit'
  end
end
# Shows the confirmation page displayed before an organization is destroyed.
def confirm_organization_deletion
  organization_id = params[:organization_id]
  @organization = Organization.find(organization_id)
  authorize! :manage, Organization
end
# Destroys an organization, but only after the user explicitly ticked
# the confirmation checkbox on the confirmation page. "Cancel" returns
# to the organizations list; any other submit re-renders the
# confirmation template.
def destroy
  authorize! :manage, Organization
  @organization = Organization.find(params[:id])
  @organization_id = @organization.id
  case params[:commit]
  when I18n.t('delete')
    if params[:yes_confirmation] == 'selected'
      @organization.destroy
      if session[:organization_id] == params[:id]
        # NOTE(review): this stores an Organization object (not an id) in a
        # session slot that is compared against params[:id] above — confirm
        # whether `current_user.organizations.first.id` was intended.
        session[:organization_id] = current_user.organizations.first #session[:organization_id] = nil
      end
      flash[:notice] = I18n.t(:notice_organization_successful_deleted)
      redirect_to '/organizationals_params' and return
    else
      flash[:warning] = I18n.t('warning_need_organization_check_box_confirmation')
      render :template => 'organizations/confirm_organization_deletion', :locals => {:organization_id => @organization_id}
    end
  when I18n.t('cancel')
    redirect_to '/organizationals_params' and return
  else
    # Fixed: the confirmation template lives under organizations/ (see the
    # branch above and #confirm_organization_deletion); it was previously
    # rendered from projects/.
    render :template => 'organizations/confirm_organization_deletion', :locals => {:organization_id => @organization_id}
  end
end
# Destroys an organization without the checkbox flow, refusing when the
# organization still owns projects/estimations.
def destroy_save
  authorize! :manage, Organization
  @organization = Organization.find(params[:id])
  # An ActiveRecord association is never nil, so the former trailing
  # `|| projects.nil?` guard was dead code (and mis-ordered anyway);
  # `empty?` alone is the real check.
  if @organization.projects.empty?
    @organization.destroy
    # Clear the session's current organization if it was the one deleted.
    if session[:organization_id] == params[:id]
      session[:organization_id] = nil
    end
    flash[:notice] = I18n.t(:notice_organization_successful_deleted)
  else
    flash[:warning] = I18n.t(:warning_organization_cannot_be_deleted, value: @organization.name)
  end
  redirect_to '/organizationals_params'
end
# Entry screen listing the organizations the current user may manage.
# Super admins see everything; global managers see all except image
# organizations; everyone else sees only their own non-image organizations.
def organizationals_params
  set_page_title 'Organizational Parameters'
  set_breadcrumbs "Organizations" => "/organizationals_params", "Liste des organizations" => ""
  @organizations =
    if current_user.super_admin?
      Organization.all
    elsif can?(:manage, :all)
      Organization.all.reject(&:is_image_organization)
    else
      current_user.organizations.all.reject(&:is_image_organization)
    end
  @size_units = SizeUnit.all
  @factors = Factor.order("factor_type")
end
# Exports the organization's users (plus their group names) as an
# ISO-8859-1 encoded CSV download.
def export
  @organization = Organization.find(params[:organization_id])
  header = ['Prénom', 'Nom', 'Email', 'Login', 'Groupes']
  csv_string = CSV.generate(col_sep: ",") do |csv|
    csv << header
    @organization.users.each do |member|
      row = [member.first_name, member.last_name, member.email, member.login_name]
      csv << row + member.groups.map(&:name)
    end
  end
  send_data(csv_string.encode("ISO-8859-1"),
            type: 'text/csv; header=present',
            disposition: "attachment; filename='modele_import_utilisateurs.csv'")
end
# Imports users from an uploaded CSV file (ISO-8859-1).
# Expected columns: first name, last name, email, login, then zero or more
# group names. The first row is treated as a header and skipped.
# Each unknown login gets a new User with a random password attached to the
# current organization; listed groups are linked when they exist.
# Fixes: the inner group-column index shadowed the outer row index `i`
# (renamed), and the unused locals error_count / encoding were removed
# (params[:encoding] was never read — the encoding is hard-coded below).
def import_user
  sep = "#{params[:separator].blank? ? I18n.t(:general_csv_separator) : params[:separator]}"
  file = params[:file]
  CSV.open(file.path, 'r', :quote_char => "\"", :row_sep => :auto, :col_sep => sep, :encoding => "ISO-8859-1:ISO-8859-1") do |csv|
    csv.each_with_index do |row, row_index|
      next if row_index == 0 # skip the header row
      password = SecureRandom.hex(8)
      user = User.where(login_name: row[3]).first
      next unless user.nil?
      u = User.new(first_name: row[0],
                   last_name: row[1],
                   email: row[2],
                   login_name: row[3],
                   id_connexion: row[3],
                   super_admin: false,
                   password: password,
                   password_confirmation: password,
                   language_id: params[:language_id].to_i,
                   initials: "#{row[0].first}#{row[1].first}",
                   time_zone: "Paris",
                   object_per_page: 50,
                   auth_type: "Application",
                   number_precision: 2)
      u.save(validate: false)
      OrganizationsUsers.create(organization_id: @current_organization.id,
                                user_id: u.id)
      # Columns 5..n hold group names; group_offset no longer shadows the row index.
      (row.size - 4).times do |group_offset|
        group = Group.where(name: row[4 + group_offset], organization_id: @current_organization.id).first
        begin
          GroupsUsers.create(group_id: group.id,
                             user_id: u.id)
        rescue
          # Best effort: the group may not exist (group is nil) — skip silently.
        end
      end
    end
  end
  redirect_to organization_users_path(@current_organization)
end
# Stores the abacus values (size-unit x technology x size-unit-type) posted
# from the abacus form, creating or updating one TechnologySizeType row per cell.
# Fix: the original guarded `value.nil?` AFTER calling to_f, which never
# returns nil — the raw parameter is now checked before conversion so that
# missing cells are skipped instead of being coerced to 0.0.
def set_technology_size_type_abacus
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:organization])
  @technologies = @organization.organization_technologies
  @size_unit_types = @organization.size_unit_types
  @size_units = SizeUnit.all
  @technologies.each do |technology|
    @size_unit_types.each do |sut|
      @size_units.each do |size_unit|
        raw = params[:abacus]["#{size_unit.id}"]["#{technology.id}"]["#{sut.id}"]
        next if raw.nil?
        value = raw.to_f
        t = TechnologySizeType.where(organization_id: @organization.id,
                                     organization_technology_id: technology.id,
                                     size_unit_id: size_unit.id,
                                     size_unit_type_id: sut.id).first
        if t.nil?
          TechnologySizeType.create(organization_id: @organization.id,
                                    organization_technology_id: technology.id,
                                    size_unit_id: size_unit.id,
                                    size_unit_type_id: sut.id,
                                    value: value)
        else
          t.update_attributes(value: value)
        end
      end
    end
  end
  redirect_to edit_organization_path(@organization, :anchor => 'tabs-abacus-sut')
end
# Stores the technology x size-unit abacus values posted from the form,
# creating or updating one TechnologySizeUnit row per cell.
# Fix: as in set_technology_size_type_abacus, the `value.nil?` guard came
# after to_f (which never returns nil); the raw parameter is now checked
# before conversion so that missing cells are skipped instead of written as 0.0.
def set_technology_size_unit_abacus
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:organization])
  @technologies = @organization.organization_technologies
  @size_units = SizeUnit.all
  @technologies.each do |technology|
    @size_units.each do |size_unit|
      raw = params[:technology_size_units_abacus]["#{size_unit.id}"]["#{technology.id}"]
      next if raw.nil?
      value = raw.to_f
      t = TechnologySizeUnit.where(organization_id: @organization.id,
                                   organization_technology_id: technology.id,
                                   size_unit_id: size_unit.id).first
      if t.nil?
        TechnologySizeUnit.create(organization_id: @organization.id,
                                  organization_technology_id: technology.id,
                                  size_unit_id: size_unit.id,
                                  value: value)
      else
        t.update_attributes(value: value)
      end
    end
  end
  redirect_to edit_organization_path(@organization, :anchor => 'tabs-abacus-tsu')
end
# Saves the abacus matrix (unit of work x complexity) for one technology.
# One AbacusOrganization row exists per (uow, complexity, technology,
# organization) cell; the legacy Rails dynamic finder creates the row on
# demand, then its value is updated from the posted form.
def set_abacus
authorize! :edit_organizations, Organization
# NOTE(review): find_by_id returns nil for an unknown id — the next line
# would then raise NoMethodError; confirm the route always supplies a valid id.
@ot = OrganizationTechnology.find_by_id(params[:technology])
@complexities = @ot.organization.organization_uow_complexities
@unitofworks = @ot.unit_of_works
@unitofworks.each do |uow|
@complexities.each do |c|
a = AbacusOrganization.find_or_create_by_unit_of_work_id_and_organization_uow_complexity_id_and_organization_technology_id_and_organization_id(uow.id, c.id, @ot.id, params[:id])
begin
a.update_attribute(:value, params['abacus']["#{uow.id}"]["#{c.id}"])
rescue
# Best effort: a missing form cell raises here and that cell is skipped.
end
end
end
redirect_to redirect_apply(edit_organization_path(@ot.organization_id, :anchor => 'tabs-abacus-tsu'), nil, '/organizationals_params')
end
# Persists the size-unit-type / complexity synthesis values posted from the
# abacus form (nested params: sut id -> technology -> uow -> complexity).
# Fix: @organization was referenced in the redirect but its assignment had
# been commented out, so the path helper received nil.
def set_technology_uow_synthesis
  authorize! :manage_modules_instances, ModuleProject
  # Restored the organization lookup; falls back to the current organization
  # when the :organization param is absent — TODO confirm against the routes.
  @organization = params[:organization] ? Organization.find(params[:organization]) : @current_organization
  params[:abacus].each do |sut|
    sut.last.each do |ot|
      ot.last.each do |uow|
        uow.last.each do |cplx|
          sutc = SizeUnitTypeComplexity.where(size_unit_type_id: sut.first.to_i, organization_uow_complexity_id: cplx.first.to_i).first_or_create
          sutc.value = cplx.last
          sutc.save
        end
      end
    end
  end
  redirect_to redirect_apply(organization_module_estimation_path(@organization, :anchor => 'taille'), nil, '/organizationals_params')
end
# Update the organization's projects available inline columns:
# simply redirects to the column-selection tab of the current
# organization's settings page.
def set_available_inline_columns
  target = organization_setting_path(@current_organization, :anchor => 'tabs-select-columns-list')
  redirect_to target
end
# Persists the list of columns the user selected for inline display.
# The query_classname parameter decides which attribute receives the list.
# Fixes: removed a leftover `puts "test"` debug statement and an unused
# `params['query_classname'].constantize` call (constantize on user input
# is also a code-injection smell — the explicit case below is the safe
# dispatch on the raw string).
def update_available_inline_columns
  selected_columns = params['selected_inline_columns']
  unless selected_columns.nil?
    case params['query_classname']
    when "Project"
      @current_organization.project_selected_columns = selected_columns
    when "Organization"
      @current_organization.organization_selected_columns = selected_columns
    end
    @current_organization.save
  end
end
#def import_abacus
# authorize! :edit_organizations, Organization
# @organization = Organization.find(params[:id])
#
# file = params[:file]
#
# case File.extname(file.original_filename)
# when ".ods"
# workbook = Roo::Spreadsheet.open(file.path, extension: :ods)
# when ".xls"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xls)
# when ".xlsx"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xlsx)
# when ".xlsm"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xlsx)
# end
#
# workbook.sheets.each_with_index do |worksheet, k|
# #if sheet name blank, we use sheetN as default name
# name = worksheet
# if name != 'ReadMe' #The ReadMe sheet is only for guidance and don't have to be proceed
#
# @ot = OrganizationTechnology.find_or_create_by_name_and_alias_and_organization_id(:name => name,
# :alias => name,
# :organization_id => @organization.id)
#
# workbook.default_sheet=workbook.sheets[k]
# workbook.each_with_index do |row, i|
# row.each_with_index do |cell, j|
# unless row.nil?
# unless workbook.cell(1,j+1) == "Abacus" or workbook.cell(i+1,1) == "Abacus"
# if can? :manage, Organization
# @ouc = OrganizationUowComplexity.find_or_create_by_name_and_organization_id(:name => workbook.cell(1,j+1), :organization_id => @organization.id)
# end
#
# if can? :manage, Organization
# @uow = UnitOfWork.find_or_create_by_name_and_alias_and_organization_id(:name => workbook.cell(i+1,1), :alias => workbook.cell(i+1,1), :organization_id => @organization.id)
# unless @uow.organization_technologies.map(&:id).include?(@ot.id)
# @uow.organization_technologies << @ot
# end
# @uow.save
# end
#
# ao = AbacusOrganization.find_by_unit_of_work_id_and_organization_uow_complexity_id_and_organization_technology_id_and_organization_id(
# @uow.id,
# @ouc.id,
# @ot.id,
# @organization.id
# )
#
# if ao.nil?
# if can? :manage, Organization
# AbacusOrganization.create(
# :unit_of_work_id => @uow.id,
# :organization_uow_complexity_id => @ouc.id,
# :organization_technology_id => @ot.id,
# :organization_id => @organization.id,
# :value => workbook.cell(i+1, j+1))
# end
# else
# ao.update_attribute(:value, workbook.cell(i+1, j+1))
# end
# end
# end
# end
# end
# end
# end
#
# redirect_to redirect_apply(edit_organization_path(@organization.id), nil, '/organizationals_params')
#end
#
#def export_abacus
# authorize! :edit_organizations, Organization
#
# @organization = Organization.find(params[:id])
# p=Axlsx::Package.new
# wb=p.workbook
# @organization.organization_technologies.each_with_index do |ot|
# wb.add_worksheet(:name => ot.name) do |sheet|
# style_title = sheet.styles.add_style(:bg_color => 'B0E0E6', :sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title2 = sheet.styles.add_style(:sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title_red = sheet.styles.add_style(:bg_color => 'B0E0E6', :fg_color => 'FF0000', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :center})
# style_title_orange = sheet.styles.add_style(:bg_color => 'B0E0E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :center})
# style_title_right = sheet.styles.add_style(:bg_color => 'E6E6E6', :sz => 14, :b => true, :alignment => {:horizontal => :right})
# style_title_right_red = sheet.styles.add_style(:bg_color => 'E6E6E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :right})
# style_title_right_orange = sheet.styles.add_style(:bg_color => 'E6E6E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :right})
# style_data = sheet.styles.add_style(:sz => 12, :alignment => {:horizontal => :center}, :locked => false)
# style_date = sheet.styles.add_style(:format_code => 'YYYY-MM-DD HH:MM:SS')
# head = ['Abacus']
# head_style = [style_title2]
# @organization.organization_uow_complexities.each_with_index do |comp|
# head.push(comp.name)
# if comp.state == 'retired'
# head_style.push(style_title_red)
# elsif comp.state == 'draft'
# head_style.push(style_title_orange)
# else
# head_style.push(style_title)
# end
# end
# row=sheet.add_row(head, :style => head_style)
# ot.unit_of_works.each_with_index do |uow|
# uow_row = []
# if uow.state == 'retired'
# uow_row_style=[style_title_right_red]
# elsif uow.state == 'draft'
# uow_row_style=[style_title_right_orange]
# else
# uow_row_style=[style_title_right]
# end
# uow_row = [uow.name]
#
# @organization.organization_uow_complexities.each_with_index do |comp2, i|
# if AbacusOrganization.where(:unit_of_work_id => uow.id, :organization_uow_complexity_id => comp2.id, :organization_technology_id => ot.id, :organization_id => @organization.id).first.nil?
# data = ''
# else
# data = AbacusOrganization.where(:unit_of_work_id => uow.id,
# :organization_uow_complexity_id => comp2.id,
# :organization_technology_id => ot.id, :organization_id => @organization.id).first.value
# end
# uow_row_style.push(style_data)
# uow_row.push(data)
# end
# row=sheet.add_row(uow_row, :style => uow_row_style)
# end
# sheet.sheet_protection.delete_rows = true
# sheet.sheet_protection.delete_columns = true
# sheet.sheet_protection.format_cells = true
# sheet.sheet_protection.insert_columns = false
# sheet.sheet_protection.insert_rows = false
# sheet.sheet_protection.select_locked_cells = false
# sheet.sheet_protection.select_unlocked_cells = false
# sheet.sheet_protection.objects = false
# sheet.sheet_protection.sheet = true
# end
# end
# wb.add_worksheet(:name => 'ReadMe') do |sheet|
# style_title2 = sheet.styles.add_style(:sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title_right = sheet.styles.add_style(:bg_color => 'E6E6E6', :sz => 13, :b => true, :alignment => {:horizontal => :right})
# style_date = sheet.styles.add_style(:format_code => 'YYYY-MM-DD HH:MM:SS', :alignment => {:horizontal => :left})
# style_text = sheet.styles.add_style(:alignment => {:wrapText => :true})
# style_field = sheet.styles.add_style(:bg_color => 'F5F5F5', :sz => 12, :b => true)
#
# sheet.add_row(['This File is an export of a ProjEstimate abacus'], :style => style_title2)
# sheet.merge_cells 'A1:F1'
# sheet.add_row(['Organization: ', "#{@organization.name} (#{@organization.id})", @organization.description], :style => [style_title_right, 0, style_text])
# sheet.add_row(['Date: ', Time.now], :style => [style_title_right, style_date])
# sheet.add_row([' '])
# sheet.merge_cells 'A5:F5'
# sheet.add_row(['There is one sheet by technology. Each sheet is organized with the complexity by column and the Unit Of work by row.'])
# sheet.merge_cells 'A6:F6'
# sheet.add_row(['For the complexity and the Unit Of Work state, we are using the following color code : Red=Retired, Orange=Draft).'])
# sheet.merge_cells 'A7:F7'
# sheet.add_row(['In order to allow this abacus to be re-imported into ProjEstimate and to prevent users from accidentally changing the structure of the sheets, workbooks have been protected.'])
# sheet.merge_cells 'A8:F8'
# sheet.add_row(['Advanced users can remove the protection (there is no password). For further information you can have a look on the ProjEstimate Help.'])
# row=sheet.add_row(['For ProjEstimate Help, Click to go'])
# sheet.add_hyperlink :location => 'http://forge.estimancy.com/projects/pe/wiki/Organizations', :ref => "A#{row.index+1}"
# sheet.add_row([' '])
# sheet.add_row([' '])
# sheet.add_row(['Technologies'], :style => [style_title_right])
# sheet.add_row(['Alias', 'Name', 'Description', 'State', 'Productivity Ratio'], :style => style_field)
# @organization.organization_technologies.each_with_index do |ot|
# sheet.add_row([ot.alias, ot.name, ot.description, ot.state, ot.productivity_ratio], :style => [0, 0, style_text])
# end
# sheet.add_row([' '])
# sheet.add_row(['Complexities'], :style => [style_title_right])
# sheet.add_row(['Display Order', 'Name', 'Description', 'State'], :style => style_field)
# @organization.organization_uow_complexities.each_with_index do |comp|
# sheet.add_row([comp.display_order, comp.name, comp.description, comp.state], :style => [0, 0, style_text])
# end
# sheet.add_row([' '])
# sheet.add_row(['Units OF Works'], :style => [style_title_right])
# sheet.add_row(['Alias', 'Name', 'Description', 'State'], :style => style_field)
# @organization.unit_of_works.each_with_index do |uow|
# sheet.add_row([uow.alias, uow.name, uow.description, uow.state], :style => [0, 0, style_text])
# end
# sheet.column_widths 20, 32, 80, 10, 18
# end
# send_data p.to_stream.read, :filename => @organization.name+'.xlsx'
#end
# Duplicate the organization.
# NOTE: to be deleted later — superseded by the create_organization_from_image function.
# Clones an organization via amoeba and reports the outcome through flash.
def duplicate_organization
  authorize! :manage_master_data, :all
  source = Organization.find(params[:organization_id])
  copy = source.amoeba_dup
  if copy.save
    source.save # the original's copy number is incremented to 1
    flash[:notice] = I18n.t(:organization_successfully_copied)
  else
    flash[:error] = "#{ I18n.t(:errors_when_copying_organization)} : #{copy.errors.full_messages.join(', ')}"
  end
  redirect_to organizationals_params_path
end
# Shows an organization; access is guarded by the :show_organizations permission.
def show
authorize! :show_organizations, Organization
end
end
# Import/Export enhancement
#encoding: utf-8
#############################################################################
#
# Estimancy, Open Source project estimation web application
# Copyright (c) 2014-2015 Estimancy (http://www.estimancy.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# ===================================================================
#
# ProjEstimate, Open Source project estimation web application
# Copyright (c) 2012-2013 Spirula (http://www.spirula.fr)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#############################################################################
class OrganizationsController < ApplicationController
load_resource
require 'rubygems'
require 'securerandom'
include ProjectsHelper
# Builds a CSV report of the current organization's (non-model) projects,
# filtered by the posted report criteria, an optional title substring and an
# optional start-date range. Streams the result as an ISO-8859-1 download.
# Fix: the date-range branch filtered with `where("title like '%?%'")` — the
# `?` sat inside a string literal and no bind value was supplied, so the
# condition was broken; it is now parameterized like the sibling branch.
def generate_report
  # Keep only the report filters that were actually filled in.
  conditions = Hash.new
  params[:report].each do |i|
    # blank? already covers nil, so the former `or i.last.nil?` was redundant.
    unless i.last.blank?
      conditions[i.first] = i.last
    end
  end
  @organization = @current_organization
  if params[:report_date][:start_date].blank? || params[:report_date][:end_date].blank?
    @projects = @organization.projects.where(is_model: false).where(conditions).where("title like ?", "%#{params[:title]}%").all
  else
    @projects = @organization.projects.where(is_model: false).where(conditions).where(:start_date => Time.parse(params[:report_date][:start_date])..Time.parse(params[:report_date][:end_date])).where("title like ?", "%#{params[:title]}%").all
  end
  csv_string = CSV.generate(:col_sep => I18n.t(:general_csv_separator)) do |csv|
    if params[:with_header] == "checked"
      csv << [
        I18n.t(:project),
        I18n.t(:label_project_version),
        I18n.t(:label_product_name),
        I18n.t(:description),
        I18n.t(:start_date),
        I18n.t(:platform_category),
        I18n.t(:project_category),
        I18n.t(:acquisition_category),
        I18n.t(:project_area),
        I18n.t(:state),
        I18n.t(:creator),
      ] + @organization.fields.map(&:name)
    end
    tmp = Array.new
    @projects.each do |project|
      tmp = [
        project.title,
        project.version,
        project.product_name,
        ActionView::Base.full_sanitizer.sanitize(project.description),
        project.start_date,
        project.platform_category,
        project.project_category,
        project.acquisition_category,
        project.project_area,
        project.estimation_status,
        project.creator
      ]
      # Append one column per custom organization field (value scaled by the
      # field coefficient), '-' when the project has no value for the field.
      @organization.fields.each do |field|
        pf = ProjectField.where(field_id: field.id, project_id: project.id).first
        tmp = tmp + [ pf.nil? ? '-' : convert_with_precision(pf.value.to_f / field.coefficient.to_f, user_number_precision) ]
      end
      csv << tmp
    end
  end
  send_data(csv_string.encode("ISO-8859-1"), :type => 'text/csv; header=present', :disposition => "attachment; filename=Rapport-#{Time.now}.csv")
end
# Report-criteria screen for one organization (the form posts to generate_report).
def report
@organization = Organization.find(params[:organization_id])
end
# Gathers every permission family shown on the authorization tab, split by
# object type, plus their distinct categories and the organization's groups
# and project security levels.
def authorization
  @organization = Organization.find(params[:organization_id])
  set_breadcrumbs "Organizations" => "/organizationals_params", @organization.to_s => ""
  @groups = @organization.groups
  defined_permissions = Permission.order('name').defined
  @organization_permissions = defined_permissions.select { |perm| perm.object_type == "organization_super_admin_objects" }
  @global_permissions = defined_permissions.select { |perm| perm.object_type == "general_objects" }
  @permission_projects = defined_permissions.select { |perm| perm.object_type == "project_dependencies_objects" }
  @modules_permissions = defined_permissions.select { |perm| perm.object_type == "module_objects" }
  @master_permissions = defined_permissions.select(&:is_master_permission)
  @permissions_classes_organization = @organization_permissions.map(&:category).uniq.sort
  @permissions_classes_globals = @global_permissions.map(&:category).uniq.sort
  @permissions_classes_projects = @permission_projects.map(&:category).uniq.sort
  @permissions_classes_masters = @master_permissions.map(&:category).uniq.sort
  @permissions_classes_modules = @modules_permissions.map(&:category).uniq.sort
  @project_security_levels = @organization.project_security_levels
end
# Settings tab: loads every organization-scoped collection the view needs.
def setting
  org = Organization.find(params[:organization_id])
  @organization = org
  set_breadcrumbs "Organizations" => "/organizationals_params", org.to_s => ""
  @technologies = org.organization_technologies
  @fields = org.fields
  @work_element_types = org.work_element_types
  @organization_profiles = org.organization_profiles
  @organization_group = org.groups
  @estimation_models = org.projects.where(:is_model => true)
end
# Module-estimation tab: loads the models, activities and size metadata used
# by the estimation modules of this organization.
def module_estimation
  org = Organization.find(params[:organization_id])
  @organization = org
  set_breadcrumbs "Organizations" => "/organizationals_params", org.to_s => ""
  @guw_models = org.guw_models
  @wbs_activities = org.wbs_activities
  @size_units = SizeUnit.all
  @technologies = org.organization_technologies
  @size_unit_types = org.size_unit_types
  @amoa_models = org.amoa_models
end
# Users tab for one organization.
def users
  org = Organization.find(params[:organization_id])
  @organization = org
  set_breadcrumbs "Organizations" => "/organizationals_params", org.to_s => ""
end
# Lists the organization's estimations (non-model projects). Image
# organizations have no browsable estimations and bounce back to the list.
def estimations
  @organization = Organization.find(params[:organization_id])
  if @organization.is_image_organization == true
    redirect_to("/organizationals_params", flash: { error: "Vous ne pouvez pas accéder aux estimations d'une organization image"})
    return
  end
  set_breadcrumbs "Organizations" => "/organizationals_params", @organization.to_s => ""
  @projects = @organization.projects.where(is_model: false).all
end
# New organization from image.
# Renders the "new organization from an image organization" form; the actual
# creation is handled by create_organization_from_image.
def new_organization_from_image
end
# Executes the duplication: duplicates an estimation model/project into
# another organization.
#   project_id:          id of the source Project
#   new_organization_id: id of the target Organization
# Returns the new Project, or nil when the copy could not be saved.
# The amoeba gem performs the deep copy; afterwards every copied association
# (PBS tree, project securities, module projects, views/widgets, GUW groups,
# inputs, estimation values) is re-pointed at its counterpart in the new
# organization via the copy_id columns.
def execute_duplication(project_id, new_organization_id)
#begin
old_prj = Project.find(project_id)
new_organization = Organization.find(new_organization_id)
new_prj = old_prj.amoeba_dup #amoeba gem is configured in Project class model
new_prj.organization_id = new_organization_id
new_prj.title = old_prj.title
new_prj.description = old_prj.description
# Map the status onto the target organization's copy of it (nil when absent).
new_estimation_status = new_organization.estimation_statuses.where(copy_id: new_prj.estimation_status_id).first
new_estimation_status_id = new_estimation_status.nil? ? nil : new_estimation_status.id
new_prj.estimation_status_id = new_estimation_status_id
if old_prj.is_model
new_prj.is_model = true
else
new_prj.is_model = false
end
if new_prj.save
old_prj.save #Original project copy number will be incremented to 1
#Managing the component tree : PBS
pe_wbs_product = new_prj.pe_wbs_projects.products_wbs.first
# For PBS
new_prj_components = pe_wbs_product.pbs_project_elements
# Rebuild each copied component's ancestry string from the new ids of the
# copied ancestors (looked up by copy_id within the same wbs project).
new_prj_components.each do |new_c|
new_ancestor_ids_list = []
new_c.ancestor_ids.each do |ancestor_id|
ancestor_id = PbsProjectElement.find_by_pe_wbs_project_id_and_copy_id(new_c.pe_wbs_project_id, ancestor_id).id
new_ancestor_ids_list.push(ancestor_id)
end
new_c.ancestry = new_ancestor_ids_list.join('/')
new_c.save
end
#Update the project securities for the current user who create the estimation from model
#if params[:action_name] == "create_project_from_template"
if old_prj.is_model
creator_securities = old_prj.creator.project_securities_for_select(new_prj.id)
unless creator_securities.nil?
creator_securities.update_attribute(:user_id, current_user.id)
end
end
#Other project securities for groups
new_prj.project_securities.where('group_id IS NOT NULL').each do |project_security|
new_security_level = new_organization.project_security_levels.where(copy_id: project_security.project_security_level_id).first
new_group = new_organization.groups.where(copy_id: project_security.group_id).first
if new_security_level.nil? || new_group.nil?
project_security.destroy
else
project_security.update_attributes(project_security_level_id: new_security_level.id, group_id: new_group.id)
end
end
#Other project securities for users
new_prj.project_securities.where('user_id IS NOT NULL').each do |project_security|
new_security_level = new_organization.project_security_levels.where(copy_id: project_security.project_security_level_id).first
if new_security_level.nil?
project_security.destroy
else
project_security.update_attributes(project_security_level_id: new_security_level.id)
end
end
# For ModuleProject associations
old_prj.module_projects.group(:id).each do |old_mp|
new_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, old_mp.id)
# ModuleProject Associations for the new project
old_mp.associated_module_projects.each do |associated_mp|
new_associated_mp = ModuleProject.where('project_id = ? AND copy_id = ?', new_prj.id, associated_mp.id).first
new_mp.associated_module_projects << new_associated_mp
end
# if the module_project view is nil
#if new_mp.view.nil?
#  default_view = new_organization.views.where('pemodule_id = ? AND is_default_view = ?', new_mp.pemodule_id, true).first
#  if default_view.nil?
#    default_view = View.create(name: "#{new_mp} view", description: "", pemodule_id: new_mp.pemodule_id, organization_id: new_organization_id)
#  end
#  new_mp.update_attribute(:view_id, default_view.id)
#end
#Recreate view for all moduleproject as the projects are not is the same organization
#Copy the views and widgets for the new project
#mp_default_view =
#if old_mp.view.nil?
#
#else
#
#end
# A fresh view is always created per module_project in the new organization.
new_view = View.create(organization_id: new_organization_id, name: "#{new_prj.to_s} : view for #{new_mp.to_s}", description: "Please rename the view's name and description if needed.")
# We have to copy all the selected view's widgets in a new view for the current module_project
if old_mp.view
old_mp_view_widgets = old_mp.view.views_widgets.all
old_mp_view_widgets.each do |view_widget|
new_view_widget_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, view_widget.module_project_id)
new_view_widget_mp_id = new_view_widget_mp.nil? ? nil : new_view_widget_mp.id
widget_est_val = view_widget.estimation_value
unless widget_est_val.nil?
in_out = widget_est_val.in_out
widget_pe_attribute_id = widget_est_val.pe_attribute_id
unless new_view_widget_mp.nil?
# Find the matching estimation value on the copied module_project.
new_estimation_value = new_view_widget_mp.estimation_values.where('pe_attribute_id = ? AND in_out=?', widget_pe_attribute_id, in_out).last
estimation_value_id = new_estimation_value.nil? ? nil : new_estimation_value.id
widget_copy = ViewsWidget.create(view_id: new_view.id, module_project_id: new_view_widget_mp_id, estimation_value_id: estimation_value_id, name: view_widget.name, show_name: view_widget.show_name,
icon_class: view_widget.icon_class, color: view_widget.color, show_min_max: view_widget.show_min_max, widget_type: view_widget.widget_type,
width: view_widget.width, height: view_widget.height, position: view_widget.position, position_x: view_widget.position_x, position_y: view_widget.position_y)
# Re-point any project field bound to the old widget at the copy.
pf = ProjectField.where(project_id: new_prj.id, views_widget_id: view_widget.id).first
unless pf.nil?
new_field = new_organization.fields.where(copy_id: pf.field_id).first
pf.views_widget_id = widget_copy.id
pf.field_id = new_field.nil? ? nil : new_field.id
pf.save
end
end
end
end
end
#update the new module_project view
new_mp.update_attribute(:view_id, new_view.id)
###end
#Update the Unit of works's groups
new_mp.guw_unit_of_work_groups.each do |guw_group|
new_pbs_project_element = new_prj_components.find_by_copy_id(guw_group.pbs_project_element_id)
new_pbs_project_element_id = new_pbs_project_element.nil? ? nil : new_pbs_project_element.id
#technology
new_technology = new_organization.organization_technologies.where(copy_id: guw_group.organization_technology_id).first
new_technology_id = new_technology.nil? ? nil : new_technology.id
guw_group.update_attributes(pbs_project_element_id: new_pbs_project_element_id, organization_technology_id: new_technology_id)
# Update the group unit of works and attributes
guw_group.guw_unit_of_works.each do |guw_uow|
new_uow_mp = ModuleProject.find_by_project_id_and_copy_id(new_prj.id, guw_uow.module_project_id)
new_uow_mp_id = new_uow_mp.nil? ? nil : new_uow_mp.id
#PBS
new_pbs = new_prj_components.find_by_copy_id(guw_uow.pbs_project_element_id)
new_pbs_id = new_pbs.nil? ? nil : new_pbs.id
# GuwModel
new_guw_model = new_organization.guw_models.where(copy_id: guw_uow.guw_model_id).first
new_guw_model_id = new_guw_model.nil? ? nil : new_guw_model.id
# guw_work_unit
if !new_guw_model.nil?
new_guw_work_unit = new_guw_model.guw_work_units.where(copy_id: guw_uow.guw_work_unit_id).first
new_guw_work_unit_id = new_guw_work_unit.nil? ? nil : new_guw_work_unit.id
#Type
new_guw_type = new_guw_model.guw_types.where(copy_id: guw_uow.guw_type_id).first
new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
#Complexity
if !guw_uow.guw_complexity_id.nil? && !new_guw_type.nil?
new_complexity = new_guw_type.guw_complexities.where(copy_id: guw_uow.guw_complexity_id).first
new_complexity_id = new_complexity.nil? ? nil : new_complexity.id
else
new_complexity_id = nil
end
else
new_guw_work_unit_id = nil
new_guw_type_id = nil
new_complexity_id = nil
end
#Technology
uow_new_technology = new_organization.organization_technologies.where(copy_id: guw_uow.organization_technology_id).first
uow_new_technology_id = uow_new_technology.nil? ? nil : uow_new_technology.id
guw_uow.update_attributes(module_project_id: new_uow_mp_id, pbs_project_element_id: new_pbs_id, guw_model_id: new_guw_model_id,
guw_type_id: new_guw_type_id, guw_work_unit_id: new_guw_work_unit_id, guw_complexity_id: new_complexity_id,
organization_technology_id: uow_new_technology_id)
end
end
# UOW-INPUTS
new_mp.uow_inputs.each do |uo|
new_pbs_project_element = new_prj_components.find_by_copy_id(uo.pbs_project_element_id)
new_pbs_project_element_id = new_pbs_project_element.nil? ? nil : new_pbs_project_element.id
uo.update_attribute(:pbs_project_element_id, new_pbs_project_element_id)
end
#WBS-ACTIVITY-INPUTS
new_mp.wbs_activity_inputs.each do |activity_input|
new_wbs_activity = new_organization.wbs_activities.where(copy_id: activity_input.wbs_activity_id).first
unless new_wbs_activity.nil?
new_wbs_activity_ratio = new_wbs_activity.wbs_activity_ratios.where(copy_id: activity_input.wbs_activity_ratio_id).first
unless new_wbs_activity_ratio.nil?
activity_input.update_attributes(wbs_activity_id: new_wbs_activity.id, wbs_activity_ratio_id: new_wbs_activity_ratio.id)
end
end
end
# Re-key the per-component estimation data (low/most likely/high/probable)
# from the old component ids to the new ones.
# NOTE(review): this iterates every (old, new) component pair and deletes the
# old key unconditionally — presumably copy_id matching is implicit here;
# confirm the intent before touching this loop.
["input", "output"].each do |io|
new_mp.pemodule.pe_attributes.each do |attr|
old_prj.pbs_project_elements.each do |old_component|
new_prj_components.each do |new_component|
ev = new_mp.estimation_values.where(pe_attribute_id: attr.id, in_out: io).first
unless ev.nil?
ev.string_data_low[new_component.id.to_i] = ev.string_data_low.delete old_component.id
ev.string_data_most_likely[new_component.id.to_i] = ev.string_data_most_likely.delete old_component.id
ev.string_data_high[new_component.id.to_i] = ev.string_data_high.delete old_component.id
ev.string_data_probable[new_component.id.to_i] = ev.string_data_probable.delete old_component.id
ev.save
end
end
end
end
end
end
else
new_prj = nil
end
#rescue
#new_prj = nil
#end
new_prj
end
# Create a new organization from a selected "image" (template) organization,
# or duplicate the currently selected organization, depending on
# params[:action_name] ("new_organization_from_image" or "copy_organization").
#
# The copy is deep: estimation-status workflow, group roles, users (copy only),
# WBS activities with their ratio/element trees, projects (with ancestry
# remapping), and the Expert-Judgement / GE / GUW model instances are all
# duplicated and re-linked onto the new organization via their `copy_id`s.
# Side effects: many DB writes; sets flash and redirects back in all cases.
def create_organization_from_image
  authorize! :manage, Organization
  case params[:action_name]
  # Duplicate organization
  when "copy_organization"
    organization_image = Organization.find(params[:organization_id])
  # Create the organization from image organization
  when "new_organization_from_image"
    organization_image_id = params[:organization_image]
    if organization_image_id.nil?
      flash[:warning] = "Veuillez sélectionner une organisation image pour continuer"
    else
      organization_image = Organization.find(organization_image_id)
      # Attributes of the initial admin user of the new organization.
      @organization_name = params[:organization_name]
      @firstname = params[:firstname]
      @lastname = params[:lastname]
      @email = params[:email]
      @login_name = params[:identifiant]
      @password = params[:password]
      # Generate a random password when none was supplied in the form.
      if @password.empty?
        @password = SecureRandom.hex(8)
      end
      # NOTE(review): this local is never read afterwards — dead assignment?
      change_password_required = params[:change_password_required]
    end
  else
    flash[:error] = "Aucune organization sélectionnée"
    redirect_to :back and return
  end
  # organization_image stays nil when the warning branch above was taken.
  if organization_image.nil?
    flash[:warning] = "Veuillez sélectionner une organisation pour continuer"
  else
    new_organization = organization_image.amoeba_dup
    if params[:action_name] == "new_organization_from_image"
      new_organization.name = @organization_name
    elsif params[:action_name] == "copy_organization"
      # Audit trail appended to the copy's description (user-facing French text).
      new_organization.description << "\n \n Cette organisation est une copie de l'organisation #{organization_image.name}."
      new_organization.description << "\n #{I18n.l(Time.now)} : #{I18n.t(:organization_copied_by, username: current_user.name)}"
    end
    new_organization.is_image_organization = false
    if new_organization.save
      organization_image.save # Original organization copy number will be incremented to 1
      # Copy the organization estimation_statuses workflow and groups/roles
      new_estimation_statuses = new_organization.estimation_statuses
      new_estimation_statuses.each do |estimation_status|
        copied_status = EstimationStatus.find(estimation_status.copy_id)
        # Get the to_transitions for the Statuses Workflow
        copied_status.to_transition_statuses.each do |to_transition|
          new_to_transition = new_estimation_statuses.where(copy_id: to_transition.id).first
          unless new_to_transition.nil?
            StatusTransition.create(from_transition_status_id: estimation_status.id, to_transition_status_id: new_to_transition.id)
          end
        end
      end
      # Get the estimation_statuses role / by group
      new_organization.project_security_levels.each do |project_security_level|
        project_security_level.estimation_status_group_roles.each do |group_role|
          new_group = new_organization.groups.where(copy_id: group_role.group_id).first
          estimation_status = new_organization.estimation_statuses.where(copy_id: group_role.estimation_status_id).first
          unless estimation_status.nil?
            # NOTE(review): only estimation_status is nil-checked; `new_group.id`
            # would raise NoMethodError if no matching group was copied — confirm.
            group_role.update_attributes(organization_id: new_organization.id, estimation_status_id: estimation_status.id, group_id: new_group.id)
          end
        end
      end
      # Then copy the image organization estimation models
      if params[:action_name] == "new_organization_from_image"
        # Create a user in the Admin group of the new organization
        admin_user = User.new(first_name: @firstname, last_name: @lastname, login_name: @login_name, email: @email, password: @password, password_confirmation: @password, super_admin: false)
        # Add the user to the created organization
        admin_group = new_organization.groups.where(name: '*USER').first #first_or_create(name: "*USER", organization_id: new_organization.id, description: "Groupe créé par défaut dans l'organisation pour la gestion des administrateurs")
        unless admin_group.nil?
          admin_user.groups << admin_group
          admin_user.save
        end
      elsif params[:action_name] == "copy_organization"
        # Add the original organization's users to the copied groups.
        organization_image.groups.each do |group|
          new_group = new_organization.groups.where(copy_id: group.id).first
          unless new_group.nil?
            new_group.users = group.users
            new_group.save
          end
        end
      end
      # Copy the WBS-Activities modules's Models instances
      organization_image.wbs_activities.each do |old_wbs_activity|
        new_wbs_activity = old_wbs_activity.amoeba_dup # amoeba gem is configured in WbsActivity class model
        new_wbs_activity.organization_id = new_organization.id
        new_wbs_activity.transaction do
          if new_wbs_activity.save
            old_wbs_activity.save # Original WbsActivity copy number will be incremented to 1
            # We also have to save the wbs_activity_ratios (bumps their copy numbers).
            old_wbs_activity.wbs_activity_ratios.each do |ratio|
              ratio.save
            end
            # Collect new WBS Ratio elements while re-pointing their profiles.
            new_wbs_activity_ratio_elts = []
            new_wbs_activity.wbs_activity_ratios.each do |ratio|
              ratio.wbs_activity_ratio_elements.each do |ratio_elt|
                new_wbs_activity_ratio_elts << ratio_elt
                # Update ratio elements profiles
                ratio_elt.wbs_activity_ratio_profiles.each do |activity_ratio_profile|
                  new_organization_profile = new_organization.organization_profiles.where(copy_id: activity_ratio_profile.organization_profile_id).first
                  unless new_organization_profile.nil?
                    activity_ratio_profile.update_attribute(:organization_profile_id, new_organization_profile.id)
                  end
                end
              end
            end
            # Managing the component tree: parents are processed before children
            # (ordered by ancestry_depth) so ancestor ids can be remapped.
            old_wbs_activity_elements = old_wbs_activity.wbs_activity_elements.order('ancestry_depth asc')
            old_wbs_activity_elements.each do |old_elt|
              new_elt = old_elt.amoeba_dup
              new_elt.wbs_activity_id = new_wbs_activity.id
              new_elt.save#(:validate => false)
              unless new_elt.is_root?
                # Rebuild the ancestry string from the copied ancestors' new ids.
                new_ancestor_ids_list = []
                new_elt.ancestor_ids.each do |ancestor_id|
                  ancestor = WbsActivityElement.find_by_wbs_activity_id_and_copy_id(new_elt.wbs_activity_id, ancestor_id)
                  unless ancestor.nil?
                    ancestor_id = ancestor.id
                    new_ancestor_ids_list.push(ancestor_id)
                  end
                end
                new_elt.ancestry = new_ancestor_ids_list.join('/')
                # NOTE(review): the assigned local is unused — only the .each side
                # effect (re-pointing ratio elements) matters here.
                corresponding_ratio_elts = new_wbs_activity_ratio_elts.select { |ratio_elt| ratio_elt.wbs_activity_element_id == new_elt.copy_id}.each do |ratio_elt|
                  ratio_elt.update_attribute('wbs_activity_element_id', new_elt.id)
                end
                new_elt.save(:validate => false)
              end
            end
          else
            flash[:error] = "#{new_wbs_activity.errors.full_messages.to_sentence}"
          end
        end
        # Re-point module_projects from the old WbsActivity id to the new one.
        wbs_activity_copy_id = old_wbs_activity.id
        new_organization.module_projects.where(wbs_activity_id: wbs_activity_copy_id).update_all(wbs_activity_id: new_wbs_activity.id)
      end
      # Copy the organization's projects
      organization_image.projects.all.each do |est_model|
        #DuplicateWorker.perform(est_model.id, new_organization.id)
        new_template = execute_duplication(est_model.id, new_organization.id)
        unless new_template.nil?
          new_template.is_model = est_model.is_model
          #new_template.original_model_id = nil
          new_template.save
        end
      end
      # Update the projects' ancestry and original-model links via copy_id.
      new_organization.projects.all.each do |project|
        unless project.original_model_id.nil?
          new_original_model = new_organization.projects.where(copy_id: project.original_model_id).first
          new_original_model_id = new_original_model.nil? ? nil : new_original_model.id
          project.original_model_id = new_original_model_id
          project.save
        end
        unless project.ancestry.nil?
          new_ancestor_ids_list = []
          project.ancestor_ids.each do |ancestor_id|
            ancestor = new_organization.projects.where(copy_id: ancestor_id).first
            unless ancestor.nil?
              #ancestor_id = ancestor.id
              new_ancestor_ids_list.push(ancestor.id)
            end
          end
          project.ancestry = new_ancestor_ids_list.join('/')
          project.save
        end
      end
      # Update the Expert Judgement modules's Models instances
      new_organization.expert_judgement_instances.each do |expert_judgment|
        # Re-point module_projects from the copied instance id to the new one.
        expert_judgment_copy_id = expert_judgment.copy_id
        new_organization.module_projects.where(expert_judgement_instance_id: expert_judgment_copy_id).update_all(expert_judgement_instance_id: expert_judgment.id)
      end
      # Update the modules's GE Models instances
      new_organization.ge_models.each do |ge_model|
        # Re-point module_projects from the copied GE model id to the new one.
        ge_copy_id = ge_model.copy_id
        new_organization.module_projects.where(ge_model_id: ge_copy_id).update_all(ge_model_id: ge_model.id)
      end
      # Copy the modules's GUW Models instances
      new_organization.guw_models.each do |guw_model|
        # Re-point module_projects from the copied GUW model id to the new one.
        copy_id = guw_model.copy_id
        new_organization.module_projects.where(guw_model_id: copy_id).update_all(guw_model_id: guw_model.id)
        guw_model.guw_types.each do |guw_type|
          # Re-link the complexities of each type
          guw_type.guw_complexities.each do |guw_complexity|
            # Re-link the complexities' technologies
            guw_complexity.guw_complexity_technologies.each do |guw_complexity_technology|
              new_organization_technology = new_organization.organization_technologies.where(copy_id: guw_complexity_technology.organization_technology_id).first
              unless new_organization_technology.nil?
                guw_complexity_technology.update_attribute(:organization_technology_id, new_organization_technology.id)
              end
            end
            # Re-link the complexities' units of works
            guw_complexity.guw_complexity_work_units.each do |guw_complexity_work_unit|
              new_guw_work_unit = guw_model.guw_work_units.where(copy_id: guw_complexity_work_unit.guw_work_unit_id).first
              unless new_guw_work_unit.nil?
                guw_complexity_work_unit.update_attribute(:guw_work_unit_id, new_guw_work_unit.id)
              end
            end
          end
          # Guw UnitOfWorkAttributes
          guw_type.guw_unit_of_works.each do |guw_unit_of_work|
            guw_unit_of_work.guw_unit_of_work_attributes.each do |guw_uow_attr|
              new_guw_type = guw_model.guw_types.where(copy_id: guw_uow_attr.guw_type_id).first
              new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
              new_guw_attribute = guw_model.guw_attributes.where(copy_id: guw_uow_attr.guw_attribute_id).first
              new_guw_attribute_id = new_guw_attribute.nil? ? nil : new_guw_attribute.id
              guw_uow_attr.update_attributes(guw_type_id: new_guw_type_id, guw_attribute_id: new_guw_attribute_id)
            end
          end
          # Copy the GUW-attribute-complexity
          #guw_type.guw_type_complexities.each do |guw_type_complexity|
          #  guw_type_complexity.guw_attribute_complexities.each do |guw_attr_complexity|
          #
          #    new_guw_attribute = guw_model.guw_attributes.where(copy_id: guw_attr_complexity.guw_attribute_id).first
          #    new_guw_attribute_id = new_guw_attribute.nil? ? nil : new_guw_attribute.id
          #
          #    new_guw_type = guw_model.guw_types.where(copy_id: guw_type_complexity.guw_type_id).first
          #    new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
          #
          #    guw_attr_complexity.update_attributes(guw_type_id: new_guw_type_id, guw_attribute_id: new_guw_attribute_id)
          #  end
          #end
        end
        guw_model.guw_attributes.each do |guw_attribute|
          guw_attribute.guw_attribute_complexities.each do |guw_attr_complexity|
            new_guw_type = guw_model.guw_types.where(copy_id: guw_attr_complexity.guw_type_id).first
            new_guw_type_id = new_guw_type.nil? ? nil : new_guw_type.id
            # NOTE(review): unlike the line above, `new_guw_type` is dereferenced
            # here without a nil guard — would raise if the type was not copied.
            new_guw_type_complexity = new_guw_type.guw_type_complexities.where(copy_id: guw_attr_complexity.guw_type_complexity_id).first
            new_guw_type_complexity_id = new_guw_type_complexity.nil? ? nil : new_guw_type_complexity.id
            guw_attr_complexity.update_attributes(guw_type_id: new_guw_type_id, guw_type_complexity_id: new_guw_type_complexity_id )
          end
        end
      end
      flash[:notice] = I18n.t(:notice_organization_successful_created)
    else
      flash[:error] = I18n.t('errors.messages.not_saved')
    end
  end
  redirect_to :back
end
# GET /organizations/new — build a blank organization for the creation form.
def new
  authorize! :create_organizations, Organization
  set_page_title 'Organizations'
  @organization = Organization.new
  # Empty association on a fresh record; exposed for the form partial.
  @groups = @organization.groups
end
# GET /organizations/:id/edit — load the organization and all the reference
# collections the settings tabs render (attributes, complexities, factors,
# technologies, units of work, users, fields, profiles, work element types).
def edit
  authorize! :edit_organizations, Organization
  set_page_title 'Organizations'
  @organization = Organization.find(params[:id])
  set_breadcrumbs "Organizations" => "/organizationals_params", @organization.to_s => ""
  @attributes = PeAttribute.defined.all
  # Legacy finder style (hash :conditions) — predates the AR3 `where` API.
  @attribute_settings = AttributeOrganization.all(:conditions => {:organization_id => @organization.id})
  @complexities = @organization.organization_uow_complexities
  @factors = Factor.order("factor_type")
  # Only the first technology is preselected for the abacus tab.
  @ot = @organization.organization_technologies.first
  @unitofworks = @organization.unit_of_works
  @users = @organization.users
  @fields = @organization.fields
  @organization_profiles = @organization.organization_profiles
  @work_element_types = @organization.work_element_types
end
# Reload the value elements for the size unit picked in the UI
# (presumably an AJAX partial refresh — confirm against the JS view).
def refresh_value_elements
  @size_unit = SizeUnit.find(params[:size_unit_id])
  @technologies = OrganizationTechnology.all
end
# POST /organizations — create a new organization and seed all of its default
# reference data: size unit types (with a conversion abacus initialised to 1),
# units of work, factor complexities copied from master data, complexity
# levels per unit of work, technologies, master-data profiles, the estimation
# workflow statuses and a default widget view.
# The creating user is attached to the organization before saving.
# Fixes vs. original: removed unused locals (`status`, `view`), replaced the
# opaque `i[0]`/`i[1]`… indexing with block destructuring, and stopped the
# inner block param shadowing the outer `uow` seed array.
def create
  authorize! :create_organizations, Organization
  @organization = Organization.new(params[:organization])
  # Organization's projects selected columns
  @organization.project_selected_columns = Project.default_selected_columns
  # Add current_user to the organization
  @organization.users << current_user
  if @organization.save
    # Create the default size unit types; every (technology, size unit,
    # size unit type) triple starts with a conversion value of 1.
    size_unit_types = [
      ['New', 'new', ""],
      ['Modified', 'new', ""],
      ['Reused', 'new', ""],
    ]
    size_unit_types.each do |name, sut_alias, description|
      sut = SizeUnitType.create(:name => name, :alias => sut_alias, :description => description, :organization_id => @organization.id)
      @organization.organization_technologies.each do |ot|
        SizeUnit.all.each do |su|
          TechnologySizeType.create(organization_id: sut.organization_id, organization_technology_id: ot.id, size_unit_id: su.id, size_unit_type_id: sut.id, value: 1)
        end
      end
    end
    # Default units of work (user-facing French wording kept verbatim).
    uow = [
      ['Données', 'data', "Création, modification, suppression, duplication de composants d'une base de données (tables, fichiers). Une UO doit être comptée pour chaque entité métier. Seules les entités métier sont comptabilisées."],
      ['Traitement', 'traitement', 'Création, modification, suppression, duplication de composants de visualisation, gestion de données, activation de fonctionnalités avec une interface de type Caractère (terminal passif).'],
      ['Batch', 'batch', "Création, modification, suppression, duplication de composants d'extraction ou de MAJ de données d'une source de données persistante. Par convention, cette UO ne couvre pas les interfaces. Cette UO couvre le nettoyage et la purge des tables."],
      ['Interfaces', 'interface', "Création, modification, suppression, duplication de composants d'interface de type : Médiation, Conversion, Transcodification, Transformation (les transformations sont implémentées en langage de programmation). Les 'Historisation avec clés techniques générée' sont à comptabiliser en 'Règle de gestion'"]
    ]
    uow.each do |name, uow_alias, description|
      @organization.unit_of_works.create(:name => name, :alias => uow_alias, :description => description, :state => 'defined')
    end
    # On creation, copy the factor complexities from master data
    # (the "defined" master rows have organization_id => nil).
    OrganizationUowComplexity.where(organization_id: nil).each do |o|
      ouc = OrganizationUowComplexity.new(name: o.name , organization_id: @organization.id, description: o.description, value: o.value, factor_id: o.factor_id, is_default: o.is_default, :state => 'defined')
      ouc.save(validate: false)
    end
    # Then create the default complexity levels for each unit of work.
    levels = [
      ['Simple', 'simple', "Simple", 1, "defined"],
      ['Moyen', 'moyen', "Moyen", 2, "defined"],
      ['Complexe', 'complexe', "Complexe", 4, "defined"]
    ]
    levels.each do |name, level_alias, description, value, state|
      @organization.unit_of_works.each do |unit_of_work|
        ouc = OrganizationUowComplexity.new(:name => name, :alias => level_alias, :description => description, :state => state, :unit_of_work_id => unit_of_work.id, :organization_id => @organization.id)
        ouc.save(validate: false)
        @organization.size_unit_types.each do |sut|
          SizeUnitTypeComplexity.create(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id, value: value)
        end
      end
    end
    # On save, copy the master-data technologies into the organization.
    Technology.all.each do |technology|
      ot = OrganizationTechnology.new(name: technology.name, alias: technology.name, description: technology.description, organization_id: @organization.id)
      ot.save(validate: false)
    end
    # Add MasterData Profiles to Organization
    Profile.all.each do |profile|
      op = OrganizationProfile.new(organization_id: @organization.id, name: profile.name, description: profile.description, cost_per_hour: profile.cost_per_hour)
      op.save
    end
    # Add the default estimation workflow statuses to the organization.
    estimation_statuses = [
      ['0', 'preliminary', "Préliminaire", "999999", "Statut initial lors de la création de l'estimation"],
      ['1', 'in_progress', "En cours", "3a87ad", "En cours de modification"],
      ['2', 'in_review', "Relecture", "f89406", "En relecture"],
      ['3', 'checkpoint', "Contrôle", "b94a48", "En phase de contrôle"],
      ['4', 'released', "Confirmé", "468847", "Phase finale d'une estimation qui arrive à terme et qui sera retenue comme une version majeure"],
      ['5', 'rejected', "Rejeté", "333333", "L'estimation dans ce statut est rejetée et ne sera pas poursuivi"]
    ]
    estimation_statuses.each do |number, status_alias, name, color, description|
      EstimationStatus.create(organization_id: @organization.id, status_number: number, status_alias: status_alias, name: name, status_color: color, description: description)
    end
    # Add a default view for widgets.
    View.create(:name => "Default view",
                :description => "Default widgets's default view. If no view is selected for module project, this view will be automatically selected.",
                :organization_id => @organization.id)
    redirect_to redirect_apply(edit_organization_path(@organization)), notice: "#{I18n.t(:notice_organization_successful_created)}"
  else
    render action: 'new'
  end
end
# PUT /organizations/:id — update the organization's settings.
# On success, backfill any missing SizeUnitTypeComplexity rows so that every
# (size unit type, complexity) pair exists, then redirect.
# On failure, re-populate every collection the edit form needs and re-render.
# Fix vs. original: removed the stray space in `I18n.t (:sym)` (a method call
# with a space before its argument list, flagged by Ruby as ambiguous).
def update
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:id])
  if @organization.update_attributes(params[:organization])
    OrganizationUowComplexity.where(organization_id: @organization.id).each do |ouc|
      @organization.size_unit_types.each do |sut|
        sutc = SizeUnitTypeComplexity.where(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id).first
        if sutc.nil?
          SizeUnitTypeComplexity.create(size_unit_type_id: sut.id, organization_uow_complexity_id: ouc.id)
        end
      end
    end
    flash[:notice] = I18n.t(:notice_organization_successful_updated)
    redirect_to redirect_apply(edit_organization_path(@organization), nil, '/organizationals_params')
  else
    # Rebuild the edit form's data before re-rendering with the errors.
    @attributes = PeAttribute.defined.all
    @attribute_settings = AttributeOrganization.all(:conditions => {:organization_id => @organization.id})
    @complexities = @organization.organization_uow_complexities
    @ot = @organization.organization_technologies.first
    @unitofworks = @organization.unit_of_works
    @factors = Factor.order("factor_type")
    @technologies = OrganizationTechnology.all
    @size_unit_types = SizeUnitType.all
    @organization_profiles = @organization.organization_profiles
    @groups = @organization.groups
    @organization_group = @organization.groups
    @wbs_activities = @organization.wbs_activities
    @projects = @organization.projects
    @fields = @organization.fields
    @size_units = SizeUnit.all
    @guw_models = @organization.guw_models
    render action: 'edit'
  end
end
# Render the deletion-confirmation page for an organization.
# Fix vs. original: run the authorization check BEFORE touching the database,
# so unauthorized callers never trigger a record lookup (and a possible
# RecordNotFound) ahead of the CanCan check.
def confirm_organization_deletion
  authorize! :manage, Organization
  @organization = Organization.find(params[:organization_id])
end
# DELETE /organizations/:id — confirmation-driven destroy flow.
# params[:commit] selects the branch: localized "delete" destroys (only when
# the checkbox confirmation was given), localized "cancel" bails out, anything
# else re-renders the confirmation page.
def destroy
  authorize! :manage, Organization
  @organization = Organization.find(params[:id])
  @organization_id = @organization.id
  case params[:commit]
  when I18n.t('delete')
    if params[:yes_confirmation] == 'selected'
      @organization.destroy
      if session[:organization_id] == params[:id]
        # NOTE(review): this stores an Organization *record* in the session,
        # not an id — confirm that downstream readers expect that.
        session[:organization_id] = current_user.organizations.first #session[:organization_id] = nil
      end
      flash[:notice] = I18n.t(:notice_organization_successful_deleted)
      redirect_to '/organizationals_params' and return
    else
      flash[:warning] = I18n.t('warning_need_organization_check_box_confirmation')
      render :template => 'organizations/confirm_organization_deletion', :locals => {:organization_id => @organization_id}
    end
  when I18n.t('cancel')
    redirect_to '/organizationals_params' and return
  else
    # NOTE(review): template path is under 'projects/' while the branch above
    # uses 'organizations/' — looks inconsistent; verify which template exists.
    render :template => 'projects/confirm_organization_deletion', :locals => {:organization_id => @organization_id}
  end
end
# Destroy an organization, refusing when it still owns projects.
# Fix vs. original: `@organization.projects.empty? || @organization.projects.nil?`
# — the nil-check was dead code (an AR association is never nil, and had it
# been nil, `empty?` would already have raised before the check ran).
def destroy_save
  authorize! :manage, Organization
  @organization = Organization.find(params[:id])
  # An organization that is used by one or more projects/estimations
  # must not be deleted.
  if @organization.projects.empty?
    @organization.destroy
    # Drop the session selection if it pointed at the deleted organization.
    if session[:organization_id] == params[:id]
      session[:organization_id] = nil
    end
    flash[:notice] = I18n.t(:notice_organization_successful_deleted)
  else
    flash[:warning] = I18n.t(:warning_organization_cannot_be_deleted, value: @organization.name)
  end
  redirect_to '/organizationals_params'
end
# Index page of the organizational parameters.
# Super-admins see every organization (including image/template ones);
# other managers see all non-image organizations; everyone else sees only
# the non-image organizations they belong to.
def organizationals_params
  set_page_title 'Organizational Parameters'
  set_breadcrumbs "Organizations" => "/organizationals_params", "Liste des organizations" => ""
  @organizations =
    if current_user.super_admin?
      Organization.all
    elsif can?(:manage, :all)
      Organization.all.reject(&:is_image_organization)
    else
      current_user.organizations.all.reject(&:is_image_organization)
    end
  @size_units = SizeUnit.all
  @factors = Factor.order("factor_type")
end
# Stream the organization's users as a CSV attachment (one row per user,
# followed by the user's group names), encoded ISO-8859-1 for Excel.
def export
  @organization = Organization.find(params[:organization_id])
  csv_string = CSV.generate(:col_sep => ",") do |csv|
    csv << ['Prénom', 'Nom', 'Email', 'Login', 'Groupes']
    @organization.users.each do |member|
      row = [member.first_name, member.last_name, member.email, member.login_name]
      csv << row + member.groups.map(&:name)
    end
  end
  send_data(csv_string.encode("ISO-8859-1"), :type => 'text/csv; header=present', :disposition => "attachment; filename='modele_import_utilisateurs.csv'")
end
# Import users from an uploaded CSV (columns: first name, last name, email,
# login, then any number of group-name columns). Existing logins are skipped;
# new users get a random password and are attached to the current
# organization and to each named group.
# Fixes vs. original: the inner column-loop variable shadowed the outer row
# index `i` (renamed to `group_col`); removed the unused locals `error_count`
# and `encoding` (the file is always read as ISO-8859-1).
def import_user
  # Column separator: explicit param, or the locale's default.
  sep = "#{params[:separator].blank? ? I18n.t(:general_csv_separator) : params[:separator]}"
  file = params[:file]
  CSV.open(file.path, 'r', :quote_char => "\"", :row_sep => :auto, :col_sep => sep, :encoding => "ISO-8859-1:ISO-8859-1") do |csv|
    csv.each_with_index do |row, row_index|
      # Row 0 is the header line.
      unless row_index == 0
        password = SecureRandom.hex(8)
        user = User.where(login_name: row[3]).first
        if user.nil?
          u = User.new(first_name: row[0],
                       last_name: row[1],
                       email: row[2],
                       login_name: row[3],
                       id_connexion: row[3],
                       super_admin: false,
                       password: password,
                       password_confirmation: password,
                       language_id: params[:language_id].to_i,
                       initials: "#{row[0].first}#{row[1].first}",
                       time_zone: "Paris",
                       object_per_page: 50,
                       auth_type: "Application",
                       number_precision: 2)
          u.save(validate: false)
          OrganizationsUsers.create(organization_id: @current_organization.id,
                                    user_id: u.id)
          # Remaining columns (from index 4) hold group names.
          (row.size - 4).times do |group_col|
            group = Group.where(name: row[4 + group_col], organization_id: @current_organization.id).first
            begin
              GroupsUsers.create(group_id: group.id,
                                 user_id: u.id)
            rescue
              # Best effort: unknown group names (group is nil) are skipped.
            end
          end
        end
      end
    end
  end
  redirect_to organization_users_path(@current_organization)
end
# Save the (size unit x technology x size unit type) conversion abacus posted
# as the nested params[:abacus] hash; creates missing TechnologySizeType rows
# and updates existing ones.
# Fix vs. original: removed the dead `unless value.nil?` guard — `to_f`
# never returns nil (even `nil.to_f` is 0.0), so the branch always ran.
def set_technology_size_type_abacus
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:organization])
  @technologies = @organization.organization_technologies
  @size_unit_types = @organization.size_unit_types
  @size_units = SizeUnit.all
  @technologies.each do |technology|
    @size_unit_types.each do |sut|
      @size_units.each do |size_unit|
        value = params[:abacus]["#{size_unit.id}"]["#{technology.id}"]["#{sut.id}"].to_f
        t = TechnologySizeType.where( organization_id: @organization.id,
                                      organization_technology_id: technology.id,
                                      size_unit_id: size_unit.id,
                                      size_unit_type_id: sut.id).first
        if t.nil?
          TechnologySizeType.create(organization_id: @organization.id,
                                    organization_technology_id: technology.id,
                                    size_unit_id: size_unit.id,
                                    size_unit_type_id: sut.id,
                                    value: value)
        else
          t.update_attributes(value: value)
        end
      end
    end
  end
  redirect_to edit_organization_path(@organization, :anchor => 'tabs-abacus-sut')
end
# Save the (size unit x technology) conversion abacus posted as the nested
# params[:technology_size_units_abacus] hash; creates missing
# TechnologySizeUnit rows and updates existing ones.
# Fix vs. original: removed the dead `unless value.nil?` guard — `to_f`
# never returns nil (even `nil.to_f` is 0.0), so the branch always ran.
def set_technology_size_unit_abacus
  authorize! :edit_organizations, Organization
  @organization = Organization.find(params[:organization])
  @technologies = @organization.organization_technologies
  @size_units = SizeUnit.all
  @technologies.each do |technology|
    @size_units.each do |size_unit|
      value = params[:technology_size_units_abacus]["#{size_unit.id}"]["#{technology.id}"].to_f
      t = TechnologySizeUnit.where( organization_id: @organization.id,
                                    organization_technology_id: technology.id,
                                    size_unit_id: size_unit.id).first
      if t.nil?
        TechnologySizeUnit.create(organization_id: @organization.id,
                                  organization_technology_id: technology.id,
                                  size_unit_id: size_unit.id,
                                  value: value)
      else
        t.update_attributes(value: value)
      end
    end
  end
  redirect_to edit_organization_path(@organization, :anchor => 'tabs-abacus-tsu')
end
# Save the abacus grid (unit of work x complexity) for one technology.
def set_abacus
  authorize! :edit_organizations, Organization
  @ot = OrganizationTechnology.find_by_id(params[:technology])
  @complexities = @ot.organization.organization_uow_complexities
  @unitofworks = @ot.unit_of_works
  @unitofworks.each do |uow|
    @complexities.each do |c|
      # Legacy dynamic finder: one abacus cell per
      # (unit of work, complexity, technology, organization) tuple.
      a = AbacusOrganization.find_or_create_by_unit_of_work_id_and_organization_uow_complexity_id_and_organization_technology_id_and_organization_id(uow.id, c.id, @ot.id, params[:id])
      begin
        a.update_attribute(:value, params['abacus']["#{uow.id}"]["#{c.id}"])
      rescue
        # Best effort: cells missing from params raise here and are skipped.
        # NOTE(review): the bare rescue also hides genuine update errors.
      end
    end
  end
  redirect_to redirect_apply(edit_organization_path(@ot.organization_id, :anchor => 'tabs-abacus-tsu'), nil, '/organizationals_params')
end
# Persist the size-unit-type / complexity synthesis values posted as the
# nested params[:abacus] hash — assumes the nesting
# {sut_id => {ot_id => {uow_id => {complexity_id => value}}}} inferred from
# the loop shape; confirm against the submitting form.
def set_technology_uow_synthesis
  authorize! :manage_modules_instances, ModuleProject
  #@organization = Organization.find(params[:organization])
  params[:abacus].each do |sut|
    sut.last.each do |ot|
      ot.last.each do |uow|
        uow.last.each do |cplx|
          sutc = SizeUnitTypeComplexity.where(size_unit_type_id: sut.first.to_i, organization_uow_complexity_id: cplx.first.to_i).first_or_create
          sutc.value = cplx.last
          sutc.save
        end
      end
    end
  end
  # NOTE(review): @organization is never assigned in this action (the lookup
  # above is commented out), so the path helper receives nil — verify.
  redirect_to redirect_apply(organization_module_estimation_path(@organization, :anchor => 'taille'), nil, '/organizationals_params')
end
# Update the organization's projects available inline columns.
# Currently just redirects to the column-selection tab of the settings page;
# the actual persistence happens in update_available_inline_columns.
def set_available_inline_columns
  redirect_to organization_setting_path(@current_organization, :anchor => 'tabs-select-columns-list')
end
# Persist the user's inline-column selection for the current organization.
# params['query_classname'] selects which attribute receives the list
# ("Project" or "Organization"); anything else is ignored.
# Fixes vs. original: removed the leftover `puts "test"` debug statement and
# the unused `params['query_classname'].constantize` assignment (calling
# `constantize` on raw user input is also a code-injection hazard).
def update_available_inline_columns
  selected_columns = params['selected_inline_columns']
  unless selected_columns.nil?
    case params['query_classname']
    when "Project"
      @current_organization.project_selected_columns = selected_columns
    when "Organization"
      @current_organization.organization_selected_columns = selected_columns
    end
    @current_organization.save
  end
end
#def import_abacus
# authorize! :edit_organizations, Organization
# @organization = Organization.find(params[:id])
#
# file = params[:file]
#
# case File.extname(file.original_filename)
# when ".ods"
# workbook = Roo::Spreadsheet.open(file.path, extension: :ods)
# when ".xls"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xls)
# when ".xlsx"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xlsx)
# when ".xlsm"
# workbook = Roo::Spreadsheet.open(file.path, extension: :xlsx)
# end
#
# workbook.sheets.each_with_index do |worksheet, k|
# #if sheet name blank, we use sheetN as default name
# name = worksheet
# if name != 'ReadMe' #The ReadMe sheet is only for guidance and don't have to be proceed
#
# @ot = OrganizationTechnology.find_or_create_by_name_and_alias_and_organization_id(:name => name,
# :alias => name,
# :organization_id => @organization.id)
#
# workbook.default_sheet=workbook.sheets[k]
# workbook.each_with_index do |row, i|
# row.each_with_index do |cell, j|
# unless row.nil?
# unless workbook.cell(1,j+1) == "Abacus" or workbook.cell(i+1,1) == "Abacus"
# if can? :manage, Organization
# @ouc = OrganizationUowComplexity.find_or_create_by_name_and_organization_id(:name => workbook.cell(1,j+1), :organization_id => @organization.id)
# end
#
# if can? :manage, Organization
# @uow = UnitOfWork.find_or_create_by_name_and_alias_and_organization_id(:name => workbook.cell(i+1,1), :alias => workbook.cell(i+1,1), :organization_id => @organization.id)
# unless @uow.organization_technologies.map(&:id).include?(@ot.id)
# @uow.organization_technologies << @ot
# end
# @uow.save
# end
#
# ao = AbacusOrganization.find_by_unit_of_work_id_and_organization_uow_complexity_id_and_organization_technology_id_and_organization_id(
# @uow.id,
# @ouc.id,
# @ot.id,
# @organization.id
# )
#
# if ao.nil?
# if can? :manage, Organization
# AbacusOrganization.create(
# :unit_of_work_id => @uow.id,
# :organization_uow_complexity_id => @ouc.id,
# :organization_technology_id => @ot.id,
# :organization_id => @organization.id,
# :value => workbook.cell(i+1, j+1))
# end
# else
# ao.update_attribute(:value, workbook.cell(i+1, j+1))
# end
# end
# end
# end
# end
# end
# end
#
# redirect_to redirect_apply(edit_organization_path(@organization.id), nil, '/organizationals_params')
#end
#
#def export_abacus
# authorize! :edit_organizations, Organization
#
# @organization = Organization.find(params[:id])
# p=Axlsx::Package.new
# wb=p.workbook
# @organization.organization_technologies.each_with_index do |ot|
# wb.add_worksheet(:name => ot.name) do |sheet|
# style_title = sheet.styles.add_style(:bg_color => 'B0E0E6', :sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title2 = sheet.styles.add_style(:sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title_red = sheet.styles.add_style(:bg_color => 'B0E0E6', :fg_color => 'FF0000', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :center})
# style_title_orange = sheet.styles.add_style(:bg_color => 'B0E0E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :center})
# style_title_right = sheet.styles.add_style(:bg_color => 'E6E6E6', :sz => 14, :b => true, :alignment => {:horizontal => :right})
# style_title_right_red = sheet.styles.add_style(:bg_color => 'E6E6E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :right})
# style_title_right_orange = sheet.styles.add_style(:bg_color => 'E6E6E6', :fg_color => 'FF8C00', :sz => 14, :b => true, :i => true, :alignment => {:horizontal => :right})
# style_data = sheet.styles.add_style(:sz => 12, :alignment => {:horizontal => :center}, :locked => false)
# style_date = sheet.styles.add_style(:format_code => 'YYYY-MM-DD HH:MM:SS')
# head = ['Abacus']
# head_style = [style_title2]
# @organization.organization_uow_complexities.each_with_index do |comp|
# head.push(comp.name)
# if comp.state == 'retired'
# head_style.push(style_title_red)
# elsif comp.state == 'draft'
# head_style.push(style_title_orange)
# else
# head_style.push(style_title)
# end
# end
# row=sheet.add_row(head, :style => head_style)
# ot.unit_of_works.each_with_index do |uow|
# uow_row = []
# if uow.state == 'retired'
# uow_row_style=[style_title_right_red]
# elsif uow.state == 'draft'
# uow_row_style=[style_title_right_orange]
# else
# uow_row_style=[style_title_right]
# end
# uow_row = [uow.name]
#
# @organization.organization_uow_complexities.each_with_index do |comp2, i|
# if AbacusOrganization.where(:unit_of_work_id => uow.id, :organization_uow_complexity_id => comp2.id, :organization_technology_id => ot.id, :organization_id => @organization.id).first.nil?
# data = ''
# else
# data = AbacusOrganization.where(:unit_of_work_id => uow.id,
# :organization_uow_complexity_id => comp2.id,
# :organization_technology_id => ot.id, :organization_id => @organization.id).first.value
# end
# uow_row_style.push(style_data)
# uow_row.push(data)
# end
# row=sheet.add_row(uow_row, :style => uow_row_style)
# end
# sheet.sheet_protection.delete_rows = true
# sheet.sheet_protection.delete_columns = true
# sheet.sheet_protection.format_cells = true
# sheet.sheet_protection.insert_columns = false
# sheet.sheet_protection.insert_rows = false
# sheet.sheet_protection.select_locked_cells = false
# sheet.sheet_protection.select_unlocked_cells = false
# sheet.sheet_protection.objects = false
# sheet.sheet_protection.sheet = true
# end
# end
# wb.add_worksheet(:name => 'ReadMe') do |sheet|
# style_title2 = sheet.styles.add_style(:sz => 14, :b => true, :alignment => {:horizontal => :center})
# style_title_right = sheet.styles.add_style(:bg_color => 'E6E6E6', :sz => 13, :b => true, :alignment => {:horizontal => :right})
# style_date = sheet.styles.add_style(:format_code => 'YYYY-MM-DD HH:MM:SS', :alignment => {:horizontal => :left})
# style_text = sheet.styles.add_style(:alignment => {:wrapText => :true})
# style_field = sheet.styles.add_style(:bg_color => 'F5F5F5', :sz => 12, :b => true)
#
# sheet.add_row(['This File is an export of a ProjEstimate abacus'], :style => style_title2)
# sheet.merge_cells 'A1:F1'
# sheet.add_row(['Organization: ', "#{@organization.name} (#{@organization.id})", @organization.description], :style => [style_title_right, 0, style_text])
# sheet.add_row(['Date: ', Time.now], :style => [style_title_right, style_date])
# sheet.add_row([' '])
# sheet.merge_cells 'A5:F5'
# sheet.add_row(['There is one sheet by technology. Each sheet is organized with the complexity by column and the Unit Of work by row.'])
# sheet.merge_cells 'A6:F6'
# sheet.add_row(['For the complexity and the Unit Of Work state, we are using the following color code : Red=Retired, Orange=Draft).'])
# sheet.merge_cells 'A7:F7'
# sheet.add_row(['In order to allow this abacus to be re-imported into ProjEstimate and to prevent users from accidentally changing the structure of the sheets, workbooks have been protected.'])
# sheet.merge_cells 'A8:F8'
# sheet.add_row(['Advanced users can remove the protection (there is no password). For further information you can have a look on the ProjEstimate Help.'])
# row=sheet.add_row(['For ProjEstimate Help, Click to go'])
# sheet.add_hyperlink :location => 'http://forge.estimancy.com/projects/pe/wiki/Organizations', :ref => "A#{row.index+1}"
# sheet.add_row([' '])
# sheet.add_row([' '])
# sheet.add_row(['Technologies'], :style => [style_title_right])
# sheet.add_row(['Alias', 'Name', 'Description', 'State', 'Productivity Ratio'], :style => style_field)
# @organization.organization_technologies.each_with_index do |ot|
# sheet.add_row([ot.alias, ot.name, ot.description, ot.state, ot.productivity_ratio], :style => [0, 0, style_text])
# end
# sheet.add_row([' '])
# sheet.add_row(['Complexities'], :style => [style_title_right])
# sheet.add_row(['Display Order', 'Name', 'Description', 'State'], :style => style_field)
# @organization.organization_uow_complexities.each_with_index do |comp|
# sheet.add_row([comp.display_order, comp.name, comp.description, comp.state], :style => [0, 0, style_text])
# end
# sheet.add_row([' '])
# sheet.add_row(['Units OF Works'], :style => [style_title_right])
# sheet.add_row(['Alias', 'Name', 'Description', 'State'], :style => style_field)
# @organization.unit_of_works.each_with_index do |uow|
# sheet.add_row([uow.alias, uow.name, uow.description, uow.state], :style => [0, 0, style_text])
# end
# sheet.column_widths 20, 32, 80, 10, 18
# end
# send_data p.to_stream.read, :filename => @organization.name+'.xlsx'
#end
# Duplicate the organization
# Function to delete later => it has been replaced by the create_from_image function
# Duplicates an organization (deep copy via amoeba_dup) and redirects back
# to the organizational parameters page with a success or error flash.
def duplicate_organization
  authorize! :manage_master_data, :all

  source = Organization.find(params[:organization_id])
  copy = source.amoeba_dup

  if copy.save
    # Re-saving the source bumps its copy counter (e.g. to 1) after duplication.
    source.save
    flash[:notice] = I18n.t(:organization_successfully_copied)
  else
    flash[:error] = "#{ I18n.t(:errors_when_copying_organization)} : #{copy.errors.full_messages.join(', ')}"
  end

  redirect_to organizationals_params_path
end
# Authorization check only; relies on the default render for the view.
def show
authorize! :show_organizations, Organization
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Pender
  class Application < Rails::Application
    # API-only service: skip generating assets, helpers and view templates.
    config.generators do |g|
      g.javascripts false
      g.stylesheets false
      g.template_engine false
      g.helper false
      g.assets false
    end

    # Fix: autoload_paths alone is not eager-loaded in production, so constants
    # defined under lib/ would fail to resolve there. Register lib/ for eager
    # loading as well (ticket CHECK-602).
    config.autoload_paths << "#{config.root}/lib"
    config.eager_load_paths << "#{config.root}/lib"

    # Per-environment settings read from config/config.yml.
    cfg = YAML.load_file("#{Rails.root}/config/config.yml")[Rails.env]

    # Allow cross-origin requests from any origin for the public API.
    config.middleware.insert_before 0, Rack::Cors do
      allow do
        origins '*'
        resource '*',
          headers: [cfg['authorization_header'], 'Content-Type', 'Accept'],
          methods: [:get, :post, :delete, :options]
      end
    end

    config.action_dispatch.default_headers.merge!({
      'Access-Control-Allow-Credentials' => 'true',
      'Access-Control-Request-Method' => '*'
    })

    # Preserve the pre-Rails-6 integer representation of booleans in SQLite.
    config.active_record.sqlite3.represent_boolean_as_integer = true
  end
end
# Workaround for https://github.com/rswag/rswag/issues/359
# Move to config/environments/test.rb after issue is fixed.
# Enable Rswag auto generation examples from responses
if Rails.env.test?
RSpec.configure do |config|
# Run the request specs for real (not a dry run) so responses are captured.
config.swagger_dry_run = false
end
end
Ticket CHECK-602: Load lib folder on production
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Pender
class Application < Rails::Application
# API-only service: skip generating assets, helpers and view templates.
config.generators do |g|
g.javascripts false
g.stylesheets false
g.template_engine false
g.helper false
g.assets false
end
# lib/ is on both paths: autoloaded in development and eager-loaded so
# production (which eager-loads everything up front) also sees its constants.
config.autoload_paths << Rails.root.join("lib")
config.eager_load_paths << Rails.root.join("lib")
# Per-environment settings read from config/config.yml.
cfg = YAML.load_file("#{Rails.root}/config/config.yml")[Rails.env]
# Allow cross-origin requests from any origin for the public API.
config.middleware.insert_before 0, Rack::Cors do
allow do
origins '*'
resource '*',
headers: [cfg['authorization_header'], 'Content-Type', 'Accept'],
methods: [:get, :post, :delete, :options]
end
end
config.action_dispatch.default_headers.merge!({
'Access-Control-Allow-Credentials' => 'true',
'Access-Control-Request-Method' => '*'
})
# Preserve the pre-Rails-6 integer representation of booleans in SQLite.
config.active_record.sqlite3.represent_boolean_as_integer = true
end
end
# Workaround for https://github.com/rswag/rswag/issues/359
# Move to config/environments/test.rb after issue is fixed.
# Enable Rswag auto generation examples from responses
if Rails.env.test?
RSpec.configure do |config|
# Run the request specs for real (not a dry run) so responses are captured.
config.swagger_dry_run = false
end
end
|
# frozen_string_literal: true
# Manages GitHub Classroom organizations ("classrooms"): listing the current
# user's classrooms and creating / updating / resetting them from the user's
# GitHub organizations.
class OrganizationsController < ApplicationController
include OrganizationAuthorization
before_action :authorize_organization_addition, only: [:create]
before_action :set_users_github_organizations, only: [:index, :new, :create]
before_action :add_current_user_to_organizations, only: [:index]
before_action :paginate_users_github_organizations, only: [:new, :create]
# index/new/create operate on the user's GitHub org list, not one record.
skip_before_action :set_organization, :authorize_organization_access, only: [:index, :new, :create]
decorates_assigned :organization
# Paginated list of the classrooms the current user belongs to.
def index
@organizations = current_user.organizations.includes(:users).page(params[:page])
end
def new
@organization = Organization.new
end
# Creates a classroom backed by the GitHub organization chosen in the form.
def create
@organization = Organization.new(new_organization_params)
if @organization.save
redirect_to setup_organization_path(@organization)
else
render :new
end
end
# Shows the classroom with its assignments sorted by updated_at, paginated.
def show
@assignments = Kaminari.paginate_array(@organization.all_assignments.sort_by(&:updated_at)).page(params[:page])
end
def edit
end
# Renames the classroom (title is the only permitted attribute).
def update
if @organization.update_attributes(update_organization_params)
flash[:success] = "Organization \"#{@organization.title}\" updated"
redirect_to @organization
else
render :edit
end
end
# Soft-deletes the classroom now; a background job does the real teardown.
def destroy
if @organization.update_attributes(deleted_at: Time.zone.now)
DestroyResourceJob.perform_later(@organization)
flash[:success] = "Your organization, @#{organization.login} is being reset"
redirect_to organizations_path
else
render :edit
end
end
def new_assignment
end
def invite
end
def setup
end
# Final setup step: saves the title then moves on to inviting users.
def setup_organization
if @organization.update_attributes(update_organization_params)
redirect_to invite_organization_path(@organization)
else
render :setup
end
end
private
# Lets members and staff through; otherwise adds the user to the classroom
# when they are a GitHub admin of the underlying org, else responds 404.
def authorize_organization_access
return if @organization.users.include?(current_user) || current_user.staff?
begin
github_organization.admin?(decorated_current_user.login) ? @organization.users << current_user : not_found
rescue
not_found
end
end
# Only GitHub admins of an organization may add it as a classroom.
def authorize_organization_addition
new_github_organization = github_organization_from_params
return if new_github_organization.admin?(decorated_current_user.login)
raise NotAuthorized, 'You are not permitted to add this organization as a classroom'
end
def github_organization_from_params
@github_organization_from_params ||= GitHubOrganization.new(current_user.github_client,
params[:organization][:github_id].to_i)
end
# Permitted params for create, defaulting the title to the GitHub org's
# name (or login when the name is blank) and attaching the current user.
def new_organization_params
github_org = github_organization_from_params.organization
title = github_org.name.present? ? github_org.name : github_org.login
params
.require(:organization)
.permit(:github_id)
.merge(users: [current_user])
.merge(title: title)
end
def set_organization
@organization = Organization.find_by!(slug: params[:id])
end
# rubocop:disable AbcSize
# Builds a summary hash for each GitHub organization the user belongs to,
# pairing it with the matching classroom record when one exists.
def set_users_github_organizations
github_user = GitHubUser.new(current_user.github_client, current_user.uid)
@users_github_organizations = github_user.organization_memberships.map do |membership|
{
classroom: Organization.unscoped.includes(:users).find_by(github_id: membership.organization.id),
github_id: membership.organization.id,
login: membership.organization.login,
role: membership.role
}
end
end
# rubocop:enable AbcSize
# Check if the current user has any organizations with admin privilege, if so add the user to the corresponding
# classroom automatically.
def add_current_user_to_organizations
@users_github_organizations.each do |organization|
classroom = organization[:classroom]
if classroom.present? && !classroom.users.include?(current_user)
create_user_organization_access(classroom)
end
end
end
# Adds the user to the classroom only if they are a GitHub admin of the org.
def create_user_organization_access(organization)
github_org = GitHubOrganization.new(current_user.github_client, organization.github_id)
return unless github_org.admin?(decorated_current_user.login)
organization.users << current_user
end
def paginate_users_github_organizations
@users_github_organizations = Kaminari.paginate_array(@users_github_organizations).page(params[:page]).per(24)
end
def update_organization_params
params
.require(:organization)
.permit(:title)
end
end
remove staff access to organization pages
# frozen_string_literal: true
# Manages GitHub Classroom organizations ("classrooms"): listing the current
# user's classrooms and creating / updating / resetting them from the user's
# GitHub organizations.
class OrganizationsController < ApplicationController
include OrganizationAuthorization
before_action :authorize_organization_addition, only: [:create]
before_action :set_users_github_organizations, only: [:index, :new, :create]
before_action :add_current_user_to_organizations, only: [:index]
before_action :paginate_users_github_organizations, only: [:new, :create]
# index/new/create operate on the user's GitHub org list, not one record.
skip_before_action :set_organization, :authorize_organization_access, only: [:index, :new, :create]
decorates_assigned :organization
# Paginated list of the classrooms the current user belongs to.
def index
@organizations = current_user.organizations.includes(:users).page(params[:page])
end
def new
@organization = Organization.new
end
# Creates a classroom backed by the GitHub organization chosen in the form.
def create
@organization = Organization.new(new_organization_params)
if @organization.save
redirect_to setup_organization_path(@organization)
else
render :new
end
end
# Shows the classroom with its assignments sorted by updated_at, paginated.
def show
@assignments = Kaminari.paginate_array(@organization.all_assignments.sort_by(&:updated_at)).page(params[:page])
end
def edit
end
# Renames the classroom (title is the only permitted attribute).
def update
if @organization.update_attributes(update_organization_params)
flash[:success] = "Organization \"#{@organization.title}\" updated"
redirect_to @organization
else
render :edit
end
end
# Soft-deletes the classroom now; a background job does the real teardown.
def destroy
if @organization.update_attributes(deleted_at: Time.zone.now)
DestroyResourceJob.perform_later(@organization)
flash[:success] = "Your organization, @#{organization.login} is being reset"
redirect_to organizations_path
else
render :edit
end
end
def new_assignment
end
def invite
end
def setup
end
# Final setup step: saves the title then moves on to inviting users.
def setup_organization
if @organization.update_attributes(update_organization_params)
redirect_to invite_organization_path(@organization)
else
render :setup
end
end
private
# Lets existing members through; otherwise adds the user to the classroom
# when they are a GitHub admin of the underlying org, else responds 404.
# (Staff are intentionally not given blanket access here.)
def authorize_organization_access
return if @organization.users.include?(current_user)
begin
github_organization.admin?(decorated_current_user.login) ? @organization.users << current_user : not_found
rescue
not_found
end
end
# Only GitHub admins of an organization may add it as a classroom.
def authorize_organization_addition
new_github_organization = github_organization_from_params
return if new_github_organization.admin?(decorated_current_user.login)
raise NotAuthorized, 'You are not permitted to add this organization as a classroom'
end
def github_organization_from_params
@github_organization_from_params ||= GitHubOrganization.new(current_user.github_client,
params[:organization][:github_id].to_i)
end
# Permitted params for create, defaulting the title to the GitHub org's
# name (or login when the name is blank) and attaching the current user.
def new_organization_params
github_org = github_organization_from_params.organization
title = github_org.name.present? ? github_org.name : github_org.login
params
.require(:organization)
.permit(:github_id)
.merge(users: [current_user])
.merge(title: title)
end
def set_organization
@organization = Organization.find_by!(slug: params[:id])
end
# rubocop:disable AbcSize
# Builds a summary hash for each GitHub organization the user belongs to,
# pairing it with the matching classroom record when one exists.
def set_users_github_organizations
github_user = GitHubUser.new(current_user.github_client, current_user.uid)
@users_github_organizations = github_user.organization_memberships.map do |membership|
{
classroom: Organization.unscoped.includes(:users).find_by(github_id: membership.organization.id),
github_id: membership.organization.id,
login: membership.organization.login,
role: membership.role
}
end
end
# rubocop:enable AbcSize
# Check if the current user has any organizations with admin privilege, if so add the user to the corresponding
# classroom automatically.
def add_current_user_to_organizations
@users_github_organizations.each do |organization|
classroom = organization[:classroom]
if classroom.present? && !classroom.users.include?(current_user)
create_user_organization_access(classroom)
end
end
end
# Adds the user to the classroom only if they are a GitHub admin of the org.
def create_user_organization_access(organization)
github_org = GitHubOrganization.new(current_user.github_client, organization.github_id)
return unless github_org.admin?(decorated_current_user.login)
organization.users << current_user
end
def paginate_users_github_organizations
@users_github_organizations = Kaminari.paginate_array(@users_github_organizations).page(params[:page]).per(24)
end
def update_organization_params
params
.require(:organization)
.permit(:title)
end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Scoutz
class Application < Rails::Application
# config.middleware.use Rack::Deflater
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.beginning_of_week = :sunday
config.i18n.enforce_available_locales = false
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# FROM RAILS 3 -- NOT NEEDED?
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib #{config.root}/models/concerns)
# config.autoload_paths += %W(#{config.root}/app/models/ckeditor)
# config.assets.precompile += ['jquery.js']
# Extra assets (landing page, CKEditor config, print styles) to precompile.
config.assets.precompile += ['landing.css', 'landing/landing.js', 'ckeditor_config.js', 'ckeditor/config_ckeditor.js', 'print.css', 'printing/print.js']
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Override layout for devise controllers
config.to_prepare do
Devise::SessionsController.layout 'dialog_dark_modal'
Devise::RegistrationsController.layout 'dialog_dark_modal'
Devise::ConfirmationsController.layout 'dialog_dark_modal'
Devise::UnlocksController.layout 'dialog_dark_modal'
Devise::PasswordsController.layout 'dialog_dark_modal'
# Devise::Mailer.layout "email" # email.haml or email.erb
end
end
end
# Display name, suffixed with the environment outside production.
APP_NAME = ::Rails.env.production? ? 'SCOUTTin' : "SCOUTTin - #{::Rails.env}"
have dotenv load for all environments
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
# Load .env for every environment (not just development/test).
Dotenv.load('.env')
module Scoutz
class Application < Rails::Application
# config.middleware.use Rack::Deflater
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.beginning_of_week = :sunday
config.i18n.enforce_available_locales = false
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# FROM RAILS 3 -- NOT NEEDED?
# Custom directories with classes and modules you want to be autoloadable.
config.autoload_paths += %W(#{config.root}/lib #{config.root}/models/concerns)
# config.autoload_paths += %W(#{config.root}/app/models/ckeditor)
# config.assets.precompile += ['jquery.js']
# Extra assets (landing page, CKEditor config, print styles) to precompile.
config.assets.precompile += ['landing.css', 'landing/landing.js', 'ckeditor_config.js', 'ckeditor/config_ckeditor.js', 'print.css', 'printing/print.js']
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
# Override layout for devise controllers
config.to_prepare do
Devise::SessionsController.layout 'dialog_dark_modal'
Devise::RegistrationsController.layout 'dialog_dark_modal'
Devise::ConfirmationsController.layout 'dialog_dark_modal'
Devise::UnlocksController.layout 'dialog_dark_modal'
Devise::PasswordsController.layout 'dialog_dark_modal'
# Devise::Mailer.layout "email" # email.haml or email.erb
end
end
end
# Display name, suffixed with the environment outside production.
APP_NAME = ::Rails.env.production? ? 'SCOUTTin' : "SCOUTTin - #{::Rails.env}"
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module ChaiIo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
#config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
config.assets.paths << "#{Rails.root}/spec/javascripts/fixtures"
#Set to true if you want to enable Caching using Redis
# NOTE(review): Redis caching is enabled with a hardcoded host ('trunk.com')
# in the application-wide config — confirm this belongs in per-environment
# configuration rather than here.
config.redis_caching = {
:enabled => true,
:host => 'trunk.com',
:port => 6379
}
#Query Timeout (seconds)
config.query = {
:timeout => 30
}
# Generator defaults: RSpec with request/controller specs, FactoryGirl fixtures.
config.generators do |g|
g.test_framework :rspec,
fixtures: true,
view_specs: false,
helper_specs: false,
routing_specs: false,
controller_specs: true,
request_specs: true
g.fixture_replacement :factory_girl, dir: "spec/factories"
end
end
end
application config
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# If you precompile assets before deploying to production, use this line
Bundler.require(*Rails.groups(:assets => %w(development test)))
# If you want your assets lazily compiled in production, use this line
# Bundler.require(:default, :assets, Rails.env)
end
module ChaiIo
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable escaping HTML in JSON.
config.active_support.escape_html_entities_in_json = true
# Use SQL instead of Active Record's schema dumper when creating the database.
# This is necessary if your schema can't be completely dumped by the schema dumper,
# like if you have constraints or database-specific column types
# config.active_record.schema_format = :sql
# Enforce whitelist mode for mass assignment.
# This will create an empty whitelist of attributes available for mass-assignment for all models
# in your app. As such, your models will need to explicitly whitelist or blacklist accessible
# parameters by using an attr_accessible or attr_protected declaration.
#config.active_record.whitelist_attributes = true
# Enable the asset pipeline
config.assets.enabled = true
# Version of your assets, change this if you want to expire all your assets
config.assets.version = '1.0'
config.assets.paths << "#{Rails.root}/spec/javascripts/fixtures"
#Set to true if you want to enable Caching using Redis
# Redis caching is disabled by default here; enable per environment as needed.
config.redis_caching = {
:enabled => false,
:host => '',
:port => 6379
}
#Query Timeout (seconds)
config.query = {
:timeout => 30
}
# Generator defaults: RSpec with request/controller specs, FactoryGirl fixtures.
config.generators do |g|
g.test_framework :rspec,
fixtures: true,
view_specs: false,
helper_specs: false,
routing_specs: false,
controller_specs: true,
request_specs: true
g.fixture_replacement :factory_girl, dir: "spec/factories"
end
end
end
|
# Manages the list of users authorized to use a product (instrument, service
# or item) within the current facility.
class ProductUsersController < ApplicationController

  admin_tab :index, :new
  before_filter :authenticate_user!
  before_filter :check_acting_as
  before_filter :init_current_facility
  before_filter :init_product

  load_and_authorize_resource

  layout 'two_column'

  def initialize
    @active_tab = 'admin_products'
    super
  end

  # GET /users
  # Lists the users authorized for this product, sorted by last then first
  # name and paginated; @users is nil when no approval is required.
  def index
    if @product.requires_approval?
      # Fix: the original assigned @users = @product.product_users and then
      # immediately overwrote it on the next line; the dead assignment is gone.
      @users = @product.product_users.map { |pu| pu.user }
      @users = @users.sort_by { |u| [ u.last_name, u.first_name ] }.paginate(:page => params[:page])
    else
      @users = nil
      flash.now[:notice] = "This #{@product.class.name.downcase} does not require user authorization"
    end
  end

  # GET /users/new
  # When a :user param is present, immediately authorizes that user for the
  # product and redirects back to the product's user list.
  def new
    if params[:user]
      user = User.find(params[:user])
      pu = ProductUser.new(:product => @product, :user => user, :approved_by => session_user.id, :approved_at => Time.zone.now)
      if pu.save
        flash[:notice] = "The user has been successfully authorized for this #{@product.class.name.downcase}"
      else
        flash[:error] = pu.errors.full_messages
      end
      redirect_to(self.send("facility_#{@product.class.name.downcase}_users_url", current_facility, @product))
    end
  end

  # Removes a user's authorization for this product, then redirects back.
  def destroy
    product_user = ProductUser.find(:first, :conditions => { :product_id => @product.id, :user_id => params[:id] })
    product_user.destroy
    if product_user.destroyed?
      flash[:notice] = "The user has been successfully removed from this #{@product.class.name.downcase}"
    else
      flash[:error] = "An error was encountered while attempting to remove the user from this #{@product.class.name.downcase}"
    end
    redirect_to(self.send("facility_#{@product.class.name.downcase}_users_url", current_facility, @product))
  end

  # Renders (without layout) up to @limit users matching the search term on
  # first name, last name, username, or concatenated first+last name.
  def user_search_results
    @limit = 25
    term = generate_multipart_like_search_term(params[:search_term])
    if params[:search_term].length > 0
      conditions = ["LOWER(first_name) LIKE ? OR LOWER(last_name) LIKE ? OR LOWER(username) LIKE ? OR LOWER(CONCAT(first_name, last_name)) LIKE ?", term, term, term, term]
      @users = User.find(:all, :conditions => conditions, :order => "last_name, first_name", :limit => @limit)
      @count = @users.length
    end
    render :layout => false
  end

  # before_filter: resolves @product from whichever nested route param is set.
  def init_product
    @product = current_facility.products.find_by_url_name!(params[:instrument_id] || params[:service_id] || params[:item_id])
  end

end
Task #30441: set var @product_user for CanCan
# Manages the list of users authorized to use a product (instrument, service
# or item) within the current facility.
class ProductUsersController < ApplicationController

  admin_tab :index, :new
  before_filter :authenticate_user!
  before_filter :check_acting_as
  before_filter :init_current_facility
  before_filter :init_product

  load_and_authorize_resource

  layout 'two_column'

  def initialize
    @active_tab = 'admin_products'
    super
  end

  # GET /users
  # Lists the users authorized for this product, sorted by last then first
  # name and paginated; @users is nil when no approval is required.
  def index
    if @product.requires_approval?
      # Fix: the original assigned @users = @product.product_users and then
      # immediately overwrote it on the next line; the dead assignment is gone.
      @users = @product.product_users.map { |pu| pu.user }
      @users = @users.sort_by { |u| [ u.last_name, u.first_name ] }.paginate(:page => params[:page])
    else
      @users = nil
      flash.now[:notice] = "This #{@product.class.name.downcase} does not require user authorization"
    end
  end

  # GET /users/new
  # When a :user param is present, immediately authorizes that user for the
  # product and redirects back to the product's user list.
  def new
    if params[:user]
      user = User.find(params[:user])
      pu = ProductUser.new(:product => @product, :user => user, :approved_by => session_user.id, :approved_at => Time.zone.now)
      if pu.save
        flash[:notice] = "The user has been successfully authorized for this #{@product.class.name.downcase}"
      else
        flash[:error] = pu.errors.full_messages
      end
      redirect_to(self.send("facility_#{@product.class.name.downcase}_users_url", current_facility, @product))
    end
  end

  # Removes a user's authorization for this product, then redirects back.
  def destroy
    product_user = ProductUser.find(:first, :conditions => { :product_id => @product.id, :user_id => params[:id] })
    product_user.destroy
    if product_user.destroyed?
      flash[:notice] = "The user has been successfully removed from this #{@product.class.name.downcase}"
    else
      flash[:error] = "An error was encountered while attempting to remove the user from this #{@product.class.name.downcase}"
    end
    redirect_to(self.send("facility_#{@product.class.name.downcase}_users_url", current_facility, @product))
  end

  # Renders (without layout) up to @limit users matching the search term on
  # first name, last name, username, or concatenated first+last name.
  def user_search_results
    @limit = 25
    term = generate_multipart_like_search_term(params[:search_term])
    if params[:search_term].length > 0
      conditions = ["LOWER(first_name) LIKE ? OR LOWER(last_name) LIKE ? OR LOWER(username) LIKE ? OR LOWER(CONCAT(first_name, last_name)) LIKE ?", term, term, term, term]
      @users = User.find(:all, :conditions => conditions, :order => "last_name, first_name", :limit => @limit)
      @count = @users.length
    end
    render :layout => false
  end

  # before_filter: resolves @product from whichever nested route param is set.
  def init_product
    @product = current_facility.products.find_by_url_name!(params[:instrument_id] || params[:service_id] || params[:item_id])
    # NOTE(review): grabs an arbitrary ProductUser so CanCan's
    # load_and_authorize_resource has an instance to authorize — confirm
    # this placeholder is intentional.
    @product_user = ProductUser.first # for CanCan auth
  end

end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the default gems, the ones in the
# current environment and also include :assets gems if in development
# or test environments.
Bundler.require *Rails.groups(:assets) if defined?(Bundler)
module Prometheus20
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)
    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]
    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    config.time_zone = 'UTC'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    config.i18n.default_locale = :en
    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"
    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]
    # Enable the asset pipeline
    config.assets.enabled = true
    config.generators do |g|
      g.test_framework :rspec, :fixture => true
      g.fixture_replacement :factory_girl
    end
    # fallback for empty translations
    config.i18n.fallbacks = true
    # Production-only middleware: error-notification emails plus a redirect to
    # the canonical domain. Fix: use the idiomatic Rails.env.production?
    # predicate instead of the roundabout `Rails.env.to_sym == :production`.
    if Rails.env.production?
      config.middleware.use ExceptionNotifier,
        :email_prefix => "[ERROR] ",
        :sender_address => %{"Sisyphus 2.0 Error" <prometheus-noreply@altlinux.org>},
        :exception_recipients => %w{igor.zubkov@gmail.com}
      config.middleware.use Rack::ForceDomain, 'packages.altlinux.org'
    end
  end
end
Cleanup
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# If you have a Gemfile, require the default gems, the ones in the
# current environment and also include :assets gems if in development
# or test environments.
Bundler.require *Rails.groups(:assets) if defined?(Bundler)
module Prometheus20
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Custom directories with classes and modules you want to be autoloadable.
# config.autoload_paths += %W(#{config.root}/extras)
# Only load the plugins named here, in the order given (default is alphabetical).
# :all can be used as a placeholder for all plugins not explicitly named.
# config.plugins = [ :exception_notification, :ssl_requirement, :all ]
# Activate observers that should always be running.
# config.active_record.observers = :cacher, :garbage_collector, :forum_observer
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
config.time_zone = 'UTC'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
config.i18n.default_locale = :en
# Configure the default encoding used in templates for Ruby 1.9.
config.encoding = "utf-8"
# Configure sensitive parameters which will be filtered from the log file.
config.filter_parameters += [:password]
# Enable the asset pipeline
config.assets.enabled = true
config.generators do |g|
g.test_framework :rspec, :fixture => true
g.fixture_replacement :factory_girl
end
# fallback for empty translations
config.i18n.fallbacks = true
# Production-only middleware: error-notification emails plus a redirect to
# the canonical domain.
if Rails.env.production?
config.middleware.use ExceptionNotifier,
:email_prefix => "[ERROR] ",
:sender_address => %{"Sisyphus 2.0 Error" <prometheus-noreply@altlinux.org>},
:exception_recipients => %w{igor.zubkov@gmail.com}
config.middleware.use Rack::ForceDomain, 'packages.altlinux.org'
end
end
end
|
# Devise sessions controller for pros. Currently defers entirely to the
# Devise defaults; new/create simply call super.
class Pros::SessionsController < Devise::SessionsController
# before_filter :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
def new
super
end
# POST /resource/sign_in
def create
super
end
# DELETE /resource/sign_out
# def destroy
# super
# end
# protected
# You can put the params you want to permit in the empty array.
# def configure_sign_in_params
# devise_parameter_sanitizer.for(:sign_in) << :attribute
# end
end
style check
# Devise sessions controller for pros. Currently defers entirely to the
# Devise defaults; new/create simply call super.
class Pros::SessionsController < Devise::SessionsController
# before_filter :configure_sign_in_params, only: [:create]
# GET /resource/sign_in
def new
super
end
# POST /resource/sign_in
def create
super
end
# DELETE /resource/sign_out
# def destroy
# super
# end
# protected
# You can put the params you want to permit in the empty array.
# def configure_sign_in_params
# devise_parameter_sanitizer.for(:sign_in) << :attribute
# end
end
|
require File.expand_path('../boot', __FILE__)
require 'rails/all'
if defined?(Bundler)
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(:default, Rails.env)
end
module UnipeptWeb
  # Central Rails application configuration for the Unipept web app.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1'

    # Assets that must be precompiled individually (web workers, canvas shim, fonts).
    config.assets.precompile += ['workers/pancore_worker.js', 'workers/mygenome_worker.js', 'jit/Extras/excanvas.js', '*.eot', '*.svg', '*.ttf', '*.woff']

    # Assets served without a fingerprint digest so they can be referenced
    # by a stable URL (workers are loaded by filename at runtime).
    config.assets.nodigest = ['workers/pancore_worker.js', 'workers/mygenome_worker.js']
    config.assets.nodigest_fonts = ['glyphicons-halflings-regular.eot', 'glyphicons-halflings-regular.svg', 'glyphicons-halflings-regular.ttf', 'glyphicons-halflings-regular.woff']

    # BUGFIX: interpolating the bare `Rails` module produced the relative
    # path "Rails/vendor/assets/fonts"; use Rails.root for the app root.
    config.assets.paths << "#{Rails.root}/vendor/assets/fonts"

    # Component versions displayed in the UI / used for cache busting.
    config.versions = {
      :unipept => "2.4",
      :gem => "0.5.7",
      :uniprot => "2014.05"
    }

    # Hostname of the public Unipept API endpoint.
    config.api_host = "api.unipept.ugent.be"
  end
end
bump version
# Bootstrap: load the boot script that lives next to this file, then the
# whole Rails stack.
require File.expand_path('../boot', __FILE__)
require 'rails/all'

# Require the Gemfile's gems — the :default group together with the group
# named after the running Rails environment — whenever Bundler is present.
Bundler.require(:default, Rails.env) if defined?(Bundler)
module UnipeptWeb
  # Central Rails application configuration for the Unipept web app.
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Custom directories with classes and modules you want to be autoloadable.
    # config.autoload_paths += %W(#{config.root}/extras)

    # Only load the plugins named here, in the order given (default is alphabetical).
    # :all can be used as a placeholder for all plugins not explicitly named.
    # config.plugins = [ :exception_notification, :ssl_requirement, :all ]

    # Activate observers that should always be running.
    # config.active_record.observers = :cacher, :garbage_collector, :forum_observer

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    # JavaScript files you want as :defaults (application.js is always included).
    # config.action_view.javascript_expansions[:defaults] = %w(jquery rails)

    # Configure the default encoding used in templates for Ruby 1.9.
    config.encoding = "utf-8"

    # Configure sensitive parameters which will be filtered from the log file.
    config.filter_parameters += [:password]

    # Enable the asset pipeline
    config.assets.enabled = true

    # Version of your assets, change this if you want to expire all your assets
    config.assets.version = '1'

    # Assets that must be precompiled individually (web workers, canvas shim, fonts).
    config.assets.precompile += ['workers/pancore_worker.js', 'workers/mygenome_worker.js', 'jit/Extras/excanvas.js', '*.eot', '*.svg', '*.ttf', '*.woff']

    # Assets served without a fingerprint digest so they can be referenced
    # by a stable URL (workers are loaded by filename at runtime).
    config.assets.nodigest = ['workers/pancore_worker.js', 'workers/mygenome_worker.js']
    config.assets.nodigest_fonts = ['glyphicons-halflings-regular.eot', 'glyphicons-halflings-regular.svg', 'glyphicons-halflings-regular.ttf', 'glyphicons-halflings-regular.woff']

    # BUGFIX: interpolating the bare `Rails` module produced the relative
    # path "Rails/vendor/assets/fonts"; use Rails.root for the app root.
    config.assets.paths << "#{Rails.root}/vendor/assets/fonts"

    # Component versions displayed in the UI / used for cache busting.
    config.versions = {
      :unipept => "2.4.2",
      :gem => "0.5.7",
      :uniprot => "2014.05"
    }

    # Hostname of the public Unipept API endpoint.
    config.api_host = "api.unipept.ugent.be"
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.