after_merge
stringlengths 28
79.6k
| before_merge
stringlengths 20
79.6k
| url
stringlengths 38
71
| full_traceback
stringlengths 43
922k
| traceback_type
stringclasses 555
values |
|---|---|---|---|---|
def perform_action(self, user, technique, target=None):
"""Do something with the thing: animated
:param user:
:param technique: Not a dict: a Technique or Item
:param target:
:returns:
"""
technique.advance_round()
result = technique.use(user, target)
try:
tools.load_sound(technique.sfx).play()
except AttributeError:
pass
# action is performed, so now use sprites to animate it
# this value will be None if the target is off screen
target_sprite = self._monster_sprite_map.get(target, None)
# slightly delay the monster shake, so technique animation
# is synchronized with the damage shake motion
hit_delay = 0
if user:
message = trans(
"combat_used_x", {"user": user.name, "name": technique.name_trans}
)
# TODO: a real check or some params to test if should tackle, etc
if technique in user.moves:
hit_delay += 0.5
user_sprite = self._monster_sprite_map[user]
self.animate_sprite_tackle(user_sprite)
if target_sprite:
self.task(
partial(self.animate_sprite_take_damage, target_sprite),
hit_delay + 0.2,
)
self.task(partial(self.blink, target_sprite), hit_delay + 0.6)
# Track damage
self._damage_map[target].add(user)
else: # assume this was an item used
if result:
message += "\n" + trans("item_success")
else:
message += "\n" + trans("item_failure")
self.alert(message)
self.suppress_phase_change()
else:
if result:
self.suppress_phase_change()
self.alert(
trans(
"combat_status_damage",
{"name": target.name, "status": technique.name_trans},
)
)
if result and target_sprite and hasattr(technique, "images"):
tech_sprite = self.get_technique_animation(technique)
tech_sprite.rect.center = target_sprite.rect.center
self.task(tech_sprite.image.play, hit_delay)
self.task(partial(self.sprites.add, tech_sprite, layer=50), hit_delay)
self.task(tech_sprite.kill, 3)
|
def perform_action(self, user, technique, target=None):
"""Do something with the thing: animated
:param user:
:param technique: Not a dict: a Technique or Item
:param target:
:returns:
"""
technique.advance_round()
result = technique.use(user, target)
try:
tools.load_sound(technique.sfx).play()
except AttributeError:
pass
# action is performed, so now use sprites to animate it
# this value will be None if the target is off screen
target_sprite = self._monster_sprite_map.get(target, None)
# slightly delay the monster shake, so technique animation
# is synchronized with the damage shake motion
hit_delay = 0
if user:
# message = "%s used %s!" % (user.name, technique.name)
message = trans("combat_used_x", {"user": user.name, "name": technique.name})
# TODO: a real check or some params to test if should tackle, etc
if technique in user.moves:
hit_delay += 0.5
user_sprite = self._monster_sprite_map[user]
self.animate_sprite_tackle(user_sprite)
if target_sprite:
self.task(
partial(self.animate_sprite_take_damage, target_sprite),
hit_delay + 0.2,
)
self.task(partial(self.blink, target_sprite), hit_delay + 0.6)
# Track damage
self._damage_map[target].add(user)
else: # assume this was an item used
if result:
message += "\n" + trans("item_success")
else:
message += "\n" + trans("item_failure")
self.alert(message)
self.suppress_phase_change()
else:
if result:
self.suppress_phase_change()
self.alert(
trans(
"combat_status_damage",
{"name": target.name, "status": technique.name},
)
)
if result and target_sprite and hasattr(technique, "images"):
tech_sprite = self.get_technique_animation(technique)
tech_sprite.rect.center = target_sprite.rect.center
self.task(tech_sprite.image.play, hit_delay)
self.task(partial(self.sprites.add, tech_sprite, layer=50), hit_delay)
self.task(tech_sprite.kill, 3)
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def open_technique_menu(self):
"""Open menus to choose a Technique to use
:return: None
"""
def choose_technique():
# open menu to choose technique
menu = self.game.push_state("Menu")
menu.shrink_to_items = True
# add techniques to the menu
for tech in self.monster.moves:
image = self.shadow_text(tech.name_trans)
item = MenuItem(image, None, None, tech)
menu.add(item)
# position the new menu
menu.anchor("bottom", self.rect.top)
menu.anchor("right", self.game.screen.get_rect().right)
# set next menu after after selection is made
menu.on_menu_selection = choose_target
def choose_target(menu_item):
# open menu to choose target of technique
technique = menu_item.game_object
state = self.game.push_state("CombatTargetMenuState")
state.on_menu_selection = partial(enqueue_technique, technique)
def enqueue_technique(technique, menu_item):
# enqueue the technique
target = menu_item.game_object
combat_state = self.game.get_state_name("CombatState")
combat_state.enqueue_action(self.monster, technique, target)
# close all the open menus
self.game.pop_state() # close target chooser
self.game.pop_state() # close technique menu
self.game.pop_state() # close the monster action menu
choose_technique()
|
def open_technique_menu(self):
"""Open menus to choose a Technique to use
:return: None
"""
def choose_technique():
# open menu to choose technique
menu = self.game.push_state("Menu")
menu.shrink_to_items = True
# add techniques to the menu
for tech in self.monster.moves:
image = self.shadow_text(tech.name)
item = MenuItem(image, tech.name, None, tech)
menu.add(item)
# position the new menu
menu.anchor("bottom", self.rect.top)
menu.anchor("right", self.game.screen.get_rect().right)
# set next menu after after selection is made
menu.on_menu_selection = choose_target
def choose_target(menu_item):
# open menu to choose target of technique
technique = menu_item.game_object
state = self.game.push_state("CombatTargetMenuState")
state.on_menu_selection = partial(enqueue_technique, technique)
def enqueue_technique(technique, menu_item):
# enqueue the technique
target = menu_item.game_object
combat_state = self.game.get_state_name("CombatState")
combat_state.enqueue_action(self.monster, technique, target)
# close all the open menus
self.game.pop_state() # close target chooser
self.game.pop_state() # close technique menu
self.game.pop_state() # close the monster action menu
choose_technique()
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def choose_technique():
# open menu to choose technique
menu = self.game.push_state("Menu")
menu.shrink_to_items = True
# add techniques to the menu
for tech in self.monster.moves:
image = self.shadow_text(tech.name_trans)
item = MenuItem(image, None, None, tech)
menu.add(item)
# position the new menu
menu.anchor("bottom", self.rect.top)
menu.anchor("right", self.game.screen.get_rect().right)
# set next menu after after selection is made
menu.on_menu_selection = choose_target
|
def choose_technique():
# open menu to choose technique
menu = self.game.push_state("Menu")
menu.shrink_to_items = True
# add techniques to the menu
for tech in self.monster.moves:
image = self.shadow_text(tech.name)
item = MenuItem(image, tech.name, None, tech)
menu.add(item)
# position the new menu
menu.anchor("bottom", self.rect.top)
menu.anchor("right", self.game.screen.get_rect().right)
# set next menu after after selection is made
menu.on_menu_selection = choose_target
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def startup(self, **kwargs):
self.state = "normal"
# this sprite is used to display the item
# its also animated to pop out of the backpack
self.item_center = self.rect.width * 0.164, self.rect.height * 0.13
self.item_sprite = Sprite()
self.item_sprite.image = None
self.sprites.add(self.item_sprite)
# do not move this line
super(ItemMenuState, self).startup(**kwargs)
self.menu_items.line_spacing = tools.scale(5)
# this is the area where the item description is displayed
rect = self.game.screen.get_rect()
rect.top = tools.scale(106)
rect.left = tools.scale(3)
rect.width = tools.scale(250)
rect.height = tools.scale(32)
self.text_area = TextArea(self.font, self.font_color, (96, 96, 128))
self.text_area.rect = rect
self.sprites.add(self.text_area, layer=100)
# load the backpack icon
self.backpack_center = self.rect.width * 0.16, self.rect.height * 0.45
self.load_sprite("gfx/ui/item/backpack.png", center=self.backpack_center, layer=100)
|
def startup(self, **kwargs):
self.state = "normal"
# this sprite is used to display the item
# its also animated to pop out of the backpack
self.item_center = self.rect.width * 0.164, self.rect.height * 0.13
self.item_sprite = Sprite()
self.item_sprite.image = None
self.sprites.add(self.item_sprite)
# do not move this line
super(ItemMenuState, self).startup(**kwargs)
self.menu_items.line_spacing = tools.scale(5)
# this is the area where the item description is displayed
rect = self.game.screen.get_rect()
center = rect.center
rect.width *= 0.95
rect.height *= 0.25
rect.center = center
rect.top = tools.scale(190)
self.text_area = TextArea(self.font, self.font_color, (96, 96, 128))
self.text_area.rect = rect
self.sprites.add(self.text_area, layer=100)
# load the backpack icon
self.backpack_center = self.rect.width * 0.16, self.rect.height * 0.45
self.load_sprite("gfx/ui/item/backpack.png", center=self.backpack_center, layer=100)
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def on_menu_selection(self, menu_item):
"""Called when player has selected something from the inventory
Currently, opens a new menu depending on the state context
:param menu_item:
:return:
"""
item = menu_item.game_object
state = self.determine_state_called_from()
if state in item.usable_in:
self.open_confirm_use_menu(item)
else:
msg = trans("item_cannot_use_here", {"name": item.name_trans})
tools.open_dialog(self.game, [msg])
|
def on_menu_selection(self, menu_item):
"""Called when player has selected something from the inventory
Currently, opens a new menu depending on the state context
:param menu_item:
:return:
"""
item = menu_item.game_object
state = self.determine_state_called_from()
if state in item.usable_in:
self.open_confirm_use_menu(item)
else:
msg = trans("item_cannot_use_here", {"name": item.name})
tools.open_dialog(self.game, [msg])
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def initialize_items(self):
"""Get all player inventory items and add them to menu
:return:
"""
for name, properties in self.game.player1.inventory.items():
obj = properties["item"]
image = self.shadow_text(obj.name_trans, bg=(128, 128, 128))
yield MenuItem(image, obj.name_trans, obj.description_trans, obj)
|
def initialize_items(self):
"""Get all player inventory items and add them to menu
:return:
"""
for name, properties in self.game.player1.inventory.items():
obj = properties["item"]
image = self.shadow_text(obj.name, bg=(128, 128, 128))
yield MenuItem(image, obj.name, obj.description, obj)
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def animate_open(self):
"""Animate the menu sliding in
:return:
"""
self.state = "opening" # required
# position the menu off screen. it will be slid into view with an animation
right, height = prepare.SCREEN_SIZE
# TODO: more robust API for sizing (kivy esque?)
# this is highly irregular:
# shrink to get the final width
# record the width
# turn off shrink, then adjust size
self.shrink_to_items = True # force shrink of menu
self.menu_items.expand = False # force shrink of items
self.refresh_layout() # rearrange items
width = self.rect.width # store the ideal width
self.shrink_to_items = False # force shrink of menu
self.menu_items.expand = True # force shrink of items
self.refresh_layout() # rearrange items
self.rect = pygame.Rect(right, 0, width, height) # set new rect
# animate the menu sliding in
ani = self.animate(self.rect, x=right - width, duration=0.50)
ani.callback = lambda: setattr(self, "state", "normal")
return ani
|
def animate_open(self):
"""Animate the menu sliding in
:return:
"""
self.state = "opening" # required
# position the menu off screen. it will be slid into view with an animation
right, height = prepare.SCREEN_SIZE
# TODO: more robust API for sizing (kivy esque?)
# TODO: after menu "add" merge, this can be simplified
# this is highly irregular:
# shrink to get the final width
# record the width
# turn off shrink, then adjust size
self.shrink_to_items = True # force shrink of menu
self.menu_items.expand = False # force shrink of items
self.initialize_items() # re-add items, trigger layout
width = self.rect.width # store the ideal width
self.shrink_to_items = False # force shrink of menu
self.menu_items.expand = True # force shrink of items
self.initialize_items() # re-add items, trigger layout
self.rect = pygame.Rect(right, 0, width, height) # set new rect
# animate the menu sliding in
ani = self.animate(self.rect, x=right - width, duration=0.50)
ani.callback = lambda: setattr(self, "state", "normal")
return ani
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def startup(self, **kwargs):
self.state = "normal"
# this sprite is used to display the item
# its also animated to pop out of the backpack
self.item_center = self.rect.width * 0.164, self.rect.height * 0.13
self.item_sprite = Sprite()
self.item_sprite.image = None
self.sprites.add(self.item_sprite)
# do not move this line
super(ItemMenuState, self).startup(**kwargs)
self.menu_items.line_spacing = tools.scale(5)
# this is the area where the item description is displayed
rect = self.game.screen.get_rect()
rect.top = tools.scale(106)
rect.left = tools.scale(3)
rect.width = tools.scale(250)
rect.height = tools.scale(32)
self.text_area = TextArea(self.font, self.font_color, (96, 96, 128))
print(rect)
self.text_area.rect = rect
self.sprites.add(self.text_area, layer=100)
# load the backpack icon
self.backpack_center = self.rect.width * 0.16, self.rect.height * 0.45
self.load_sprite("gfx/ui/item/backpack.png", center=self.backpack_center, layer=100)
|
def startup(self, **kwargs):
self.state = "normal"
# this sprite is used to display the item
# its also animated to pop out of the backpack
self.item_center = self.rect.width * 0.164, self.rect.height * 0.13
self.item_sprite = Sprite()
self.item_sprite.image = None
self.sprites.add(self.item_sprite)
# do not move this line
super(ItemMenuState, self).startup(**kwargs)
self.menu_items.line_spacing = tools.scale(5)
# this is the area where the item description is displayed
rect = self.game.screen.get_rect()
rect.top = tools.scale(106)
rect.left = tools.scale(3)
rect.width = tools.scale(250)
rect.height = tools.scale(32)
self.text_area = TextArea(self.font, self.font_color, (96, 96, 128))
self.text_area.rect = rect
self.sprites.add(self.text_area, layer=100)
# load the backpack icon
self.backpack_center = self.rect.width * 0.16, self.rect.height * 0.45
self.load_sprite("gfx/ui/item/backpack.png", center=self.backpack_center, layer=100)
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def animate_open(self):
"""Animate the menu sliding in
:return:
"""
self.state = "opening" # required
# position the menu off screen. it will be slid into view with an animation
right, height = prepare.SCREEN_SIZE
# TODO: more robust API for sizing (kivy esque?)
# this is highly irregular:
# shrink to get the final width
# record the width
# turn off shrink, then adjust size
self.shrink_to_items = True # force shrink of menu
self.menu_items.expand = False # force shrink of items
self.refresh_layout() # rearrange items
width = self.rect.width # store the ideal width
self.shrink_to_items = False # force shrink of menu
self.menu_items.expand = True # force shrink of items
self.refresh_layout() # rearrange items
self.rect = pygame.Rect(right, 0, width, height) # set new rect
# animate the menu sliding in
ani = self.animate(self.rect, x=right - width, duration=0.50)
ani.callback = lambda: setattr(self, "state", "normal")
return ani
|
def animate_open(self):
"""Animate the menu sliding in
:return:
"""
self.state = "opening" # required
# position the menu off screen. it will be slid into view with an animation
right, height = prepare.SCREEN_SIZE
# TODO: more robust API for sizing (kivy esque?)
# TODO: after menu "add" merge, this can be simplified
# this is highly irregular:
# shrink to get the final width
# record the width
# turn off shrink, then adjust size
self.shrink_to_items = True # force shrink of menu
self.menu_items.expand = False # force shrink of items
self.refresh_layout() # rearrange items
width = self.rect.width # store the ideal width
self.shrink_to_items = False # force shrink of menu
self.menu_items.expand = True # force shrink of items
self.refresh_layout() # rearrange items
self.rect = pygame.Rect(right, 0, width, height) # set new rect
# animate the menu sliding in
ani = self.animate(self.rect, x=right - width, duration=0.50)
ani.callback = lambda: setattr(self, "state", "normal")
return ani
|
https://github.com/Tuxemon/Tuxemon/issues/160
|
Traceback (most recent call last):
File "./tuxemon.py", line 60, in <module>
main()
File "/tmp/Tuxemon/tuxemon/core/main.py", line 109, in main
control.main()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 512, in main
self.main_loop()
File "/tmp/Tuxemon/tuxemon/core/control.py", line 573, in main_loop
self.update(time_delta)
File "/tmp/Tuxemon/tuxemon/core/control.py", line 203, in update
state.update(dt)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 156, in update
self.update_phase()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 328, in update_phase
self.handle_action_queue()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 338, in handle_action_queue
self.check_party_hp()
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 614, in check_party_hp
self.faint_monster(monster)
File "/tmp/Tuxemon/tuxemon/core/states/combat/combat.py", line 581, in faint_monster
awarded_exp = monster.total_experience/monster.level/len(self._damage_map[monster])
KeyError: <core.components.monster.Monster object at 0x7fb24236ddd0>
|
KeyError
|
def main():
args = _options.options.parse_args()
if args.help:
_options.options.print_help()
return
if args.version:
print(metadata.version("nox"), file=sys.stderr)
return
setup_logging(color=args.color, verbose=args.verbose)
# Execute the appropriate tasks.
exit_code = workflow.execute(
global_config=args,
workflow=(
tasks.load_nox_module,
tasks.merge_noxfile_options,
tasks.discover_manifest,
tasks.filter_manifest,
tasks.honor_list_request,
tasks.verify_manifest_nonempty,
tasks.run_manifest,
tasks.print_summary,
tasks.create_report,
tasks.final_reduce,
),
)
# Done; exit.
sys.exit(exit_code)
|
def main():
args = _options.options.parse_args()
if args.help:
_options.options.print_help()
return
if args.version:
dist = pkg_resources.get_distribution("nox")
print(dist.version, file=sys.stderr)
return
setup_logging(color=args.color, verbose=args.verbose)
# Execute the appropriate tasks.
exit_code = workflow.execute(
global_config=args,
workflow=(
tasks.load_nox_module,
tasks.merge_noxfile_options,
tasks.discover_manifest,
tasks.filter_manifest,
tasks.honor_list_request,
tasks.verify_manifest_nonempty,
tasks.run_manifest,
tasks.print_summary,
tasks.create_report,
tasks.final_reduce,
),
)
# Done; exit.
sys.exit(exit_code)
|
https://github.com/theacodes/nox/issues/271
|
nox --help
Traceback (most recent call last):
File "...\python38\lib\runpy.py", line 193, in _run_module_as_main
return _run_code(code, main_globals, None,
File "...\python38\lib\runpy.py", line 86, in _run_code
exec(code, run_globals)
File "...\AppData\Roaming\Python\Python38\Scripts\nox.exe\__main__.py", line 5, in <module>
File "...\AppData\Roaming\Python\Python38\site-packages\nox\__main__.py", line 24, in <module>
import pkg_resources
ModuleNotFoundError: No module named 'pkg_resources'
pip install setuptools
Collecting setuptools
...
Successfully installed setuptools-44.0.0
nox --help
usage: nox [-h] [--version] [-l] [-s [SESSIONS [SESSIONS ...]]] [-k KEYWORDS] [-v] [-r] [--no-reuse-existing-virtualenvs] [-f NOXFILE] [--envdir ENVDIR]
[-x] [--no-stop-on-first-error] [--error-on-missing-interpreters] [--no-error-on-missing-interpreters] [--error-on-external-run]
[--no-error-on-external-run] [--install-only] [--report REPORT] [--non-interactive] [--nocolor] [--forcecolor]
...
Nox is a Python automation toolkit.
|
ModuleNotFoundError
|
def __init__(self, shape, default=None):
"""
The basic shape class to be set in InputType.
Attribute:
shape: list of (int), symbolic values, RangeDim object
The valid shape of the input
default: tuple of int or None
The default shape that is used for initiating the model, and set in
the metadata of the model file.
If None, then `shape` would be used.
"""
from coremltools.converters.mil.mil import get_new_symbol
if not isinstance(shape, (list, tuple)):
msg = "Shape should be list or tuple, got type {} instead"
raise ValueError(msg.format(type(shape)))
self.symbolic_shape = []
shape = list(shape)
for idx, s in enumerate(shape):
if s is None or s == -1:
msg = (
"Dimension cannot be None or -1. Use "
+ "ct.RangeDim for runtime determined dimension. "
+ "Dim {}: {} "
+ "See https://coremltools.readme.io/docs/flexible-inputs"
)
raise ValueError(msg.format(idx, s))
if isinstance(s, RangeDim):
sym = s.symbol
self.symbolic_shape.append(sym)
elif isinstance(s, (np.generic, six.integer_types)) or is_symbolic(s):
self.symbolic_shape.append(s)
else:
raise ValueError("Unknown type {} to build symbolic shape.".format(type(s)))
self.shape = tuple(shape)
if default is not None:
if not isinstance(default, (list, tuple)):
raise ValueError(
"Default shape should be list or tuple, got type {} instead".format(
type(default)
)
)
for idx, s in enumerate(default):
if not isinstance(s, (np.generic, six.integer_types)) and not is_symbolic(
s
):
raise ValueError(
"Default shape invalid, got error at index {} which is {}".format(
idx, s
)
)
else:
default = []
for idx, s in enumerate(self.shape):
if isinstance(s, RangeDim):
default.append(s.default)
elif s is None or s == -1:
default.append(self.symbolic_shape[idx])
else:
default.append(s)
self.default = tuple(default)
|
def __init__(self, shape, default=None):
"""
The basic shape class to be set in InputType.
Attribute:
shape: list of (int), symbolic values, RangeDim object
The valid shape of the input
default: tuple of int or None
The default shape that is used for initiating the model, and set in
the metadata of the model file.
If None, then `shape` would be used.
"""
from coremltools.converters.mil.mil import get_new_symbol
if not isinstance(shape, (list, tuple)):
msg = "Shape should be list or tuple, got type {} instead"
raise ValueError(msg.format(type(shape)))
self.symbolic_shape = []
shape = list(shape)
for idx, s in enumerate(shape):
if s is None or s == -1:
msg = (
"Dimension cannot be None of -1. Use "
+ "ct.RangeDim for runtime determined dimension. "
+ "Dim {}: {} "
+ "See https://coremltools.readme.io/docs/flexible-inputs"
)
raise ValueError(msg.format(idx, s))
if isinstance(s, RangeDim):
sym = s.symbol
self.symbolic_shape.append(sym)
elif isinstance(s, (np.generic, six.integer_types)) or is_symbolic(s):
self.symbolic_shape.append(s)
else:
raise ValueError("Unknown type {} to build symbolic shape.".format(type(s)))
self.shape = tuple(shape)
if default is not None:
if not isinstance(default, (list, tuple)):
raise ValueError(
"Default shape should be list or tuple, got type {} instead".format(
type(default)
)
)
for idx, s in enumerate(default):
if not isinstance(s, (np.generic, six.integer_types)) and not is_symbolic(
s
):
raise ValueError(
"Default shape invalid, got error at index {} which is {}".format(
idx, s
)
)
else:
default = []
for idx, s in enumerate(self.shape):
if isinstance(s, RangeDim):
default.append(s.default)
elif s is None or s == -1:
default.append(self.symbolic_shape[idx])
else:
default.append(s)
self.default = tuple(default)
|
https://github.com/apple/coremltools/issues/877
|
import coremltools as ct
shape = ct.Shape(shape=(-1,224,224,3), default=(1,224,224,3))
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/lib/python3.6/dist-packages/coremltools/converters/mil/input_types.py", line 300, in __init__
raise ValueError(msg.format(idx, s))
ValueError: Dimension cannot be None of -1. Use ct.RangeDim for runtime determined dimension. Dim 0: -1 See https://coremltools.readme.io/docs/flexible-inputs
|
ValueError
|
def _remove_internal_identity_nodes(nnssa):
"""
remove identity nodes that are not connected to the model outputs
"""
delete_count = 0
for fn_key in list(nnssa.functions.keys()):
f = nnssa.functions[fn_key]
for name in list(f.graph.keys()):
if name not in f.graph:
continue
node = f.graph[name]
# Check if the node is in graph outputs
if len(node.inputs) != 1:
continue
if len(node.outputs) == 0 and len(node.control_outputs) == 0:
continue
# Remove identity node
inp_node = f.graph[node.inputs[0]]
if node.op == "Identity" and inp_node.op != "get_tuple":
delete_count += 1
parent_name = f.graph[name].inputs[0]
disconnect_edge(f.graph, parent_name, name)
for control_input in f.graph[name].control_inputs:
replace_control_dest(f.graph, control_input, name, parent_name)
replace_node(f.graph, name, parent_name) # join parent to children
delete_node(f.graph, name)
return delete_count
|
def _remove_internal_identity_nodes(nnssa):
"""
remove identity nodes that are not connected to the model outputs
"""
delete_count = 0
for fn_key in list(nnssa.functions.keys()):
f = nnssa.functions[fn_key]
keys = list(f.graph.keys())
for k in keys:
if k not in f.graph:
continue
node = f.graph[k]
if len(node.inputs) != 1 or len(node.outputs) != 1:
continue
inp_node = f.graph[node.inputs[0]]
if node.op == "Identity" and inp_node.op != "get_tuple":
delete_count += 1
parent_name = f.graph[k].inputs[0]
disconnect_edge(f.graph, parent_name, k)
for control_input in f.graph[k].control_inputs:
replace_control_dest(f.graph, control_input, k, parent_name)
replace_node(f.graph, k, parent_name) # join parent to children
delete_node(f.graph, k)
return delete_count
|
https://github.com/apple/coremltools/issues/623
|
Converting pb model to .mlmodel......
0 assert nodes deleted
872 nodes deleted
36 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 24 nodes.
77 identity nodes deleted
Traceback (most recent call last):
File "tf2coreml.py", line 198, in <module>
convertModel(model=_TRAIN_MODEL, ml_model=_ML_MODEL, pb_model=_PB_MODEL)
File "tf2coreml.py", line 174, in convertModel
minimum_ios_deployment_target='12'
File "/home/ubuntu/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/coremltools/converters/tensorflow/_tf_converter.py", line 193, in convert
optional_inputs=optional_inputs)
File "/home/ubuntu/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/coremltools/converters/nnssa/coreml/ssa_converter.py", line 130, in ssa_convert
p(ssa)
File "/home/ubuntu/anaconda3/envs/tensorflow_p36/lib/python3.6/site-packages/coremltools/converters/nnssa/coreml/graph_pass/op_removals.py", line 20, in
remove_no_ops_and_shift_control_dependencies
f.graph[each_control_input].control_outputs.remove(node.name)
ValueError: list.remove(x): x not in list
|
ValueError
|
def check_connections(gd):
"""
Given a graph, checks that all
- inputs/outputs are symmetric
- control_inputs/control_outputs are symmetric
- The graph does not reference vertices outside of the graph
Takes a graph in "dict{str, ParsedNode}" form. Does not return,
asserts false on failure.
"""
# check that inputs and outputs line up
for k, v in gd.items():
for i in v.inputs:
assert k in gd[i].outputs
for i in v.outputs:
assert k in gd[i].inputs
for i in v.control_inputs:
assert k in gd[i].control_outputs
for i in v.control_outputs:
message = f"Node {k} not in {i} control_inputs"
assert k in gd[i].control_inputs, message
|
def check_connections(gd):
"""
Given a graph, checks that all
- inputs/outputs are symmetric
- control_inputs/control_outputs are symmetric
- The graph does not reference vertices outside of the graph
Takes a graph in "dict{str, ParsedNode}" form. Does not return,
asserts false on failure.
"""
# check that inputs and outputs line up
for k, v in gd.items():
for i in v.inputs:
assert k in gd[i].outputs
for i in v.outputs:
assert k in gd[i].inputs
for i in v.control_inputs:
assert k in gd[i].control_outputs
for i in v.control_outputs:
assert k in gd[i].control_inputs
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def transform_nhwc_to_nchw(nnssa):
"""
Mark each one of the node with "NHWC", so that the conversion process
could avoid inserting unnecessary transpositions.
A node's format is "NHWC" if and only if:
(1) it is a conv or pooling or image_resize layer with "NHWC" data format
(2) it is a rank-preserving operation whose inputs are all "NHWC"
"""
for fn_key in list(nnssa.functions.keys()):
graph = nnssa.functions[fn_key].graph
# this pass needs the ssa to be in the topologically sorted order
node_names = topsort(graph)
# Mark all NHWC nodes
nhwc_nodes = []
for name in node_names:
node = graph[name]
if len(node.outputs) > 0 and len(node.inputs) > 0 and _is_NHWC(graph, node):
node.attr["data_format"] = "NHWC_format_inserted"
nhwc_nodes.append(name)
for name in nhwc_nodes:
node = graph[name]
# Adjust type inference
if builtins.is_tensor(node.datatype):
s = node.datatype.get_shape()
if len(s) == 4:
new_shape = tuple([s[0], s[3], s[1], s[2]])
node.datatype = builtins.tensor(
node.datatype.get_primitive(), new_shape
)
node.attr["symbolic_datatype"] = node.datatype
if "_output_shapes" in node.attr:
orig_out_shapes = node.attr["_output_shapes"]
if len(orig_out_shapes) == 1 and len(orig_out_shapes[0]) == 4:
s = orig_out_shapes[0]
node.attr["_output_shapes"] = [[s[0], s[3], s[1], s[2]]]
if node.op in ELEMENTWISE_OPS:
for inp in node.inputs:
parent_node = graph[inp]
if parent_node.value is None:
continue
# if there is a constant vector input
val = np.array(parent_node.value.val)
if len(val.shape) == 1 and builtins.is_tensor(parent_node.datatype):
new_shape = (1, val.shape[0], 1, 1)
parent_node.datatype = builtins.tensor(
parent_node.datatype.get_primitive(), new_shape
)
parent_node.value.val = np.reshape(
parent_node.value.val, new_shape
)
# Insert NHWC -> NCHW transpose
for i, inp_node_name in enumerate(node.inputs):
inp_node_format = graph[inp_node_name].attr.get("data_format")
symbolic_value = graph[inp_node_name].attr["symbolic_value"]
if (
graph[inp_node_name].op == "Const"
or len(graph[inp_node_name].datatype.get_shape()) != 4
or (symbolic_value and not any_symbolic_or_unknown(symbolic_value))
):
# Const weights and parameters
continue
if inp_node_format != "NHWC_format_inserted":
assert len(graph[inp_node_name].datatype.get_shape()) == 4
_insert_transpose_to_nchw(graph, graph[inp_node_name], node)
# Insert NCHW -> NHWC transpose
for i, out_node_name in enumerate(node.outputs):
out_node_format = graph[out_node_name].attr.get("data_format")
if out_node_format != "NHWC_format_inserted":
_insert_transpose_from_nchw(graph, node, graph[out_node_name])
# Adjust output shape and concat layer's axis parameter
if (
node.op == "Concat"
and len(node.inputs) > 1
and graph[node.inputs[0]].value is not None
):
axis = graph[node.inputs[0]].value.val
axis = 4 + axis if axis < 0 else axis
if axis == 3:
node.attr["axis"] = 1
elif axis == 2 or axis == 1:
node.attr["axis"] = axis + 1
else:
node.attr["axis"] = axis
if (
node.op == "ConcatV2"
and len(node.inputs) > 1
and graph[node.inputs[-1]].value is not None
):
axis = graph[node.inputs[-1]].value.val
axis = 4 + axis if axis < 0 else axis
if axis == 3:
node.attr["axis"] = 1
elif axis == 2 or axis == 1:
node.attr["axis"] = axis + 1
else:
node.attr["axis"] = axis
|
def transform_nhwc_to_nchw(nnssa):
"""
Mark each one of the node with "NHWC", so that the conversion process
could avoid inserting unnecessary transpositions.
A node's format is "NHWC" if and only if:
(1) it is a conv or pooling or image_resize layer with "NHWC" data format
(2) it is a rank-preserving operation whose inputs are all "NHWC"
"""
for fn_key in list(nnssa.functions.keys()):
graph = nnssa.functions[fn_key].graph
# this pass needs the ssa to be in the topologically sorted order
node_names = topsort(graph)
# Mark all NHWC nodes
nhwc_nodes = []
for name in node_names:
node = graph[name]
if len(node.outputs) > 0 and len(node.inputs) > 0 and _is_NHWC(graph, node):
node.attr["data_format"] = "NHWC_format_inserted"
nhwc_nodes.append(name)
for name in nhwc_nodes:
node = graph[name]
# Adjust type inference
if builtins.is_tensor(node.datatype):
s = node.datatype.get_shape()
if len(s) == 4:
new_shape = tuple([s[0], s[3], s[1], s[2]])
node.datatype = builtins.tensor(
node.datatype.get_primitive(), new_shape
)
node.attr["symbolic_datatype"] = node.datatype
if "_output_shapes" in node.attr:
orig_out_shapes = node.attr["_output_shapes"]
if len(orig_out_shapes) == 1 and len(orig_out_shapes[0]) == 4:
s = orig_out_shapes[0]
node.attr["_output_shapes"] = [[s[0], s[3], s[1], s[2]]]
if node.op in ELEMENTWISE_OPS:
for inp in node.inputs:
parent_node = graph[inp]
if parent_node.value is not None:
# if there is a constant vector input
val = np.array(parent_node.value.val)
if len(val.shape) == 1 and builtins.is_tensor(
parent_node.datatype
):
new_shape = (1, val.shape[0], 1, 1)
parent_node.datatype = builtins.tensor(
parent_node.datatype.get_primitive(), new_shape
)
parent_node.value.val = np.reshape(
parent_node.value.val, new_shape
)
# Insert NHWC -> NCHW transpose
for i, inp_node_name in enumerate(node.inputs):
inp_node_format = graph[inp_node_name].attr.get("data_format")
symbolic_value = graph[inp_node_name].attr["symbolic_value"]
if (
graph[inp_node_name].op == "Const"
or len(graph[inp_node_name].datatype.get_shape()) != 4
or (symbolic_value and not any_symbolic_or_unknown(symbolic_value))
):
# Const weights and parameters
continue
if inp_node_format != "NHWC_format_inserted":
assert len(graph[inp_node_name].datatype.get_shape()) == 4
_insert_transpose_to_nchw(graph, graph[inp_node_name], node)
# Insert NCHW -> NHWC transpose
for i, out_node_name in enumerate(node.outputs):
out_node_format = graph[out_node_name].attr.get("data_format")
if out_node_format != "NHWC_format_inserted":
_insert_transpose_from_nchw(graph, node, graph[out_node_name])
# Adjust output shape and concat layer's axis parameter
if (
node.op == "Concat"
and len(node.inputs) > 1
and graph[node.inputs[0]].value is not None
):
axis = graph[node.inputs[0]].value.val
axis = 4 + axis if axis < 0 else axis
if axis == 3:
node.attr["axis"] = 1
elif axis == 2 or axis == 1:
node.attr["axis"] = axis + 1
else:
node.attr["axis"] = axis
if (
node.op == "ConcatV2"
and len(node.inputs) > 1
and graph[node.inputs[-1]].value is not None
):
axis = graph[node.inputs[-1]].value.val
axis = 4 + axis if axis < 0 else axis
if axis == 3:
node.attr["axis"] = 1
elif axis == 2 or axis == 1:
node.attr["axis"] = axis + 1
else:
node.attr["axis"] = axis
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def fuse_batch_norm(ssa):
"""
A graph pass that match and fuses following op patterns into a single BatchNorm op.
Pattern 1:
[Const] [Const]
| |
V V
[...] --> [Mul] --> [Add] --> [...] to [...] --> [BatchNorm] --> [...]
Pattern 2:
[Const] [Const] [Const]
| | |
V V V
[...] --> [Sub] --> [Mul] --> [Add] --> [...] to [...] --> [BatchNorm] --> [...]
Pattern 3:
[Const] [Const] [Const] [Const]
| | | |
V V V V
[...] --> [Sub] --> [RealDiv] --> [Mul] --> [BiasAdd] --> [...] to [...] --> [BatchNorm] --> [...]
"""
def _match_batch_norm_pattern(graph, entry_node, pattern_ops):
if not _check_number_outputs(entry_node, 1):
return None
nodes_to_merge = list()
node = graph[entry_node.outputs[0]]
for i, op in enumerate(pattern_ops):
if node.op != op:
return None
if node.op != pattern_ops[i] and not _check_number_outputs(node, 1):
return None
if not _check_number_inputs(node, 2):
return None
const_node = graph[node.inputs[1]]
if not _check_single_out_vector_constant_node(const_node):
return None
if not _check_rank_matches(const_node, node):
return None
nodes_to_merge.extend([const_node, node])
if len(node.outputs) == 0: # do not fuse the output layer
return None
node = graph[node.outputs[0]]
if len(nodes_to_merge) != len(pattern_ops) * 2:
return None
return nodes_to_merge
def _merge_batch_norm(graph, nodes, pattern_id=1):
expected_num_nodes = 4
if pattern_id == 2:
expected_num_nodes = 6
elif pattern_id == 3:
expected_num_nodes = 8
assert len(nodes) == expected_num_nodes
current_node = graph[nodes[1].inputs[0]]
out_node = nodes[-1]
bn_outputs = out_node.outputs[:]
fused_bn_node = ParsedNode()
fused_bn_node.op = "BatchNorm"
fused_bn_node.name = out_node.name + "_batch_norm"
fused_bn_node.attr = {
"gamma": np.squeeze(nodes[0].value.val),
"beta": np.squeeze(nodes[2].value.val),
}
if pattern_id == 2:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[4].value.val),
}
elif pattern_id == 3:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[4].value.val)
/ np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[6].value.val),
}
fused_bn_node.datatype = current_node.datatype
graph[fused_bn_node.name] = fused_bn_node
# combine control i/o
control_inputs = []
control_outputs = []
bn_node_names = [x.name for x in nodes]
print(f"\n\nProcessing Fused Batch Norm: {fused_bn_node.name}")
for name in bn_node_names:
control_inputs += graph[name].control_inputs
control_outputs += graph[name].control_outputs
# Modify control outputs with name of fused batch norm node.
for control_output_name in graph[name].control_outputs:
ctrl_node = graph[control_output_name]
for i, inpt_name in enumerate(ctrl_node.control_inputs):
if inpt_name == name:
ctrl_node.control_inputs[i] = fused_bn_node.name
fused_bn_node.control_inputs = control_inputs
fused_bn_node.control_outputs = control_outputs
# connect fused node to entry and output nodes
connect_edge(graph, current_node.name, fused_bn_node.name)
connect_dests(graph, fused_bn_node.name, bn_outputs)
# correct output's inputs order
for out in bn_outputs:
if len(graph[out].inputs) < 2:
continue
out_inputs = graph[out].inputs
a = out_inputs.index(out_node.name)
b = out_inputs.index(fused_bn_node.name)
out_inputs[a], out_inputs[b] = out_inputs[b], out_inputs[a]
# delete merged nodes
for name in bn_node_names:
delete_node(graph, name)
def _fuse_batch_norm(graph):
keys = list(graph.keys())
count = 0
for k in keys:
if k not in graph:
continue
current_node = graph[k]
# return nodes order: [Const, Sub, Const, RealDiv, Const, Mul, Const, BiasAdd]
nodes3 = _match_batch_norm_pattern(
graph, current_node, ["Sub", "RealDiv", "Mul", "BiasAdd"]
)
# return nodes order: : [Const, Sub, Const, Mul, Const, Add]
nodes2 = _match_batch_norm_pattern(
graph, current_node, ["Sub", "Mul", "Add"]
)
# return nodes order: : [Const, Mul, Const, Add]
nodes1 = _match_batch_norm_pattern(graph, current_node, ["Mul", "Add"])
if nodes3:
_merge_batch_norm(graph, nodes=nodes3, pattern_id=3)
count += len(nodes3)
if nodes2:
_merge_batch_norm(graph, nodes=nodes2, pattern_id=2)
count += len(nodes2)
if nodes1:
_merge_batch_norm(graph, nodes=nodes1, pattern_id=1)
count += len(nodes1)
if count > 0:
print("[Op Fusion] Fused {} nodes into BatchNorms.".format(count))
for fn_key in list(ssa.functions.keys()):
f = ssa.functions[fn_key]
_fuse_batch_norm(f.graph)
|
def fuse_batch_norm(ssa):
"""
A graph pass that match and fuses following op patterns into a single BatchNorm op.
Pattern 1:
[Const] [Const]
| |
V V
[...] --> [Mul] --> [Add] --> [...] to [...] --> [BatchNorm] --> [...]
Pattern 2:
[Const] [Const] [Const]
| | |
V V V
[...] --> [Sub] --> [Mul] --> [Add] --> [...] to [...] --> [BatchNorm] --> [...]
Pattern 3:
[Const] [Const] [Const] [Const]
| | | |
V V V V
[...] --> [Sub] --> [RealDiv] --> [Mul] --> [BiasAdd] --> [...] to [...] --> [BatchNorm] --> [...]
"""
def _match_batch_norm_pattern(graph, entry_node, pattern_ops):
if not _check_number_outputs(entry_node, 1):
return None
nodes_to_merge = list()
node = graph[entry_node.outputs[0]]
for i, op in enumerate(pattern_ops):
if node.op != op:
return None
if node.op != pattern_ops[i] and not _check_number_outputs(node, 1):
return None
if not _check_number_inputs(node, 2):
return None
const_node = graph[node.inputs[1]]
if not _check_single_out_vector_constant_node(const_node):
return None
if not _check_rank_matches(const_node, node):
return None
nodes_to_merge.extend([const_node, node])
if len(node.outputs) == 0: # do not fuse the output layer
return None
node = graph[node.outputs[0]]
if len(nodes_to_merge) != len(pattern_ops) * 2:
return None
return nodes_to_merge
def _merge_batch_norm(graph, nodes, pattern_id=1):
expected_num_nodes = 4
if pattern_id == 2:
expected_num_nodes = 6
elif pattern_id == 3:
expected_num_nodes = 8
assert len(nodes) == expected_num_nodes
current_node = graph[nodes[1].inputs[0]]
out_node = nodes[-1]
bn_outputs = out_node.outputs[:]
fused_bn_node = ParsedNode()
fused_bn_node.op = "BatchNorm"
fused_bn_node.name = out_node.name + "_batch_norm"
fused_bn_node.attr = {
"gamma": np.squeeze(nodes[0].value.val),
"beta": np.squeeze(nodes[2].value.val),
}
if pattern_id == 2:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[4].value.val),
}
elif pattern_id == 3:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[4].value.val)
/ np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[6].value.val),
}
fused_bn_node.datatype = current_node.datatype
graph[fused_bn_node.name] = fused_bn_node
# combine control i/o
control_inputs = list()
control_outputs = list()
bn_node_names = [x.name for x in nodes]
for name in bn_node_names:
control_inputs += graph[name].control_inputs
control_outputs += graph[name].control_outputs
fused_bn_node.control_inputs.extend(control_inputs)
fused_bn_node.control_outputs.extend(control_outputs)
# connect fused node to entry and output nodes
connect_edge(graph, current_node.name, fused_bn_node.name)
connect_dests(graph, fused_bn_node.name, bn_outputs)
# correct output's inputs order
for out in bn_outputs:
if len(graph[out].inputs) < 2:
continue
out_inputs = graph[out].inputs
a = out_inputs.index(out_node.name)
b = out_inputs.index(fused_bn_node.name)
out_inputs[a], out_inputs[b] = out_inputs[b], out_inputs[a]
# delete merged nodes
for name in bn_node_names:
delete_node(graph, name)
def _fuse_batch_norm(graph):
keys = list(graph.keys())
count = 0
for k in keys:
if k not in graph:
continue
current_node = graph[k]
# return nodes order: [Const, Sub, Const, RealDiv, Const, Mul, Const, BiasAdd]
nodes3 = _match_batch_norm_pattern(
graph, current_node, ["Sub", "RealDiv", "Mul", "BiasAdd"]
)
# return nodes order: : [Const, Sub, Const, Mul, Const, Add]
nodes2 = _match_batch_norm_pattern(
graph, current_node, ["Sub", "Mul", "Add"]
)
# return nodes order: : [Const, Mul, Const, Add]
nodes1 = _match_batch_norm_pattern(graph, current_node, ["Mul", "Add"])
if nodes3:
_merge_batch_norm(graph, nodes=nodes3, pattern_id=3)
count += len(nodes3)
if nodes2:
_merge_batch_norm(graph, nodes=nodes2, pattern_id=2)
count += len(nodes2)
if nodes1:
_merge_batch_norm(graph, nodes=nodes1, pattern_id=1)
count += len(nodes1)
if count > 0:
print("[Op Fusion] Fused {} nodes into BatchNorms.".format(count))
for fn_key in list(ssa.functions.keys()):
f = ssa.functions[fn_key]
_fuse_batch_norm(f.graph)
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def _merge_batch_norm(graph, nodes, pattern_id=1):
expected_num_nodes = 4
if pattern_id == 2:
expected_num_nodes = 6
elif pattern_id == 3:
expected_num_nodes = 8
assert len(nodes) == expected_num_nodes
current_node = graph[nodes[1].inputs[0]]
out_node = nodes[-1]
bn_outputs = out_node.outputs[:]
fused_bn_node = ParsedNode()
fused_bn_node.op = "BatchNorm"
fused_bn_node.name = out_node.name + "_batch_norm"
fused_bn_node.attr = {
"gamma": np.squeeze(nodes[0].value.val),
"beta": np.squeeze(nodes[2].value.val),
}
if pattern_id == 2:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[4].value.val),
}
elif pattern_id == 3:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[4].value.val) / np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[6].value.val),
}
fused_bn_node.datatype = current_node.datatype
graph[fused_bn_node.name] = fused_bn_node
# combine control i/o
control_inputs = []
control_outputs = []
bn_node_names = [x.name for x in nodes]
print(f"\n\nProcessing Fused Batch Norm: {fused_bn_node.name}")
for name in bn_node_names:
control_inputs += graph[name].control_inputs
control_outputs += graph[name].control_outputs
# Modify control outputs with name of fused batch norm node.
for control_output_name in graph[name].control_outputs:
ctrl_node = graph[control_output_name]
for i, inpt_name in enumerate(ctrl_node.control_inputs):
if inpt_name == name:
ctrl_node.control_inputs[i] = fused_bn_node.name
fused_bn_node.control_inputs = control_inputs
fused_bn_node.control_outputs = control_outputs
# connect fused node to entry and output nodes
connect_edge(graph, current_node.name, fused_bn_node.name)
connect_dests(graph, fused_bn_node.name, bn_outputs)
# correct output's inputs order
for out in bn_outputs:
if len(graph[out].inputs) < 2:
continue
out_inputs = graph[out].inputs
a = out_inputs.index(out_node.name)
b = out_inputs.index(fused_bn_node.name)
out_inputs[a], out_inputs[b] = out_inputs[b], out_inputs[a]
# delete merged nodes
for name in bn_node_names:
delete_node(graph, name)
|
def _merge_batch_norm(graph, nodes, pattern_id=1):
expected_num_nodes = 4
if pattern_id == 2:
expected_num_nodes = 6
elif pattern_id == 3:
expected_num_nodes = 8
assert len(nodes) == expected_num_nodes
current_node = graph[nodes[1].inputs[0]]
out_node = nodes[-1]
bn_outputs = out_node.outputs[:]
fused_bn_node = ParsedNode()
fused_bn_node.op = "BatchNorm"
fused_bn_node.name = out_node.name + "_batch_norm"
fused_bn_node.attr = {
"gamma": np.squeeze(nodes[0].value.val),
"beta": np.squeeze(nodes[2].value.val),
}
if pattern_id == 2:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[4].value.val),
}
elif pattern_id == 3:
fused_bn_node.attr = {
"mean": np.squeeze(nodes[0].value.val),
"gamma": np.squeeze(nodes[4].value.val) / np.squeeze(nodes[2].value.val),
"beta": np.squeeze(nodes[6].value.val),
}
fused_bn_node.datatype = current_node.datatype
graph[fused_bn_node.name] = fused_bn_node
# combine control i/o
control_inputs = list()
control_outputs = list()
bn_node_names = [x.name for x in nodes]
for name in bn_node_names:
control_inputs += graph[name].control_inputs
control_outputs += graph[name].control_outputs
fused_bn_node.control_inputs.extend(control_inputs)
fused_bn_node.control_outputs.extend(control_outputs)
# connect fused node to entry and output nodes
connect_edge(graph, current_node.name, fused_bn_node.name)
connect_dests(graph, fused_bn_node.name, bn_outputs)
# correct output's inputs order
for out in bn_outputs:
if len(graph[out].inputs) < 2:
continue
out_inputs = graph[out].inputs
a = out_inputs.index(out_node.name)
b = out_inputs.index(fused_bn_node.name)
out_inputs[a], out_inputs[b] = out_inputs[b], out_inputs[a]
# delete merged nodes
for name in bn_node_names:
delete_node(graph, name)
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def _remove_output_identity_nodes(nnssa):
"""
remove identity nodes that ARE connected to the model outputs
"""
delete_count = 0
for fn_key in list(nnssa.functions.keys()):
f = nnssa.functions[fn_key]
keys = list(f.graph.keys())
for k in keys:
if k not in f.graph:
continue
node = f.graph[k]
if node.op != "Identity" or len(node.inputs) != 1:
continue
if len(node.outputs) != 0 or (k not in f.outputs) or k != node.name:
continue
# this means node k is the "output-identity" node that nnssa inserts
# we remove it here
parent_name = node.inputs[0]
parent_node = f.graph[parent_name]
# Continue if parent node has an other outputs than identity node.
if any([an_output != k for an_output in parent_node.outputs]):
continue
delete_count += 1
# Remove Identity node and copy existing parent node
parent_node = copy.deepcopy(f.graph[parent_name])
for control_input_name in node.control_inputs:
if control_input_name == parent_node.name:
continue
if control_input_name in parent_node.control_inputs:
continue
parent_node.control_inputs.append(control_input_name)
del f.graph[k]
del f.graph[parent_name]
parent_node.name = k
parent_node.outputs = []
f.graph[k] = parent_node
node = f.graph[k]
for p in node.inputs:
for idx, out in enumerate(f.graph[p].outputs):
if out == parent_name:
f.graph[p].outputs[idx] = k
for p in node.control_inputs:
for idx, out in enumerate(f.graph[p].control_outputs):
if out == parent_name:
f.graph[p].control_outputs[idx] = k
return delete_count
|
def _remove_output_identity_nodes(nnssa):
"""
remove identity nodes that ARE connected to the model outputs
"""
delete_count = 0
for fn_key in list(nnssa.functions.keys()):
f = nnssa.functions[fn_key]
keys = list(f.graph.keys())
for k in keys:
if k not in f.graph:
continue
if f.graph[k].op == "Identity" and len(f.graph[k].inputs) == 1:
if (
len(f.graph[k].outputs) == 0
and (k in f.outputs)
and k == f.graph[k].name
):
# this means node k is the "output-identity" node that nnssa inserts
# we remove it here
delete_count += 1
parent_name = f.graph[k].inputs[0]
f.graph[parent_name].control_outputs = f.graph[k].control_outputs
if any(
[an_output != k for an_output in f.graph[parent_name].outputs]
):
continue
del f.graph[k]
f.graph[k] = copy.deepcopy(f.graph[parent_name])
del f.graph[parent_name]
f.graph[k].name = k
f.graph[k].outputs = []
for p in f.graph[k].inputs:
for idx, out in enumerate(f.graph[p].outputs):
if out == parent_name:
f.graph[p].outputs[idx] = k
for p in f.graph[k].control_inputs:
for idx, out in enumerate(f.graph[p].control_outputs):
if out == parent_name:
f.graph[p].control_outputs[idx] = k
return delete_count
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def __init__(self, tfnode=None):
super(ParsedTFNode, self).__init__()
self.original_node = tfnode
if tfnode is not None:
from .parse import parse_attr
self.name = tfnode.name
if tfnode.op == "PlaceholderWithDefault":
self.op = "Placeholder"
else:
self.op = tfnode.op
self.inputs = [x for x in tfnode.input if not x.startswith("^")]
self.control_inputs = [x[1:] for x in tfnode.input if x.startswith("^")]
self.attr = {k: parse_attr(v) for k, v in tfnode.attr.items()}
|
def __init__(self, tfnode=None):
ParsedNode.__init__(self)
self.original_node = tfnode
if tfnode is not None:
from .parse import parse_attr
self.name = tfnode.name
if tfnode.op == "PlaceholderWithDefault":
self.op = "Placeholder"
else:
self.op = tfnode.op
self.inputs = [x for x in tfnode.input if not x.startswith("^")]
self.control_inputs = [x[1:] for x in tfnode.input if x.startswith("^")]
self.attr = {k: parse_attr(v) for k, v in tfnode.attr.items()}
|
https://github.com/apple/coremltools/issues/554
|
0 assert nodes deleted
5 nodes deleted
0 nodes deleted
0 nodes deleted
[Op Fusion] fuse_bias_add() deleted 4 nodes.
1 identity nodes deleted
2 disconnected nodes deleted
---------------------------------------------------------------------------
AssertionError Traceback (most recent call last)
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/tensorflow/_tf_converter.py in convert(filename, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, **kwargs)
94 custom_shape_functions=custom_shape_functions,
---> 95 optional_inputs=optional_inputs)
96 except ImportError as e:
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/coreml/ssa_converter.py in ssa_convert(ssa, top_func, inputs, outputs, image_input_names, is_bgr, red_bias, green_bias, blue_bias, gray_bias, image_scale, class_labels, predicted_feature_name, predicted_probabilities_output, add_custom_layers, custom_conversion_functions, custom_shape_functions, optional_inputs)
132 for f in list(ssa.functions.values()):
--> 133 check_connections(f.graph)
134
~/.pyenv/versions/3.6.8/envs/fritz-models-private/src/coremltools/coremltools/converters/nnssa/commons/basic_graph_ops.py in check_connections(gd)
152 try:
--> 153 assert (k in gd[i].control_inputs)
154 except Exception:
|
AssertionError
|
def _configure(self):
try:
self.store = storage.get_driver(self.conf)
self.index = indexer.get_driver(self.conf)
self.index.connect()
except Exception as e:
raise utils.Retry(e)
|
def _configure(self):
self.store = storage.get_driver(self.conf)
self.index = indexer.get_driver(self.conf)
self.index.connect()
|
https://github.com/gnocchixyz/gnocchi/issues/681
|
2018-01-25 00:19:57.033 621506 ERROR cotyledon [-] Unhandled exception
2018-01-25 00:19:57.033 621506 ERROR cotyledon Traceback (most recent call last):
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/cotyledon/__init__.py", line 52, in _exit_on_exception
2018-01-25 00:19:57.033 621506 ERROR cotyledon yield
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/cotyledon/__init__.py", line 130, in _run
2018-01-25 00:19:57.033 621506 ERROR cotyledon self.run()
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/gnocchi/cli.py", line 93, in run
2018-01-25 00:19:57.033 621506 ERROR cotyledon self._configure()
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/retrying.py", line 68, in wrapped_f
2018-01-25 00:19:57.033 621506 ERROR cotyledon return Retrying(*dargs, **dkw).call(f, *args, **kw)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/retrying.py", line 223, in call
2018-01-25 00:19:57.033 621506 ERROR cotyledon return attempt.get(self._wrap_exception)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/retrying.py", line 261, in get
2018-01-25 00:19:57.033 621506 ERROR cotyledon six.reraise(self.value[0], self.value[1], self.value[2])
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/retrying.py", line 217, in call
2018-01-25 00:19:57.033 621506 ERROR cotyledon attempt = Attempt(fn(*args, **kwargs), attempt_number, False)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/gnocchi/cli.py", line 88, in _configure
2018-01-25 00:19:57.033 621506 ERROR cotyledon self.store = storage.get_driver(self.conf)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/gnocchi/storage/__init__.py", line 164, in get_driver
2018-01-25 00:19:57.033 621506 ERROR cotyledon return get_driver_class(conf)(conf.storage)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/gnocchi/storage/swift.py", line 90, in __init__
2018-01-25 00:19:57.033 621506 ERROR cotyledon self.swift.put_container(self.MEASURE_PREFIX)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/swiftclient/client.py", line 1728, in put_container
2018-01-25 00:19:57.033 621506 ERROR cotyledon query_string=query_string)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/swiftclient/client.py", line 1647, in _retry
2018-01-25 00:19:57.033 621506 ERROR cotyledon service_token=self.service_token, **kwargs)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/swiftclient/client.py", line 996, in put_container
2018-01-25 00:19:57.033 621506 ERROR cotyledon conn.request(method, path, '', headers)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/swiftclient/client.py", line 437, in request
2018-01-25 00:19:57.033 621506 ERROR cotyledon files=files, **self.requests_args)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/swiftclient/client.py", line 420, in _request
2018-01-25 00:19:57.033 621506 ERROR cotyledon return self.request_session.request(*arg, **kwarg)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 468, in request
2018-01-25 00:19:57.033 621506 ERROR cotyledon resp = self.send(prep, **send_kwargs)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/requests/sessions.py", line 576, in send
2018-01-25 00:19:57.033 621506 ERROR cotyledon r = adapter.send(request, **kwargs)
2018-01-25 00:19:57.033 621506 ERROR cotyledon File "/usr/lib/python2.7/site-packages/requests/adapters.py", line 433, in send
2018-01-25 00:19:57.033 621506 ERROR cotyledon raise SSLError(e, request=request)
2018-01-25 00:19:57.033 621506 ERROR cotyledon SSLError: ("bad handshake: Error([('SSL routines', 'ssl3_get_server_certificate', 'certificate verify failed')],)",)
2018-01-25 00:19:57.033 621506 ERROR cotyledon
|
SSLError
|
def group_serie(self, granularity, start=0):
# NOTE(jd) Our whole serialization system is based on Epoch, and we
# store unsigned integer, so we can't store anything before Epoch.
# Sorry!
if not self.ts.empty and self.ts.index[0].value < 0:
raise BeforeEpochError(self.ts.index[0])
return GroupedTimeSeries(self.ts[start:], granularity)
|
def group_serie(self, granularity, start=0):
# NOTE(jd) Our whole serialization system is based on Epoch, and we
# store unsigned integer, so we can't store anything before Epoch.
# Sorry!
if self.ts.index[0].value < 0:
raise BeforeEpochError(self.ts.index[0])
return GroupedTimeSeries(self.ts[start:], granularity)
|
https://github.com/gnocchixyz/gnocchi/issues/69
|
2017-06-02 16:59:03.590093 mod_wsgi (pid=16031): Exception occurred processing WSGI script '/var/www/gnocchi/app.wsgi'.
2017-06-02 16:59:03.590234 Traceback (most recent call last):
2017-06-02 16:59:03.590304 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.590391 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.590458 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.590525 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.590576 File "/usr/local/lib/python2.7/dist-packages/oslo_middleware/base.py", line 126, in __call__
2017-06-02 16:59:03.590636 response = req.get_response(self.application)
2017-06-02 16:59:03.590689 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.590756 application, catch_exc_info=False)
2017-06-02 16:59:03.590806 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.590871 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.590926 File "/usr/local/lib/python2.7/dist-packages/paste/urlmap.py", line 216, in __call__
2017-06-02 16:59:03.590982 return app(environ, start_response)
2017-06-02 16:59:03.591033 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.591097 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.591149 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.591211 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.591260 File "/usr/local/lib/python2.7/dist-packages/oslo_middleware/base.py", line 126, in __call__
2017-06-02 16:59:03.591322 response = req.get_response(self.application)
2017-06-02 16:59:03.591373 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.591435 application, catch_exc_info=False)
2017-06-02 16:59:03.591484 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.591538 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.591590 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.591642 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.591692 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.591752 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.591802 File "/usr/local/lib/python2.7/dist-packages/keystonemiddleware/auth_token/__init__.py", line 335, in __call__
2017-06-02 16:59:03.591871 response = req.get_response(self._app)
2017-06-02 16:59:03.591921 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.591983 application, catch_exc_info=False)
2017-06-02 16:59:03.592032 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.592095 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.592139 File "/usr/local/lib/python2.7/dist-packages/webob/exc.py", line 1162, in __call__
2017-06-02 16:59:03.592202 return self.application(environ, start_response)
2017-06-02 16:59:03.592243 File "/opt/stack/gnocchi/gnocchi/rest/app.py", line 69, in __call__
2017-06-02 16:59:03.592302 return self.app(environ, start_response)
2017-06-02 16:59:03.592363 File "/usr/local/lib/python2.7/dist-packages/pecan/middleware/recursive.py", line 56, in __call__
2017-06-02 16:59:03.592434 return self.application(environ, start_response)
2017-06-02 16:59:03.592476 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 840, in __call__
2017-06-02 16:59:03.592529 return super(Pecan, self).__call__(environ, start_response)
2017-06-02 16:59:03.592760 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 683, in __call__
2017-06-02 16:59:03.592830 self.invoke_controller(controller, args, kwargs, state)
2017-06-02 16:59:03.592873 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 574, in invoke_controller
2017-06-02 16:59:03.592935 result = controller(*args, **kwargs)
2017-06-02 16:59:03.592984 File "/opt/stack/gnocchi/gnocchi/rest/__init__.py", line 1536, in post
2017-06-02 16:59:03.593042 granularity, needed_overlap, fill, refresh, resample)
2017-06-02 16:59:03.593096 File "/opt/stack/gnocchi/gnocchi/rest/__init__.py", line 1657, in get_cross_metric_measures_from_objs
2017-06-02 16:59:03.593164 reaggregation, resample, granularity, needed_overlap, fill)
2017-06-02 16:59:03.593209 File "/opt/stack/gnocchi/gnocchi/storage/_carbonara.py", line 506, in get_cross_metric_measures
2017-06-02 16:59:03.593275 tss[i] = ts.resample(resample)
2017-06-02 16:59:03.593324 File "/opt/stack/gnocchi/gnocchi/carbonara.py", line 522, in resample
2017-06-02 16:59:03.593385 self.group_serie(sampling), sampling, self.aggregation_method)
2017-06-02 16:59:03.593440 File "/opt/stack/gnocchi/gnocchi/carbonara.py", line 249, in group_serie
2017-06-02 16:59:03.593502 if self.ts.index[0].value < 0:
2017-06-02 16:59:03.593542 File "/usr/local/lib/python2.7/dist-packages/pandas/indexes/base.py", line 1423, in __getitem__
2017-06-02 16:59:03.593610 return getitem(key)
2017-06-02 16:59:03.593655 IndexError: index 0 is out of bounds for axis 0 with size 0```
|
IndexError
|
def group_serie(self, granularity, start=None):
# NOTE(jd) Our whole serialization system is based on Epoch, and we
# store unsigned integer, so we can't store anything before Epoch.
# Sorry!
if not self.ts.empty and self.ts.index[0].value < 0:
raise BeforeEpochError(self.ts.index[0])
return self.ts[start:].groupby(
functools.partial(round_timestamp, freq=granularity * 10e8)
)
|
def group_serie(self, granularity, start=None):
# NOTE(jd) Our whole serialization system is based on Epoch, and we
# store unsigned integer, so we can't store anything before Epoch.
# Sorry!
if self.ts.index[0].value < 0:
raise BeforeEpochError(self.ts.index[0])
return self.ts[start:].groupby(
functools.partial(round_timestamp, freq=granularity * 10e8)
)
|
https://github.com/gnocchixyz/gnocchi/issues/69
|
2017-06-02 16:59:03.590093 mod_wsgi (pid=16031): Exception occurred processing WSGI script '/var/www/gnocchi/app.wsgi'.
2017-06-02 16:59:03.590234 Traceback (most recent call last):
2017-06-02 16:59:03.590304 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.590391 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.590458 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.590525 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.590576 File "/usr/local/lib/python2.7/dist-packages/oslo_middleware/base.py", line 126, in __call__
2017-06-02 16:59:03.590636 response = req.get_response(self.application)
2017-06-02 16:59:03.590689 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.590756 application, catch_exc_info=False)
2017-06-02 16:59:03.590806 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.590871 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.590926 File "/usr/local/lib/python2.7/dist-packages/paste/urlmap.py", line 216, in __call__
2017-06-02 16:59:03.590982 return app(environ, start_response)
2017-06-02 16:59:03.591033 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.591097 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.591149 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.591211 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.591260 File "/usr/local/lib/python2.7/dist-packages/oslo_middleware/base.py", line 126, in __call__
2017-06-02 16:59:03.591322 response = req.get_response(self.application)
2017-06-02 16:59:03.591373 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.591435 application, catch_exc_info=False)
2017-06-02 16:59:03.591484 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.591538 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.591590 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 131, in __call__
2017-06-02 16:59:03.591642 resp = self.call_func(req, *args, **self.kwargs)
2017-06-02 16:59:03.591692 File "/usr/local/lib/python2.7/dist-packages/webob/dec.py", line 196, in call_func
2017-06-02 16:59:03.591752 return self.func(req, *args, **kwargs)
2017-06-02 16:59:03.591802 File "/usr/local/lib/python2.7/dist-packages/keystonemiddleware/auth_token/__init__.py", line 335, in __call__
2017-06-02 16:59:03.591871 response = req.get_response(self._app)
2017-06-02 16:59:03.591921 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1316, in send
2017-06-02 16:59:03.591983 application, catch_exc_info=False)
2017-06-02 16:59:03.592032 File "/usr/local/lib/python2.7/dist-packages/webob/request.py", line 1280, in call_application
2017-06-02 16:59:03.592095 app_iter = application(self.environ, start_response)
2017-06-02 16:59:03.592139 File "/usr/local/lib/python2.7/dist-packages/webob/exc.py", line 1162, in __call__
2017-06-02 16:59:03.592202 return self.application(environ, start_response)
2017-06-02 16:59:03.592243 File "/opt/stack/gnocchi/gnocchi/rest/app.py", line 69, in __call__
2017-06-02 16:59:03.592302 return self.app(environ, start_response)
2017-06-02 16:59:03.592363 File "/usr/local/lib/python2.7/dist-packages/pecan/middleware/recursive.py", line 56, in __call__
2017-06-02 16:59:03.592434 return self.application(environ, start_response)
2017-06-02 16:59:03.592476 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 840, in __call__
2017-06-02 16:59:03.592529 return super(Pecan, self).__call__(environ, start_response)
2017-06-02 16:59:03.592760 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 683, in __call__
2017-06-02 16:59:03.592830 self.invoke_controller(controller, args, kwargs, state)
2017-06-02 16:59:03.592873 File "/usr/local/lib/python2.7/dist-packages/pecan/core.py", line 574, in invoke_controller
2017-06-02 16:59:03.592935 result = controller(*args, **kwargs)
2017-06-02 16:59:03.592984 File "/opt/stack/gnocchi/gnocchi/rest/__init__.py", line 1536, in post
2017-06-02 16:59:03.593042 granularity, needed_overlap, fill, refresh, resample)
2017-06-02 16:59:03.593096 File "/opt/stack/gnocchi/gnocchi/rest/__init__.py", line 1657, in get_cross_metric_measures_from_objs
2017-06-02 16:59:03.593164 reaggregation, resample, granularity, needed_overlap, fill)
2017-06-02 16:59:03.593209 File "/opt/stack/gnocchi/gnocchi/storage/_carbonara.py", line 506, in get_cross_metric_measures
2017-06-02 16:59:03.593275 tss[i] = ts.resample(resample)
2017-06-02 16:59:03.593324 File "/opt/stack/gnocchi/gnocchi/carbonara.py", line 522, in resample
2017-06-02 16:59:03.593385 self.group_serie(sampling), sampling, self.aggregation_method)
2017-06-02 16:59:03.593440 File "/opt/stack/gnocchi/gnocchi/carbonara.py", line 249, in group_serie
2017-06-02 16:59:03.593502 if self.ts.index[0].value < 0:
2017-06-02 16:59:03.593542 File "/usr/local/lib/python2.7/dist-packages/pandas/indexes/base.py", line 1423, in __getitem__
2017-06-02 16:59:03.593610 return getitem(key)
2017-06-02 16:59:03.593655 IndexError: index 0 is out of bounds for axis 0 with size 0```
|
IndexError
|
def __init__(self, granularity=None, points=None, timespan=None):
if granularity is not None and points is not None and timespan is not None:
if timespan != granularity * points:
raise ValueError("timespan ≠ granularity × points")
if granularity is not None and granularity <= 0:
raise ValueError("Granularity should be > 0")
if points is not None and points <= 0:
raise ValueError("Number of points should be > 0")
if granularity is None:
if points is None or timespan is None:
raise ValueError(
"At least two of granularity/points/timespan must be provided"
)
granularity = round(timespan / float(points))
else:
granularity = float(granularity)
if points is None:
if timespan is None:
self["timespan"] = None
else:
points = int(timespan / granularity)
if points <= 0:
raise ValueError("Calculated number of points is < 0")
self["timespan"] = granularity * points
else:
points = int(points)
self["timespan"] = granularity * points
self["points"] = points
self["granularity"] = granularity
|
def __init__(self, granularity=None, points=None, timespan=None):
if granularity is not None and points is not None and timespan is not None:
if timespan != granularity * points:
raise ValueError("timespan ≠ granularity × points")
if granularity is not None and granularity <= 0:
raise ValueError("Granularity should be > 0")
if points is not None and points <= 0:
raise ValueError("Number of points should be > 0")
if granularity is None:
if points is None or timespan is None:
raise ValueError(
"At least two of granularity/points/timespan must be provided"
)
granularity = round(timespan / float(points))
else:
granularity = float(granularity)
if points is None:
if timespan is None:
self["timespan"] = None
else:
points = int(timespan / granularity)
self["timespan"] = granularity * points
else:
points = int(points)
self["timespan"] = granularity * points
self["points"] = points
self["granularity"] = granularity
|
https://github.com/gnocchixyz/gnocchi/issues/40
|
Traceback (most recent call last):
File "/usr/lib64/python2.7/wsgiref/handlers.py", line 85, in run
self.result = application(self.environ, self.start_response)
File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
return self.func(req, *args, **kwargs)
File "/usr/lib/python2.7/site-packages/oslo_middleware/base.py", line 126, in __call__
response = req.get_response(self.application)
File "/usr/lib/python2.7/site-packages/webob/request.py", line 1299, in send
application, catch_exc_info=False)
File "/usr/lib/python2.7/site-packages/webob/request.py", line 1263, in call_application
app_iter = application(self.environ, start_response)
File "/usr/lib/python2.7/site-packages/paste/urlmap.py", line 216, in __call__
return app(environ, start_response)
File "/usr/lib/python2.7/site-packages/webob/dec.py", line 130, in __call__
resp = self.call_func(req, *args, **self.kwargs)
File "/usr/lib/python2.7/site-packages/webob/dec.py", line 195, in call_func
return self.func(req, *args, **kwargs)
File "/usr/lib/python2.7/site-packages/oslo_middleware/base.py", line 126, in __call__
response = req.get_response(self.application)
File "/usr/lib/python2.7/site-packages/webob/request.py", line 1299, in send
application, catch_exc_info=False)
File "/usr/lib/python2.7/site-packages/webob/request.py", line 1263, in call_application
app_iter = application(self.environ, start_response)
File "/usr/lib/python2.7/site-packages/webob/exc.py", line 1162, in __call__
return self.application(environ, start_response)
File "/usr/lib/python2.7/site-packages/gnocchi/rest/app.py", line 68, in __call__
return self.app(environ, start_response)
File "/usr/lib/python2.7/site-packages/pecan/middleware/recursive.py", line 56, in __call__
return self.application(environ, start_response)
File "/usr/lib/python2.7/site-packages/pecan/core.py", line 840, in __call__
return super(Pecan, self).__call__(environ, start_response)
File "/usr/lib/python2.7/site-packages/pecan/core.py", line 683, in __call__
self.invoke_controller(controller, args, kwargs, state)
File "/usr/lib/python2.7/site-packages/pecan/core.py", line 574, in invoke_controller
result = controller(*args, **kwargs)
File "/usr/lib/python2.7/site-packages/gnocchi/rest/__init__.py", line 354, in get_all
return pecan.request.indexer.list_archive_policies()
File "/usr/lib/python2.7/site-packages/gnocchi/indexer/sqlalchemy.py", line 559, in list_archive_policies
return list(session.query(ArchivePolicy).all())
File "/usr/lib64/python2.7/site-packages/sqlalchemy/orm/query.py", line 2613, in all
return list(self)
File "/usr/lib64/python2.7/site-packages/sqlalchemy/orm/loading.py", line 86, in instances
util.raise_from_cause(err)
File "/usr/lib64/python2.7/site-packages/sqlalchemy/util/compat.py", line 202, in raise_from_cause
reraise(type(exception), exception, tb=exc_tb, cause=cause)
File "/usr/lib64/python2.7/site-packages/sqlalchemy/orm/loading.py", line 71, in instances
rows = [proc(row) for row in fetch]
File "/usr/lib64/python2.7/site-packages/sqlalchemy/orm/loading.py", line 428, in _instance
loaded_instance, populate_existing, populators)
File "/usr/lib64/python2.7/site-packages/sqlalchemy/orm/loading.py", line 486, in _populate_full
dict_[key] = getter(row)
File "/usr/lib64/python2.7/site-packages/sqlalchemy/sql/type_api.py", line 1030, in process
return process_value(value, dialect)
File "/usr/lib/python2.7/site-packages/gnocchi/indexer/sqlalchemy_base.py", line 113, in process_result_value
return [archive_policy.ArchivePolicyItem(**v) for v in values]
File "/usr/lib/python2.7/site-packages/gnocchi/archive_policy.py", line 163, in __init__
raise ValueError("Number of points should be > 0")
ValueError: Number of points should be > 0
|
ValueError
|
def user_run_dir(self):
# Try to ensure that (/var)/run/user/$(id -u) exists so that
# `gpgconf --create-socketdir` can be run later.
#
# NOTE(opadron): This action helps prevent a large class of
# "file-name-too-long" errors in gpg.
try:
has_suitable_gpgconf = bool(GpgConstants.gpgconf_string)
except SpackGPGError:
has_suitable_gpgconf = False
# If there is no suitable gpgconf, don't even bother trying to
# precreate a user run dir.
if not has_suitable_gpgconf:
return None
result = None
for var_run in ("/run", "/var/run"):
if not os.path.exists(var_run):
continue
var_run_user = os.path.join(var_run, "user")
try:
if not os.path.exists(var_run_user):
os.mkdir(var_run_user)
os.chmod(var_run_user, 0o777)
user_dir = os.path.join(var_run_user, str(os.getuid()))
if not os.path.exists(user_dir):
os.mkdir(user_dir)
os.chmod(user_dir, 0o700)
# If the above operation fails due to lack of permissions, then
# just carry on without running gpgconf and hope for the best.
#
# NOTE(opadron): Without a dir in which to create a socket for IPC,
# gnupg may fail if GNUPGHOME is set to a path that
# is too long, where "too long" in this context is
# actually quite short; somewhere in the
# neighborhood of more than 100 characters.
#
# TODO(opadron): Maybe a warning should be printed in this case?
except OSError as exc:
if exc.errno not in (errno.EPERM, errno.EACCES):
raise
user_dir = None
# return the last iteration that provides a usable user run dir
if user_dir is not None:
result = user_dir
return result
|
def user_run_dir(self):
# Try to ensure that (/var)/run/user/$(id -u) exists so that
# `gpgconf --create-socketdir` can be run later.
#
# NOTE(opadron): This action helps prevent a large class of
# "file-name-too-long" errors in gpg.
try:
has_suitable_gpgconf = bool(_GpgConstants.gpgconf_string)
except SpackGPGError:
has_suitable_gpgconf = False
# If there is no suitable gpgconf, don't even bother trying to
# precreate a user run dir.
if not has_suitable_gpgconf:
return None
result = None
for var_run in ("/run", "/var/run"):
if not os.path.exists(var_run):
continue
var_run_user = os.path.join(var_run, "user")
try:
if not os.path.exists(var_run_user):
os.mkdir(var_run_user)
os.chmod(var_run_user, 0o777)
user_dir = os.path.join(var_run_user, str(os.getuid()))
if not os.path.exists(user_dir):
os.mkdir(user_dir)
os.chmod(user_dir, 0o700)
# If the above operation fails due to lack of permissions, then
# just carry on without running gpgconf and hope for the best.
#
# NOTE(opadron): Without a dir in which to create a socket for IPC,
# gnupg may fail if GNUPGHOME is set to a path that
# is too long, where "too long" in this context is
# actually quite short; somewhere in the
# neighborhood of more than 100 characters.
#
# TODO(opadron): Maybe a warning should be printed in this case?
except OSError as exc:
if exc.errno not in (errno.EPERM, errno.EACCES):
raise
user_dir = None
# return the last iteration that provides a usable user run dir
if user_dir is not None:
result = user_dir
return result
|
https://github.com/spack/spack/issues/20585
|
$> spack -d gpg trust e4s.pub
==> [2020-12-28-17:57:13.255469] Imported gpg from built-in commands
==> [2020-12-28-17:57:13.257931] Imported gpg from built-in commands
==> [2020-12-28-17:57:13.258679] '/usr/bin/gpgconf' '--version'
==> [2020-12-28-17:57:13.260456] '/usr/bin/gpgconf' '--dry-run' '--create-socketdir'
gpgconf: invalid option "--create-socketdir"
==> [2020-12-28-17:57:13.262194] '/usr/bin/gpg2' '--version'
==> [2020-12-28-17:57:13.264410] 'None' '--create-socketdir'
==> [2020-12-28-17:57:13.265610] ProcessError: None: No such file or directory: 'None'
Command: 'None' '--create-socketdir'
==> [2020-12-28-17:57:13.265703] Error: None: No such file or directory: 'None'
Command: 'None' '--create-socketdir'
Traceback (most recent call last):
File "/opt/spack/lib/spack/spack/util/executable.py", line 170, in __call__
env=env)
File "/usr/local/lib/python3.7/subprocess.py", line 775, in __init__
restore_signals, start_new_session)
File "/usr/local/lib/python3.7/subprocess.py", line 1522, in _execute_child
raise child_exception_type(errno_num, err_msg, err_filename)
FileNotFoundError: [Errno 2] No such file or directory: 'None': 'None'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/opt/spack/lib/spack/spack/main.py", line 762, in main
return _invoke_command(command, parser, args, unknown)
File "/opt/spack/lib/spack/spack/main.py", line 490, in _invoke_command
return_val = command(parser, args)
File "/opt/spack/lib/spack/spack/cmd/gpg.py", line 201, in gpg
args.func(args)
File "/opt/spack/lib/spack/spack/cmd/gpg.py", line 159, in gpg_trust
spack.util.gpg.trust(args.keyfile)
File "/opt/spack/lib/spack/spack/util/gpg.py", line 410, in trust
return get_global_gpg_instance().trust(*args, **kwargs)
File "/opt/spack/lib/spack/spack/util/gpg.py", line 345, in trust
self('--import', keyfile)
File "/opt/spack/lib/spack/spack/util/gpg.py", line 307, in __call__
if self.prep:
File "/opt/spack/lib/spack/llnl/util/lang.py", line 197, in _memoized_function
func.cache[args] = func(*args)
File "/opt/spack/lib/spack/spack/util/gpg.py", line 290, in prep
self.gpgconf_exe('--create-socketdir')
File "/opt/spack/lib/spack/spack/util/executable.py", line 204, in __call__
'%s: %s' % (self.exe[0], e.strerror), 'Command: ' + cmd_line)
spack.util.executable.ProcessError: None: No such file or directory: 'None'
Command: 'None' '--create-socketdir'
|
FileNotFoundError
|
def verify_executables(self):
"""Raise an error if any of the compiler executables is not valid.
This method confirms that for all of the compilers (cc, cxx, f77, fc)
that have paths, those paths exist and are executable by the current
user.
Raises a CompilerAccessError if any of the non-null paths for the
compiler are not accessible.
"""
def accessible_exe(exe):
# compilers may contain executable names (on Cray or user edited)
if not os.path.isabs(exe):
exe = spack.util.executable.which_string(exe)
if not exe:
return False
return os.path.isfile(exe) and os.access(exe, os.X_OK)
# setup environment before verifying in case we have executable names
# instead of absolute paths
with self._compiler_environment():
missing = [
cmp
for cmp in (self.cc, self.cxx, self.f77, self.fc)
if cmp and not accessible_exe(cmp)
]
if missing:
raise CompilerAccessError(self, missing)
|
def verify_executables(self):
"""Raise an error if any of the compiler executables is not valid.
This method confirms that for all of the compilers (cc, cxx, f77, fc)
that have paths, those paths exist and are executable by the current
user.
Raises a CompilerAccessError if any of the non-null paths for the
compiler are not accessible.
"""
missing = [
cmp
for cmp in (self.cc, self.cxx, self.f77, self.fc)
if cmp and not (os.path.isfile(cmp) and os.access(cmp, os.X_OK))
]
if missing:
raise CompilerAccessError(self, missing)
|
https://github.com/spack/spack/issues/17301
|
$> spack install zlib%gcc@8.3.0 arch=cray-cnl7-haswell
...
==> Error: Failed to install zlib due to ChildError: CompilerAccessError: Compiler 'gcc@8.3.0' has executables that are missing or are not executable: ['cc', 'CC', 'ftn', 'ftn']
/global/u1/l/lpeyrala/spack-greg-test/lib/spack/spack/build_environment.py:854, in child_process:
851 tb_string = traceback.format_exc()
852
853 # build up some context from the offending package so we can
>> 854 # show that, too.
855 package_context = get_package_context(tb)
856
857 build_log = None
Traceback (most recent call last):
File "/global/u1/l/lpeyrala/spack-greg-test/lib/spack/spack/build_environment.py", line 837, in child_process
setup_package(pkg, dirty=dirty)
File "/global/u1/l/lpeyrala/spack-greg-test/lib/spack/spack/build_environment.py", line 712, in setup_package
set_compiler_environment_variables(pkg, build_env)
File "/global/u1/l/lpeyrala/spack-greg-test/lib/spack/spack/build_environment.py", line 202, in set_compiler_environment_variables
compiler.verify_executables()
File "/global/u1/l/lpeyrala/spack-greg-test/lib/spack/spack/compiler.py", line 304, in verify_executables
raise CompilerAccessError(self, missing)
spack.compiler.CompilerAccessError: Compiler 'gcc@8.3.0' has executables that are missing or are not executable: ['cc', 'CC', 'ftn', 'ftn']
|
spack.compiler.CompilerAccessError
|
def is_activated(self, view):
"""Return True if package is activated."""
if not self.is_extension:
raise ValueError("is_activated called on package that is not an extension.")
if self.extendee_spec.package.installed_upstream:
# If this extends an upstream package, it cannot be activated for
# it. This bypasses construction of the extension map, which can
# can fail when run in the context of a downstream Spack instance
return False
extensions_layout = view.extensions_layout
exts = extensions_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
|
def is_activated(self, view):
"""Return True if package is activated."""
if not self.is_extension:
raise ValueError("is_activated called on package that is not an extension.")
extensions_layout = view.extensions_layout
exts = extensions_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
|
https://github.com/spack/spack/issues/15966
|
==> [2020-04-09-10:26:40.615561, 133285] WRITE LOCK (scons): /dcsrsoft/sandbox/eroche/meleze/opt/spack/.spack-db/prefix_lock[2450758027729552401:1] [Releasing]
==> [2020-04-09-10:26:40.615767, 133285] WRITE LOCK (scons): /dcsrsoft/sandbox/eroche/meleze/opt/spack/.spack-db/prefix_lock[2450758027729552401:1] [Released at 10:26:40.615744]
Traceback (most recent call last):
File "/dcsrsoft/sandbox/eroche/meleze/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/main.py", line 763, in main
return _invoke_command(command, parser, args, unknown)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/main.py", line 488, in _invoke_command
return_val = command(parser, args)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/cmd/uninstall.py", line 348, in uninstall
uninstall_specs(args, specs)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/cmd/uninstall.py", line 323, in uninstall_specs
do_uninstall(env, uninstall_list, args.force)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/cmd/uninstall.py", line 225, in do_uninstall
item.do_uninstall(force=force)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/package.py", line 1739, in do_uninstall
Package.uninstall_by_spec(self.spec, force)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/package.py", line 1711, in uninstall_by_spec
spack.hooks.pre_uninstall(spec)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/hooks/__init__.py", line 59, in __call__
hook(*args, **kwargs)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/hooks/extensions.py", line 18, in pre_uninstall
if pkg.is_activated(view):
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/package.py", line 1008, in is_activated
exts = extensions_layout.extension_map(self.extendee_spec)
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/directory_layout.py", line 430, in extension_map
return self._extension_map(spec).copy()
File "/dcsrsoft/sandbox/eroche/meleze/lib/spack/spack/directory_layout.py", line 457, in _extension_map
with open(path) as ext_file:
PermissionError: [Errno 13] Permission denied: '/dcsrsoft/spack/meleze/v2/opt/spack/linux-centos7-skylake_avx512/gcc-8.3.0/python-3.7.6-3qfa6rjryjmwkj7zdc26ntxkqxq6mkdi/.spack/extensions.yaml'
|
PermissionError
|
def get_package_dir_permissions(spec):
"""Return the permissions configured for the spec.
Include the GID bit if group permissions are on. This makes the group
attribute sticky for the directory. Package-specific settings take
precedent over settings for ``all``"""
perms = get_package_permissions(spec)
if perms & stat.S_IRWXG and spack.config.get("config:allow_sgid", True):
perms |= stat.S_ISGID
return perms
|
def get_package_dir_permissions(spec):
"""Return the permissions configured for the spec.
Include the GID bit if group permissions are on. This makes the group
attribute sticky for the directory. Package-specific settings take
precedent over settings for ``all``"""
perms = get_package_permissions(spec)
if perms & stat.S_IRWXG:
perms |= stat.S_ISGID
return perms
|
https://github.com/spack/spack/issues/14425
|
lib/spack/spack/cmd/__init__.py:102 ==> [2020-01-08-18:01:06.241182] Imported install from built-in commands
lib/spack/spack/config.py:706 ==> [2020-01-08-18:01:06.247268] Reading config file /afs/cern.ch/work/r/razumov/spack_vanilla/etc/spack/defaults/config.yaml
lib/spack/spack/cmd/__init__.py:102 ==> [2020-01-08-18:01:06.271299] Imported install from built-in commands
lib/spack/spack/config.py:706 ==> [2020-01-08-18:01:06.276303] Reading config file /afs/cern.ch/work/r/razumov/spack_vanilla/etc/spack/defaults/repos.yaml
lib/spack/spack/config.py:706 ==> [2020-01-08-18:01:10.210033] Reading config file /afs/cern.ch/work/r/razumov/spack_vanilla/etc/spack/defaults/packages.yaml
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.240640] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/providers/.builtin-index.json.lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.242159] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/providers/.builtin-index.json.lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.271533] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/providers/.builtin-index.json.lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.277038] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/tags/.builtin-index.json.lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.278357] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/tags/.builtin-index.json.lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.279135] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/tags/.builtin-index.json.lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.283876] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/patches/.builtin-index.json.lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.285200] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/patches/.builtin-index.json.lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.299337] READ LOCK: /afs/cern.ch/user/r/razumov/.spack/cache/patches/.builtin-index.json.lock[0:0] [Released]
lib/spack/spack/config.py:706 ==> [2020-01-08-18:01:10.312009] Reading config file /afs/cern.ch/user/r/razumov/.spack/linux/compilers.yaml
lib/spack/spack/concretize.py:565 ==> [2020-01-08-18:01:10.331186] Warning: gcc@4.8.5 cannot build optimized binaries for "broadwell". Using best target possible: "haswell"
lib/spack/spack/database.py:310 ==> [2020-01-08-18:01:10.356590] DATABASE LOCK TIMEOUT: 120s
lib/spack/spack/database.py:314 ==> [2020-01-08-18:01:10.356940] PACKAGE LOCK TIMEOUT: No timeout
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.357586] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.359717] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.368517] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.372162] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.373495] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.374094] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.376033] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.377234] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.377834] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.379224] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.380414] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.381048] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.382854] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.383386] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.383745] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.384759] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.385310] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.386315] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.387456] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.388098] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.390128] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.392482] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.392943] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.393335] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.394152] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.394689] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.395787] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[2895703049217635441:1] [Released]
lib/spack/spack/package.py:1645 ==> [2020-01-08-18:01:10.396123] Installing expat dependencies
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.396777] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.397757] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.398291] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.399129] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.400046] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.400590] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.401353] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.402320] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.402862] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.404483] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.404948] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.405323] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.406199] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.406925] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.407781] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.408839] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.409403] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.410740] WRITE LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.422206] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.422705] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.424183] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.425561] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.426151] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.427703] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/prefix_lock[7237090093046132987:1] [Released]
lib/spack/spack/package.py:1662 ==> [2020-01-08-18:01:10.428156] Installing libbsd
lib/spack/spack/package.py:1509 ==> [2020-01-08-18:01:10.428409] Searching for binary cache of libbsd
lib/spack/spack/binary_distribution.py:682 ==> [2020-01-08-18:01:10.429257] No Spack mirrors are currently configured
lib/spack/spack/package.py:1675 ==> [2020-01-08-18:01:10.430136] No binary for libbsd found: installing from source
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.430565] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.431556] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.432112] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.485290] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquiring]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.486355] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Acquired]
lib/spack/llnl/util/lock.py:368 ==> [2020-01-08-18:01:10.486996] READ LOCK: /afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/.spack-db/lock[0:0] [Released]
Traceback (most recent call last):
File "/afs/cern.ch/work/r/razumov/spack_vanilla/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/main.py", line 711, in main
return _invoke_command(command, parser, args, unknown)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/main.py", line 456, in _invoke_command
return_val = command(parser, args)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/cmd/install.py", line 373, in install
install_spec(args, kwargs, abstract, concrete)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/cmd/install.py", line 234, in install_spec
spec.package.do_install(**kwargs)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/package.py", line 1652, in do_install
dep.package.do_install(**dep_kwargs)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/package.py", line 1774, in do_install
spack.store.layout.create_install_directory(self.spec)
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/spack/directory_layout.py", line 299, in create_install_directory
mkdirp(spec.prefix, mode=perms, group=group, default_perms='parents')
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/llnl/util/filesystem.py", line 583, in mkdirp
raise e
File "/afs/cern.ch/work/r/razumov/spack_vanilla/lib/spack/llnl/util/filesystem.py", line 550, in mkdirp
os.chmod(path, mode)
PermissionError: [Errno 13] Permission denied: '/afs/cern.ch/work/r/razumov/spack_vanilla/opt/spack/linux-centos7-haswell/gcc-4.8.5/libbsd-0.10.0-zdpj4mixa3q7p2b3zgojligmsnnw54ne'
|
PermissionError
|
def replace_directory_transaction(directory_name, tmp_root=None):
"""Moves a directory to a temporary space. If the operations executed
within the context manager don't raise an exception, the directory is
deleted. If there is an exception, the move is undone.
Args:
directory_name (path): absolute path of the directory name
tmp_root (path): absolute path of the parent directory where to create
the temporary
Returns:
temporary directory where ``directory_name`` has been moved
"""
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
assert os.path.isdir(directory_name), "Invalid directory: " + directory_name
assert os.path.isabs(directory_name), (
'"directory_name" must contain an absolute path: ' + directory_name
)
directory_basename = os.path.basename(directory_name)
if tmp_root is not None:
assert os.path.isabs(tmp_root)
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
tty.debug("TEMPORARY DIRECTORY CREATED [{0}]".format(tmp_dir))
shutil.move(src=directory_name, dst=tmp_dir)
tty.debug("DIRECTORY MOVED [src={0}, dest={1}]".format(directory_name, tmp_dir))
try:
yield tmp_dir
except (Exception, KeyboardInterrupt, SystemExit):
# Delete what was there, before copying back the original content
if os.path.exists(directory_name):
shutil.rmtree(directory_name)
shutil.move(
src=os.path.join(tmp_dir, directory_basename),
dst=os.path.dirname(directory_name),
)
tty.debug("DIRECTORY RECOVERED [{0}]".format(directory_name))
msg = 'the transactional move of "{0}" failed.'
raise RuntimeError(msg.format(directory_name))
else:
# Otherwise delete the temporary directory
shutil.rmtree(tmp_dir)
tty.debug("TEMPORARY DIRECTORY DELETED [{0}]".format(tmp_dir))
|
def replace_directory_transaction(directory_name, tmp_root=None):
"""Moves a directory to a temporary space. If the operations executed
within the context manager don't raise an exception, the directory is
deleted. If there is an exception, the move is undone.
Args:
directory_name (path): absolute path of the directory name
tmp_root (path): absolute path of the parent directory where to create
the temporary
Returns:
temporary directory where ``directory_name`` has been moved
"""
# Check the input is indeed a directory with absolute path.
# Raise before anything is done to avoid moving the wrong directory
assert os.path.isdir(directory_name), '"directory_name" must be a valid directory'
assert os.path.isabs(directory_name), (
'"directory_name" must contain an absolute path'
)
directory_basename = os.path.basename(directory_name)
if tmp_root is not None:
assert os.path.isabs(tmp_root)
tmp_dir = tempfile.mkdtemp(dir=tmp_root)
tty.debug("TEMPORARY DIRECTORY CREATED [{0}]".format(tmp_dir))
shutil.move(src=directory_name, dst=tmp_dir)
tty.debug("DIRECTORY MOVED [src={0}, dest={1}]".format(directory_name, tmp_dir))
try:
yield tmp_dir
except (Exception, KeyboardInterrupt, SystemExit):
# Delete what was there, before copying back the original content
if os.path.exists(directory_name):
shutil.rmtree(directory_name)
shutil.move(
src=os.path.join(tmp_dir, directory_basename),
dst=os.path.dirname(directory_name),
)
tty.debug("DIRECTORY RECOVERED [{0}]".format(directory_name))
msg = 'the transactional move of "{0}" failed.'
raise RuntimeError(msg.format(directory_name))
else:
# Otherwise delete the temporary directory
shutil.rmtree(tmp_dir)
tty.debug("TEMPORARY DIRECTORY DELETED [{0}]".format(tmp_dir))
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def _is_background_tty(stream):
"""True if the stream is a tty and calling process is in the background."""
return stream.isatty() and os.getpgrp() != os.tcgetpgrp(stream.fileno())
|
def _is_background_tty():
"""Return True iff this process is backgrounded and stdout is a tty"""
if sys.stdout.isatty():
return os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
return False # not writing to tty, not background
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def __enter__(self):
if self._active:
raise RuntimeError("Can't re-enter the same log_output!")
if self.file_like is None:
raise RuntimeError("file argument must be set by either __init__ or __call__")
# set up a stream for the daemon to write to
self.close_log_in_parent = True
self.write_log_in_parent = False
if isinstance(self.file_like, string_types):
self.log_file = open(self.file_like, "w")
elif _file_descriptors_work(self.file_like):
self.log_file = self.file_like
self.close_log_in_parent = False
else:
self.log_file = StringIO()
self.write_log_in_parent = True
# record parent color settings before redirecting. We do this
# because color output depends on whether the *original* stdout
# is a TTY. New stdout won't be a TTY so we force colorization.
self._saved_color = tty.color._force_color
forced_color = tty.color.get_color_when()
# also record parent debug settings -- in case the logger is
# forcing debug output.
self._saved_debug = tty._debug
# OS-level pipe for redirecting output to logger
read_fd, write_fd = os.pipe()
# Multiprocessing pipe for communication back from the daemon
# Currently only used to save echo value between uses
self.parent_pipe, child_pipe = multiprocessing.Pipe()
# Sets a daemon that writes to file what it reads from a pipe
try:
# need to pass this b/c multiprocessing closes stdin in child.
try:
input_stream = os.fdopen(os.dup(sys.stdin.fileno()))
except BaseException:
input_stream = None # just don't forward input if this fails
self.process = multiprocessing.Process(
target=_writer_daemon,
args=(
input_stream,
read_fd,
write_fd,
self.echo,
self.log_file,
child_pipe,
),
)
self.process.daemon = True # must set before start()
self.process.start()
os.close(read_fd) # close in the parent process
finally:
if input_stream:
input_stream.close()
# Flush immediately before redirecting so that anything buffered
# goes to the original stream
sys.stdout.flush()
sys.stderr.flush()
# Now do the actual output rediction.
self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
if self.use_fds:
# We try first to use OS-level file descriptors, as this
# redirects output for subprocesses and system calls.
# Save old stdout and stderr file descriptors
self._saved_stdout = os.dup(sys.stdout.fileno())
self._saved_stderr = os.dup(sys.stderr.fileno())
# redirect to the pipe we created above
os.dup2(write_fd, sys.stdout.fileno())
os.dup2(write_fd, sys.stderr.fileno())
os.close(write_fd)
else:
# Handle I/O the Python way. This won't redirect lower-level
# output, but it's the best we can do, and the caller
# shouldn't expect any better, since *they* have apparently
# redirected I/O the Python way.
# Save old stdout and stderr file objects
self._saved_stdout = sys.stdout
self._saved_stderr = sys.stderr
# create a file object for the pipe; redirect to it.
pipe_fd_out = os.fdopen(write_fd, "w")
sys.stdout = pipe_fd_out
sys.stderr = pipe_fd_out
# Unbuffer stdout and stderr at the Python level
if not self.buffer:
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
# Force color and debug settings now that we have redirected.
tty.color.set_color_when(forced_color)
tty._debug = self.debug
# track whether we're currently inside this log_output
self._active = True
# return this log_output object so that the user can do things
# like temporarily echo some ouptut.
return self
|
def __enter__(self):
if self._active:
raise RuntimeError("Can't re-enter the same log_output!")
if self.file_like is None:
raise RuntimeError("file argument must be set by either __init__ or __call__")
# set up a stream for the daemon to write to
self.close_log_in_parent = True
self.write_log_in_parent = False
if isinstance(self.file_like, string_types):
self.log_file = open(self.file_like, "w")
elif _file_descriptors_work(self.file_like):
self.log_file = self.file_like
self.close_log_in_parent = False
else:
self.log_file = StringIO()
self.write_log_in_parent = True
# record parent color settings before redirecting. We do this
# because color output depends on whether the *original* stdout
# is a TTY. New stdout won't be a TTY so we force colorization.
self._saved_color = tty.color._force_color
forced_color = tty.color.get_color_when()
# also record parent debug settings -- in case the logger is
# forcing debug output.
self._saved_debug = tty._debug
# OS-level pipe for redirecting output to logger
self.read_fd, self.write_fd = os.pipe()
# Multiprocessing pipe for communication back from the daemon
# Currently only used to save echo value between uses
self.parent, self.child = multiprocessing.Pipe()
# Sets a daemon that writes to file what it reads from a pipe
try:
# need to pass this b/c multiprocessing closes stdin in child.
try:
input_stream = os.fdopen(os.dup(sys.stdin.fileno()))
except BaseException:
input_stream = None # just don't forward input if this fails
self.process = multiprocessing.Process(
target=self._writer_daemon, args=(input_stream,)
)
self.process.daemon = True # must set before start()
self.process.start()
os.close(self.read_fd) # close in the parent process
finally:
if input_stream:
input_stream.close()
# Flush immediately before redirecting so that anything buffered
# goes to the original stream
sys.stdout.flush()
sys.stderr.flush()
# Now do the actual output rediction.
self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
if self.use_fds:
# We try first to use OS-level file descriptors, as this
# redirects output for subprocesses and system calls.
# Save old stdout and stderr file descriptors
self._saved_stdout = os.dup(sys.stdout.fileno())
self._saved_stderr = os.dup(sys.stderr.fileno())
# redirect to the pipe we created above
os.dup2(self.write_fd, sys.stdout.fileno())
os.dup2(self.write_fd, sys.stderr.fileno())
os.close(self.write_fd)
else:
# Handle I/O the Python way. This won't redirect lower-level
# output, but it's the best we can do, and the caller
# shouldn't expect any better, since *they* have apparently
# redirected I/O the Python way.
# Save old stdout and stderr file objects
self._saved_stdout = sys.stdout
self._saved_stderr = sys.stderr
# create a file object for the pipe; redirect to it.
pipe_fd_out = os.fdopen(self.write_fd, "w")
sys.stdout = pipe_fd_out
sys.stderr = pipe_fd_out
# Unbuffer stdout and stderr at the Python level
if not self.buffer:
sys.stdout = Unbuffered(sys.stdout)
sys.stderr = Unbuffered(sys.stderr)
# Force color and debug settings now that we have redirected.
tty.color.set_color_when(forced_color)
tty._debug = self.debug
# track whether we're currently inside this log_output
self._active = True
# return this log_output object so that the user can do things
# like temporarily echo some ouptut.
return self
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def __exit__(self, exc_type, exc_val, exc_tb):
# Flush any buffered output to the logger daemon.
sys.stdout.flush()
sys.stderr.flush()
# restore previous output settings, either the low-level way or
# the python way
if self.use_fds:
os.dup2(self._saved_stdout, sys.stdout.fileno())
os.close(self._saved_stdout)
os.dup2(self._saved_stderr, sys.stderr.fileno())
os.close(self._saved_stderr)
else:
sys.stdout = self._saved_stdout
sys.stderr = self._saved_stderr
# print log contents in parent if needed.
if self.write_log_in_parent:
string = self.parent_pipe.recv()
self.file_like.write(string)
if self.close_log_in_parent:
self.log_file.close()
# recover and store echo settings from the child before it dies
self.echo = self.parent_pipe.recv()
# join the daemon process. The daemon will quit automatically
# when the write pipe is closed; we just wait for it here.
self.process.join()
# restore old color and debug settings
tty.color._force_color = self._saved_color
tty._debug = self._saved_debug
self._active = False # safe to enter again
|
def __exit__(self, exc_type, exc_val, exc_tb):
# Flush any buffered output to the logger daemon.
sys.stdout.flush()
sys.stderr.flush()
# restore previous output settings, either the low-level way or
# the python way
if self.use_fds:
os.dup2(self._saved_stdout, sys.stdout.fileno())
os.close(self._saved_stdout)
os.dup2(self._saved_stderr, sys.stderr.fileno())
os.close(self._saved_stderr)
else:
sys.stdout = self._saved_stdout
sys.stderr = self._saved_stderr
# print log contents in parent if needed.
if self.write_log_in_parent:
string = self.parent.recv()
self.file_like.write(string)
if self.close_log_in_parent:
self.log_file.close()
# recover and store echo settings from the child before it dies
self.echo = self.parent.recv()
# join the daemon process. The daemon will quit automatically
# when the write pipe is closed; we just wait for it here.
self.process.join()
# restore old color and debug settings
tty.color._force_color = self._saved_color
tty._debug = self._saved_debug
self._active = False # safe to enter again
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def force_echo(self):
"""Context manager to force local echo, even if echo is off."""
if not self._active:
raise RuntimeError("Can't call force_echo() outside log_output region!")
# This uses the xon/xoff to highlight regions to be echoed in the
# output. We us these control characters rather than, say, a
# separate pipe, because they're in-band and assured to appear
# exactly before and after the text we want to echo.
sys.stdout.write(xon)
sys.stdout.flush()
try:
yield
finally:
sys.stdout.write(xoff)
sys.stdout.flush()
|
def force_echo(self):
"""Context manager to force local echo, even if echo is off."""
if not self._active:
raise RuntimeError("Can't call force_echo() outside log_output region!")
# This uses the xon/xoff to highlight regions to be echoed in the
# output. We us these control characters rather than, say, a
# separate pipe, because they're in-band and assured to appear
# exactly before and after the text we want to echo.
sys.stdout.write(xon)
sys.stdout.flush()
yield
sys.stdout.write(xoff)
sys.stdout.flush()
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def _writer_daemon(stdin, read_fd, write_fd, echo, log_file, control_pipe):
"""Daemon used by ``log_output`` to write to a log file and to ``stdout``.
The daemon receives output from the parent process and writes it both
to a log and, optionally, to ``stdout``. The relationship looks like
this::
Terminal
|
| +-------------------------+
| | Parent Process |
+--------> | with log_output(): |
| stdin | ... |
| +-------------------------+
| ^ | write_fd (parent's redirected stdout)
| | control |
| | pipe |
| | v read_fd
| +-------------------------+ stdout
| | Writer daemon |------------>
+--------> | read from read_fd | log_file
stdin | write to out and log |------------>
+-------------------------+
Within the ``log_output`` handler, the parent's output is redirected
to a pipe from which the daemon reads. The daemon writes each line
from the pipe to a log file and (optionally) to ``stdout``. The user
can hit ``v`` to toggle output on ``stdout``.
In addition to the input and output file descriptors, the daemon
interacts with the parent via ``control_pipe``. It reports whether
``stdout`` was enabled or disabled when it finished and, if the
``log_file`` is a ``StringIO`` object, then the daemon also sends the
logged output back to the parent as a string, to be written to the
``StringIO`` in the parent. This is mainly for testing.
Arguments:
stdin (stream): input from the terminal
read_fd (int): pipe for reading from parent's redirected stdout
write_fd (int): parent's end of the pipe will write to (will be
immediately closed by the writer daemon)
echo (bool): initial echo setting -- controlled by user and
preserved across multiple writer daemons
log_file (file-like): file to log all output
control_pipe (Pipe): multiprocessing pipe on which to send control
information to the parent
"""
# Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
in_pipe = os.fdopen(read_fd, "r", 1)
os.close(write_fd)
# list of streams to select from
istreams = [in_pipe, stdin] if stdin else [in_pipe]
force_echo = False # parent can force echo for certain output
try:
with keyboard_input(stdin) as kb:
while True:
# fix the terminal settings if we recently came to
# the foreground
kb.check_fg_bg()
# wait for input from any stream. use a coarse timeout to
# allow other checks while we wait for input
rlist, _, _ = _retry(select.select)(istreams, [], [], 1e-1)
# Allow user to toggle echo with 'v' key.
# Currently ignores other chars.
# only read stdin if we're in the foreground
if stdin in rlist and not _is_background_tty(stdin):
# it's possible to be backgrounded between the above
# check and the read, so we ignore SIGTTIN here.
with ignore_signal(signal.SIGTTIN):
try:
if stdin.read(1) == "v":
echo = not echo
except IOError as e:
# If SIGTTIN is ignored, the system gives EIO
# to let the caller know the read failed b/c it
# was in the bg. Ignore that too.
if e.errno != errno.EIO:
raise
if in_pipe in rlist:
# Handle output from the calling process.
line = _retry(in_pipe.readline)()
if not line:
break
# find control characters and strip them.
controls = control.findall(line)
line = control.sub("", line)
# Echo to stdout if requested or forced.
if echo or force_echo:
sys.stdout.write(line)
sys.stdout.flush()
# Stripped output to log file.
log_file.write(_strip(line))
log_file.flush()
if xon in controls:
force_echo = True
if xoff in controls:
force_echo = False
except BaseException:
tty.error("Exception occurred in writer daemon!")
traceback.print_exc()
finally:
# send written data back to parent if we used a StringIO
if isinstance(log_file, StringIO):
control_pipe.send(log_file.getvalue())
log_file.close()
# send echo value back to the parent so it can be preserved.
control_pipe.send(echo)
|
def _writer_daemon(self, stdin):
"""Daemon that writes output to the log file and stdout."""
# Use line buffering (3rd param = 1) since Python 3 has a bug
# that prevents unbuffered text I/O.
in_pipe = os.fdopen(self.read_fd, "r", 1)
os.close(self.write_fd)
echo = self.echo # initial echo setting, user-controllable
force_echo = False # parent can force echo for certain output
# list of streams to select from
istreams = [in_pipe, stdin] if stdin else [in_pipe]
log_file = self.log_file
def handle_write(force_echo):
# Handle output from the with block process.
# If we arrive here it means that in_pipe was
# ready for reading : it should never happen that
# line is false-ish
line = in_pipe.readline()
if not line:
return (True, force_echo) # break while loop
# find control characters and strip them.
controls = control.findall(line)
line = re.sub(control, "", line)
# Echo to stdout if requested or forced
if echo or force_echo:
try:
if termios:
conf = termios.tcgetattr(sys.stdout)
tostop = conf[3] & termios.TOSTOP
else:
tostop = True
except Exception:
tostop = True
if not (tostop and _is_background_tty()):
sys.stdout.write(line)
sys.stdout.flush()
# Stripped output to log file.
log_file.write(_strip(line))
log_file.flush()
if xon in controls:
force_echo = True
if xoff in controls:
force_echo = False
return (False, force_echo)
try:
with _keyboard_input(stdin):
while True:
# No need to set any timeout for select.select
# Wait until a key press or an event on in_pipe.
rlist, _, _ = select.select(istreams, [], [])
# Allow user to toggle echo with 'v' key.
# Currently ignores other chars.
# only read stdin if we're in the foreground
if stdin in rlist and not _is_background_tty():
if stdin.read(1) == "v":
echo = not echo
if in_pipe in rlist:
br, fe = handle_write(force_echo)
force_echo = fe
if br:
break
except BaseException:
tty.error("Exception occurred in writer daemon!")
traceback.print_exc()
finally:
# send written data back to parent if we used a StringIO
if self.write_log_in_parent:
self.child.send(log_file.getvalue())
log_file.close()
# send echo value back to the parent so it can be preserved.
self.child.send(echo)
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def _default_target_from_env(self):
"""Set and return the default CrayPE target loaded in a clean login
session.
A bash subshell is launched with a wiped environment and the list of
loaded modules is parsed for the first acceptable CrayPE target.
"""
# env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
if getattr(self, "default", None) is None:
bash = Executable("/bin/bash")
output = bash(
"-lc",
"echo $CRAY_CPU_TARGET",
env={"TERM": os.environ.get("TERM", "")},
output=str,
error=os.devnull,
)
output = "".join(output.split()) # remove all whitespace
if output:
self.default = output
tty.debug("Found default module:%s" % self.default)
return self.default
|
def _default_target_from_env(self):
"""Set and return the default CrayPE target loaded in a clean login
session.
A bash subshell is launched with a wiped environment and the list of
loaded modules is parsed for the first acceptable CrayPE target.
"""
# env -i /bin/bash -lc echo $CRAY_CPU_TARGET 2> /dev/null
if getattr(self, "default", None) is None:
output = Executable("/bin/bash")(
"-lc",
"echo $CRAY_CPU_TARGET",
env={"TERM": os.environ["TERM"]},
output=str,
error=os.devnull,
)
output = "".join(output.split()) # remove all whitespace
if output:
self.default = output
tty.debug("Found default module:%s" % self.default)
return self.default
|
https://github.com/spack/spack/issues/15620
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 770, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 671, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 552, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 548, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 653, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 606, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 178, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 516, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 63, in __init__
_target = self._default_target_from_env()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 121, in _default_target_from_env
env={'TERM': os.environ['TERM']},
File "/usr/lib64/python2.6/UserDict.py", line 22, in __getitem__
raise KeyError(key)
KeyError: 'TERM'
|
KeyError
|
def _satisfies_target(self, other_target, strict):
self_target = self.target
need_to_check = (
bool(other_target)
if strict or self.concrete
else bool(other_target and self_target)
)
# If there's no need to check we are fine
if not need_to_check:
return True
# self is not concrete, but other_target is there and strict=True
if self.target is None:
return False
for target_range in str(other_target).split(","):
t_min, sep, t_max = target_range.partition(":")
# Checking against a single specific target
if not sep and self_target == t_min:
return True
if not sep and self_target != t_min:
return False
# Check against a range
min_ok = self_target.microarchitecture >= t_min if t_min else True
max_ok = self_target.microarchitecture <= t_max if t_max else True
if min_ok and max_ok:
return True
return False
|
def _satisfies_target(self, other_target, strict):
self_target = self.target
need_to_check = (
bool(other_target)
if strict or self.concrete
else bool(other_target and self_target)
)
# If there's no need to check we are fine
if not need_to_check:
return True
for target_range in str(other_target).split(","):
t_min, sep, t_max = target_range.partition(":")
# Checking against a single specific target
if not sep and self_target == t_min:
return True
if not sep and self_target != t_min:
return False
# Check against a range
min_ok = self_target.microarchitecture >= t_min if t_min else True
max_ok = self_target.microarchitecture <= t_max if t_max else True
if min_ok and max_ok:
return True
return False
|
https://github.com/spack/spack/issues/15306
|
Traceback (most recent call last):
File "/root/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/root/spack/lib/spack/spack/main.py", line 763, in main
return _invoke_command(command, parser, args, unknown)
File "/root/spack/lib/spack/spack/main.py", line 488, in _invoke_command
return_val = command(parser, args)
File "/root/spack/lib/spack/spack/cmd/install.py", line 273, in install
concretized_specs = env.concretize()
File "/root/spack/lib/spack/spack/environment.py", line 948, in concretize
return self._concretize_separately()
File "/root/spack/lib/spack/spack/environment.py", line 1016, in _concretize_separately
concrete = _concretize_from_constraints(uspec_constraints)
File "/root/spack/lib/spack/spack/environment.py", line 1554, in _concretize_from_constraints
return s.concretized()
File "/root/spack/lib/spack/spack/spec.py", line 2298, in concretized
clone.concretize()
File "/root/spack/lib/spack/spack/spec.py", line 2148, in concretize
user_spec_deps=user_spec_deps),
File "/root/spack/lib/spack/spack/spec.py", line 2626, in normalize
visited, all_spec_deps, provider_index, tests)
File "/root/spack/lib/spack/spack/spec.py", line 2552, in _normalize_helper
dep = self._evaluate_dependency_conditions(dep_name)
File "/root/spack/lib/spack/spack/spec.py", line 2375, in _evaluate_dependency_conditions
if self.satisfies(when_spec, strict=True):
File "/root/spack/lib/spack/spack/spec.py", line 2879, in satisfies
if not self.architecture.satisfies(other.architecture, strict):
File "/root/spack/lib/spack/spack/spec.py", line 360, in satisfies
return self._satisfies_target(other.target, strict=strict)
File "/root/spack/lib/spack/spack/spec.py", line 383, in _satisfies_target
min_ok = self_target.microarchitecture >= t_min if t_min else True
AttributeError: 'NoneType' object has no attribute 'microarchitecture'
|
AttributeError
|
def add_default_view_to_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(self.unconditional_environment_modifications(self.default_view))
for _, spec in self.concretized_specs():
if spec in self.default_view and spec.package.installed:
env_mod.extend(
self.environment_modifications_for_spec(spec, self.default_view)
)
# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
env_mod.prune_duplicate_paths(env_var)
return env_mod.shell_modifications(shell)
|
def add_default_view_to_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(self.unconditional_environment_modifications(self.default_view))
for _, spec in self.concretized_specs():
if spec in self.default_view:
env_mod.extend(
self.environment_modifications_for_spec(spec, self.default_view)
)
# deduplicate paths from specs mapped to the same location
for env_var in env_mod.group_by_name():
env_mod.prune_duplicate_paths(env_var)
return env_mod.shell_modifications(shell)
|
https://github.com/spack/spack/issues/13509
|
Traceback (most recent call last):
File "./atdm-spack/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/main.py", line 653, in main
ev.activate(env, args.use_env_repo)
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 163, in activate
cmds += env.add_default_view_to_shell(shell)
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 1074, in add_default_view_to_shell
spec, self.default_view))
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 1055, in environment_modifications_for_spec
spec, context='run'
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/build_environment.py", line 759, in modifications_from_dependencies
dpkg.setup_dependent_package(pkg.module, spec)
File "/home/rabartl/Spack.base4/atdm-spack/spack/var/spack/repos/builtin/packages/python/package.py", line 729, in setup_dependent_package
module.setup_py = Executable(
File "/home/rabartl/Spack.base4/atdm-spack/spack/var/spack/repos/builtin/packages/python/package.py", line 526, in command
RuntimeError: Unable to locate python command in /home/rabartl/Spack.base4/atdm-spack/spack/opt/spack/linux-rhel6-x86_64/gcc-7.2.0/python-2.7.15-bfndwzimloeo5odtb656tkcsid4jzg42/bin
|
RuntimeError
|
def rm_default_view_from_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(
self.unconditional_environment_modifications(self.default_view).reversed()
)
for _, spec in self.concretized_specs():
if spec in self.default_view and spec.package.installed:
env_mod.extend(
self.environment_modifications_for_spec(
spec, self.default_view
).reversed()
)
return env_mod.shell_modifications(shell)
|
def rm_default_view_from_shell(self, shell):
env_mod = spack.util.environment.EnvironmentModifications()
if default_view_name not in self.views:
# No default view to add to shell
return env_mod.shell_modifications(shell)
env_mod.extend(
self.unconditional_environment_modifications(self.default_view).reversed()
)
for _, spec in self.concretized_specs():
if spec in self.default_view:
env_mod.extend(
self.environment_modifications_for_spec(
spec, self.default_view
).reversed()
)
return env_mod.shell_modifications(shell)
|
https://github.com/spack/spack/issues/13509
|
Traceback (most recent call last):
File "./atdm-spack/spack/bin/spack", line 64, in <module>
sys.exit(spack.main.main())
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/main.py", line 653, in main
ev.activate(env, args.use_env_repo)
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 163, in activate
cmds += env.add_default_view_to_shell(shell)
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 1074, in add_default_view_to_shell
spec, self.default_view))
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/environment.py", line 1055, in environment_modifications_for_spec
spec, context='run'
File "/home/rabartl/Spack.base4/atdm-spack/spack/lib/spack/spack/build_environment.py", line 759, in modifications_from_dependencies
dpkg.setup_dependent_package(pkg.module, spec)
File "/home/rabartl/Spack.base4/atdm-spack/spack/var/spack/repos/builtin/packages/python/package.py", line 729, in setup_dependent_package
module.setup_py = Executable(
File "/home/rabartl/Spack.base4/atdm-spack/spack/var/spack/repos/builtin/packages/python/package.py", line 526, in command
RuntimeError: Unable to locate python command in /home/rabartl/Spack.base4/atdm-spack/spack/opt/spack/linux-rhel6-x86_64/gcc-7.2.0/python-2.7.15-bfndwzimloeo5odtb656tkcsid4jzg42/bin
|
RuntimeError
|
def install(self, spec, prefix):
options = [
"--with-ssl=0",
"--download-c2html=0",
"--download-sowing=0",
"--download-hwloc=0",
"CFLAGS=%s" % " ".join(spec.compiler_flags["cflags"]),
"FFLAGS=%s" % " ".join(spec.compiler_flags["fflags"]),
"CXXFLAGS=%s" % " ".join(spec.compiler_flags["cxxflags"]),
]
options.extend(self.mpi_dependent_options())
options.extend(
[
"--with-precision=%s" % ("double" if "+double" in spec else "single"),
"--with-scalar-type=%s" % ("complex" if "+complex" in spec else "real"),
"--with-shared-libraries=%s" % ("1" if "+shared" in spec else "0"),
"--with-debugging=%s" % ("1" if "+debug" in spec else "0"),
"--with-64-bit-indices=%s" % ("1" if "+int64" in spec else "0"),
]
)
if "+debug" not in spec:
options.extend(["COPTFLAGS=", "FOPTFLAGS=", "CXXOPTFLAGS="])
# Make sure we use exactly the same Blas/Lapack libraries
# across the DAG. To that end list them explicitly
lapack_blas = spec["lapack"].libs + spec["blas"].libs
options.extend(["--with-blas-lapack-lib=%s" % lapack_blas.joined()])
if "+knl" in spec:
options.append("--with-avx-512-kernels")
options.append("--with-memalign=64")
if "+X" in spec:
options.append("--with-x=1")
else:
options.append("--with-x=0")
if "trilinos" in spec:
options.append("--with-cxx-dialect=C++11")
if spec.satisfies("^trilinos+boost"):
options.append("--with-boost=1")
if self.spec.satisfies("clanguage=C++"):
options.append("--with-clanguage=C++")
else:
options.append("--with-clanguage=C")
# PETSc depends on scalapack when '+mumps+mpi~int64' (see depends())
# help PETSc pick up Scalapack from MKL
if spec.satisfies("+mumps+mpi~int64"):
scalapack = spec["scalapack"].libs
options.extend(
["--with-scalapack-lib=%s" % scalapack.joined(), "--with-scalapack=1"]
)
else:
options.extend(["--with-scalapack=0"])
# Activates library support if needed
for library in ("metis", "hdf5", "hypre", "parmetis", "mumps", "trilinos", "fftw"):
options.append(
"--with-{library}={value}".format(
library=library, value=("1" if library in spec else "0")
)
)
if library in spec:
options.append(
"--with-{library}-dir={path}".format(
library=library, path=spec[library].prefix
)
)
# PETSc does not pick up SuperluDist from the dir as they look for
# superlu_dist_4.1.a
if "superlu-dist" in spec:
if spec.satisfies("@3.10.3:"):
options.append("--with-cxx-dialect=C++11")
options.extend(
[
"--with-superlu_dist-include=%s" % spec["superlu-dist"].prefix.include,
"--with-superlu_dist-lib=%s"
% join_path(spec["superlu-dist"].prefix.lib, "libsuperlu_dist.a"),
"--with-superlu_dist=1",
]
)
else:
options.append("--with-superlu_dist=0")
# SuiteSparse: configuring using '--with-suitesparse-dir=...' has some
# issues, so specify directly the include path and the libraries.
if "+suite-sparse" in spec:
ss_spec = (
"suite-sparse:umfpack,klu,cholmod,btf,ccolamd,colamd,"
"camd,amd,suitesparseconfig"
)
options.extend(
[
"--with-suitesparse-include=%s" % spec[ss_spec].prefix.include,
"--with-suitesparse-lib=%s" % spec[ss_spec].libs.joined(),
"--with-suitesparse=1",
]
)
else:
options.append("--with-suitesparse=0")
# zlib: configuring using '--with-zlib-dir=...' has some issues with
# SuiteSparse so specify directly the include path and the libraries.
if "zlib" in spec:
options.extend(
[
"--with-zlib-include=%s" % spec["zlib"].prefix.include,
"--with-zlib-lib=%s" % spec["zlib"].libs.joined(),
"--with-zlib=1",
]
)
else:
options.append("--with-zlib=0")
python("configure", "--prefix=%s" % prefix, *options)
# PETSc has its own way of doing parallel make.
make("MAKE_NP=%s" % make_jobs, parallel=False)
make("install")
# solve Poisson equation in 2D to make sure nothing is broken:
if ("mpi" in spec) and self.run_tests:
with working_dir("src/ksp/ksp/examples/tutorials"):
env["PETSC_DIR"] = self.prefix
cc = Executable(spec["mpi"].mpicc)
cc(
"ex50.c",
"-I%s" % prefix.include,
"-L%s" % prefix.lib,
"-lpetsc",
"-lm",
"-o",
"ex50",
)
run = Executable(join_path(spec["mpi"].prefix.bin, "mpirun"))
# For Spectrum MPI, if -np is omitted, the default behavior is
# to assign one process per process slot, where the default
# process slot allocation is one per core. On systems with
# many cores, the number of processes can exceed the size of
# the grid specified when the testcase is run and the test case
# fails. Specify a small number of processes to prevent
# failure.
# For more information about Spectrum MPI invocation, see URL
# https://www.ibm.com/support/knowledgecenter/en/SSZTET_10.1.0/smpi02/smpi02_mpirun_options.html
if "spectrum-mpi" in spec:
run.add_default_arg("-np")
run.add_default_arg("4")
run("ex50", "-da_grid_x", "4", "-da_grid_y", "4")
if "superlu-dist" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"lu",
"-pc_factor_mat_solver_package",
"superlu_dist",
)
if "mumps" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"lu",
"-pc_factor_mat_solver_package",
"mumps",
)
if "hypre" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"hypre",
"-pc_hypre_type",
"boomeramg",
)
|
def install(self, spec, prefix):
options = [
"--with-ssl=0",
"--download-c2html=0",
"--download-sowing=0",
"--download-hwloc=0",
"CFLAGS=%s" % " ".join(spec.compiler_flags["cflags"]),
"FFLAGS=%s" % " ".join(spec.compiler_flags["fflags"]),
"CXXFLAGS=%s" % " ".join(spec.compiler_flags["cxxflags"]),
]
options.extend(self.mpi_dependent_options())
options.extend(
[
"--with-precision=%s" % ("double" if "+double" in spec else "single"),
"--with-scalar-type=%s" % ("complex" if "+complex" in spec else "real"),
"--with-shared-libraries=%s" % ("1" if "+shared" in spec else "0"),
"--with-debugging=%s" % ("1" if "+debug" in spec else "0"),
"--with-64-bit-indices=%s" % ("1" if "+int64" in spec else "0"),
]
)
if "+debug" not in spec:
options.extend(["COPTFLAGS=", "FOPTFLAGS=", "CXXOPTFLAGS="])
# Make sure we use exactly the same Blas/Lapack libraries
# across the DAG. To that end list them explicitly
lapack_blas = spec["lapack"].libs + spec["blas"].libs
options.extend(["--with-blas-lapack-lib=%s" % lapack_blas.joined()])
if "+knl" in spec:
options.append("--with-avx-512-kernels")
options.append("--with-memalign=64")
if "+X" in spec:
options.append("--with-x=1")
else:
options.append("--with-x=0")
if "trilinos" in spec:
options.append("--with-cxx-dialect=C++11")
if spec.satisfies("^trilinos+boost"):
options.append("--with-boost=1")
if self.spec.satisfies("clanguage=C++"):
options.append("--with-clanguage=C++")
else:
options.append("--with-clanguage=C")
# PETSc depends on scalapack when '+mumps+mpi~int64' (see depends())
# help PETSc pick up Scalapack from MKL
if spec.satisfies("+mumps+mpi~int64"):
scalapack = spec["scalapack"].libs
options.extend(
["--with-scalapack-lib=%s" % scalapack.joined(), "--with-scalapack=1"]
)
else:
options.extend(["--with-scalapack=0"])
# Activates library support if needed
for library in ("metis", "hdf5", "hypre", "parmetis", "mumps", "trilinos", "fftw"):
options.append(
"--with-{library}={value}".format(
library=library, value=("1" if library in spec else "0")
)
)
if library in spec:
options.append(
"--with-{library}-dir={path}".format(
library=library, path=spec[library].prefix
)
)
# PETSc does not pick up SuperluDist from the dir as they look for
# superlu_dist_4.1.a
if "superlu-dist" in spec:
if spec.satisfies("@3.10.3:"):
options.append("--with-cxx-dialect=C++11")
options.extend(
[
"--with-superlu_dist-include=%s" % spec["superlu-dist"].prefix.include,
"--with-superlu_dist-lib=%s"
% join_path(spec["superlu-dist"].prefix.lib, "libsuperlu_dist.a"),
"--with-superlu_dist=1",
]
)
else:
options.append("--with-superlu_dist=0")
# SuiteSparse: configuring using '--with-suitesparse-dir=...' has some
# issues, so specify directly the include path and the libraries.
if "+suite-sparse" in spec:
ss_spec = (
"suite-sparse:umfpack,klu,cholmod,btf,ccolamd,colamd,"
"camd,amd,suitesparseconfig"
)
options.extend(
[
"--with-suitesparse-include=%s" % spec[ss_spec].prefix.include,
"--with-suitesparse-lib=%s" % spec[ss_spec].libs.joined(),
"--with-suitesparse=1",
]
)
else:
options.append("--with-suitesparse=0")
# zlib: configuring using '--with-zlib-dir=...' has some issues with
# SuiteSparse so specify directly the include path and the libraries.
if "zlib" in spec:
options.extend(
[
"--with-zlib-include=%s" % spec["zlib"].prefix.include,
"--with-zlib-lib=%s" % spec["zlib"].libs.ld_flags,
"--with-zlib=1",
]
)
else:
options.append("--with-zlib=0")
python("configure", "--prefix=%s" % prefix, *options)
# PETSc has its own way of doing parallel make.
make("MAKE_NP=%s" % make_jobs, parallel=False)
make("install")
# solve Poisson equation in 2D to make sure nothing is broken:
if ("mpi" in spec) and self.run_tests:
with working_dir("src/ksp/ksp/examples/tutorials"):
env["PETSC_DIR"] = self.prefix
cc = Executable(spec["mpi"].mpicc)
cc(
"ex50.c",
"-I%s" % prefix.include,
"-L%s" % prefix.lib,
"-lpetsc",
"-lm",
"-o",
"ex50",
)
run = Executable(join_path(spec["mpi"].prefix.bin, "mpirun"))
# For Spectrum MPI, if -np is omitted, the default behavior is
# to assign one process per process slot, where the default
# process slot allocation is one per core. On systems with
# many cores, the number of processes can exceed the size of
# the grid specified when the testcase is run and the test case
# fails. Specify a small number of processes to prevent
# failure.
# For more information about Spectrum MPI invocation, see URL
# https://www.ibm.com/support/knowledgecenter/en/SSZTET_10.1.0/smpi02/smpi02_mpirun_options.html
if "spectrum-mpi" in spec:
run.add_default_arg("-np")
run.add_default_arg("4")
run("ex50", "-da_grid_x", "4", "-da_grid_y", "4")
if "superlu-dist" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"lu",
"-pc_factor_mat_solver_package",
"superlu_dist",
)
if "mumps" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"lu",
"-pc_factor_mat_solver_package",
"mumps",
)
if "hypre" in spec:
run(
"ex50",
"-da_grid_x",
"4",
"-da_grid_y",
"4",
"-pc_type",
"hypre",
"-pc_hypre_type",
"boomeramg",
)
|
https://github.com/spack/spack/issues/10585
|
hron@r0d0$ python
Python 3.6.5 (default, Feb 9 2019, 12:35:34)
[GCC 7.4.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
import sys
import petsc4py
petsc4py.init(sys.argv)
import matplotlib.pyplot as plt
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/pkg/Installs/linux-ubuntu16.04-x86_64-gcc7.4.0/py-matplotlib/3.0.0/56bf2zhzyqlj2lpa/lib/python3.6/site-packages/matplotlib/pyplot.py", line 32, in <module>
import matplotlib.colorbar
File "/usr/local/pkg/Installs/linux-ubuntu16.04-x86_64-gcc7.4.0/py-matplotlib/3.0.0/56bf2zhzyqlj2lpa/lib/python3.6/site-packages/matplotlib/colorbar.py", line 32, in <module>
import matplotlib.contour as contour
File "/usr/local/pkg/Installs/linux-ubuntu16.04-x86_64-gcc7.4.0/py-matplotlib/3.0.0/56bf2zhzyqlj2lpa/lib/python3.6/site-packages/matplotlib/contour.py", line 18, in <module>
import matplotlib.font_manager as font_manager
File "/usr/local/pkg/Installs/linux-ubuntu16.04-x86_64-gcc7.4.0/py-matplotlib/3.0.0/56bf2zhzyqlj2lpa/lib/python3.6/site-packages/matplotlib/font_manager.py", line 48, in <module>
from matplotlib import afm, cbook, ft2font, rcParams, get_cachedir
ImportError: /lib/x86_64-linux-gnu/libz.so.1: version `ZLIB_1.2.9' not found (required by /usr/local/pkg/Installs/linux-ubuntu16.04-x86_64-gcc7.4.0/libpng/1.6.34/uzwwmyb42qhkd7jr/lib/libpng16.so.16)
|
ImportError
|
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super(PythonPackageTemplate, self).__init__(name, *args, **kwargs)
|
def __init__(self, name, *args):
# If the user provided `--name py-numpy`, don't rename it py-py-numpy
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super(PythonPackageTemplate, self).__init__(name, *args)
|
https://github.com/spack/spack/issues/12529
|
$ spack -d create https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:19.578845] Imported create from built-in commands
==> [2019-08-22-15:47:19.581250] Imported create from built-in commands
==> [2019-08-22-15:47:19.779743] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/modules.yaml
==> [2019-08-22-15:47:19.797824] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/linux/modules.yaml
==> [2019-08-22-15:47:19.802880] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/config.yaml
==> [2019-08-22-15:47:19.841457] This looks like a URL for PyJWT
==> [2019-08-22-15:47:19.842703] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:19.915762] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:19.917359] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:20.027930] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:20.043323] Found 1 version of pyjwt:
1.7.1 https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:20.043999] How many would you like to checksum? (default is 1, q to abort)
==> [2019-08-22-15:47:22.017241] Downloading...
==> [2019-08-22-15:47:22.021265] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquiring]
==> [2019-08-22-15:47:22.021765] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquired]
==> [2019-08-22-15:47:22.022706] Fetching https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:22.025637] '/usr/bin/curl' '-C' '-' '-o' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz.part' '-f' '-D' '-' '-L' 'https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz' '-#'
######################################################################## 100.0%
==> [2019-08-22-15:47:22.314185] '/usr/bin/tar' '--exclude=*/*/*' '-tf' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz'
==> [2019-08-22-15:47:22.320293] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Released]
==> [2019-08-22-15:47:22.320423] Checksummed 1 version of pyjwt
==> [2019-08-22-15:47:22.320472] This package looks like it uses the python build system
Traceback (most recent call last):
File "/disk/home/sinan/Downloads/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 697, in main
return _invoke_command(command, parser, args, unknown)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 447, in _invoke_command
return_val = command(parser, args)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/cmd/create.py", line 719, in create
package = package_class(**constr_args)
TypeError: __init__() got an unexpected keyword argument 'url'
|
TypeError
|
def __init__(self, name, *args, **kwargs):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to r-{0}".format(name))
name = "r-{0}".format(name)
super(RPackageTemplate, self).__init__(name, *args, **kwargs)
|
def __init__(self, name, *args):
# If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
if not name.startswith("r-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to r-{0}".format(name))
name = "r-{0}".format(name)
super(RPackageTemplate, self).__init__(name, *args)
|
https://github.com/spack/spack/issues/12529
|
$ spack -d create https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:19.578845] Imported create from built-in commands
==> [2019-08-22-15:47:19.581250] Imported create from built-in commands
==> [2019-08-22-15:47:19.779743] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/modules.yaml
==> [2019-08-22-15:47:19.797824] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/linux/modules.yaml
==> [2019-08-22-15:47:19.802880] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/config.yaml
==> [2019-08-22-15:47:19.841457] This looks like a URL for PyJWT
==> [2019-08-22-15:47:19.842703] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:19.915762] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:19.917359] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:20.027930] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:20.043323] Found 1 version of pyjwt:
1.7.1 https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:20.043999] How many would you like to checksum? (default is 1, q to abort)
==> [2019-08-22-15:47:22.017241] Downloading...
==> [2019-08-22-15:47:22.021265] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquiring]
==> [2019-08-22-15:47:22.021765] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquired]
==> [2019-08-22-15:47:22.022706] Fetching https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:22.025637] '/usr/bin/curl' '-C' '-' '-o' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz.part' '-f' '-D' '-' '-L' 'https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz' '-#'
######################################################################## 100.0%
==> [2019-08-22-15:47:22.314185] '/usr/bin/tar' '--exclude=*/*/*' '-tf' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz'
==> [2019-08-22-15:47:22.320293] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Released]
==> [2019-08-22-15:47:22.320423] Checksummed 1 version of pyjwt
==> [2019-08-22-15:47:22.320472] This package looks like it uses the python build system
Traceback (most recent call last):
File "/disk/home/sinan/Downloads/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 697, in main
return _invoke_command(command, parser, args, unknown)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 447, in _invoke_command
return_val = command(parser, args)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/cmd/create.py", line 719, in create
package = package_class(**constr_args)
TypeError: __init__() got an unexpected keyword argument 'url'
|
TypeError
|
def __init__(self, name, *args, **kwargs):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super(PerlmakePackageTemplate, self).__init__(name, *args, **kwargs)
|
def __init__(self, name, *args):
# If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
if not name.startswith("perl-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to perl-{0}".format(name))
name = "perl-{0}".format(name)
super(PerlmakePackageTemplate, self).__init__(name, *args)
|
https://github.com/spack/spack/issues/12529
|
$ spack -d create https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:19.578845] Imported create from built-in commands
==> [2019-08-22-15:47:19.581250] Imported create from built-in commands
==> [2019-08-22-15:47:19.779743] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/modules.yaml
==> [2019-08-22-15:47:19.797824] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/linux/modules.yaml
==> [2019-08-22-15:47:19.802880] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/config.yaml
==> [2019-08-22-15:47:19.841457] This looks like a URL for PyJWT
==> [2019-08-22-15:47:19.842703] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:19.915762] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:19.917359] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:20.027930] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:20.043323] Found 1 version of pyjwt:
1.7.1 https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:20.043999] How many would you like to checksum? (default is 1, q to abort)
==> [2019-08-22-15:47:22.017241] Downloading...
==> [2019-08-22-15:47:22.021265] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquiring]
==> [2019-08-22-15:47:22.021765] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquired]
==> [2019-08-22-15:47:22.022706] Fetching https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:22.025637] '/usr/bin/curl' '-C' '-' '-o' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz.part' '-f' '-D' '-' '-L' 'https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz' '-#'
######################################################################## 100.0%
==> [2019-08-22-15:47:22.314185] '/usr/bin/tar' '--exclude=*/*/*' '-tf' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz'
==> [2019-08-22-15:47:22.320293] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Released]
==> [2019-08-22-15:47:22.320423] Checksummed 1 version of pyjwt
==> [2019-08-22-15:47:22.320472] This package looks like it uses the python build system
Traceback (most recent call last):
File "/disk/home/sinan/Downloads/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 697, in main
return _invoke_command(command, parser, args, unknown)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 447, in _invoke_command
return_val = command(parser, args)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/cmd/create.py", line 719, in create
package = package_class(**constr_args)
TypeError: __init__() got an unexpected keyword argument 'url'
|
TypeError
|
def __init__(self, name, *args, **kwargs):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to octave-{0}".format(name)) # noqa
name = "octave-{0}".format(name)
super(OctavePackageTemplate, self).__init__(name, *args, **kwargs)
|
def __init__(self, name, *args):
# If the user provided `--name octave-splines`, don't rename it
# octave-octave-splines
if not name.startswith("octave-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to octave-{0}".format(name)) # noqa
name = "octave-{0}".format(name)
super(OctavePackageTemplate, self).__init__(name, *args)
|
https://github.com/spack/spack/issues/12529
|
$ spack -d create https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:19.578845] Imported create from built-in commands
==> [2019-08-22-15:47:19.581250] Imported create from built-in commands
==> [2019-08-22-15:47:19.779743] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/modules.yaml
==> [2019-08-22-15:47:19.797824] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/linux/modules.yaml
==> [2019-08-22-15:47:19.802880] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/config.yaml
==> [2019-08-22-15:47:19.841457] This looks like a URL for PyJWT
==> [2019-08-22-15:47:19.842703] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:19.915762] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:19.917359] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:20.027930] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:20.043323] Found 1 version of pyjwt:
1.7.1 https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:20.043999] How many would you like to checksum? (default is 1, q to abort)
==> [2019-08-22-15:47:22.017241] Downloading...
==> [2019-08-22-15:47:22.021265] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquiring]
==> [2019-08-22-15:47:22.021765] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquired]
==> [2019-08-22-15:47:22.022706] Fetching https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:22.025637] '/usr/bin/curl' '-C' '-' '-o' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz.part' '-f' '-D' '-' '-L' 'https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz' '-#'
######################################################################## 100.0%
==> [2019-08-22-15:47:22.314185] '/usr/bin/tar' '--exclude=*/*/*' '-tf' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz'
==> [2019-08-22-15:47:22.320293] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Released]
==> [2019-08-22-15:47:22.320423] Checksummed 1 version of pyjwt
==> [2019-08-22-15:47:22.320472] This package looks like it uses the python build system
Traceback (most recent call last):
File "/disk/home/sinan/Downloads/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 697, in main
return _invoke_command(command, parser, args, unknown)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 447, in _invoke_command
return_val = command(parser, args)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/cmd/create.py", line 719, in create
package = package_class(**constr_args)
TypeError: __init__() got an unexpected keyword argument 'url'
|
TypeError
|
def __init__(self, name, *args, **kwargs):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super(SIPPackageTemplate, self).__init__(name, *args, **kwargs)
|
def __init__(self, name, *args):
# If the user provided `--name py-pyqt4`, don't rename it py-py-pyqt4
if not name.startswith("py-"):
# Make it more obvious that we are renaming the package
tty.msg("Changing package name from {0} to py-{0}".format(name))
name = "py-{0}".format(name)
super(SIPPackageTemplate, self).__init__(name, *args)
|
https://github.com/spack/spack/issues/12529
|
$ spack -d create https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:19.578845] Imported create from built-in commands
==> [2019-08-22-15:47:19.581250] Imported create from built-in commands
==> [2019-08-22-15:47:19.779743] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/modules.yaml
==> [2019-08-22-15:47:19.797824] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/linux/modules.yaml
==> [2019-08-22-15:47:19.802880] Reading config file /disk/home/sinan/Downloads/spack/etc/spack/defaults/config.yaml
==> [2019-08-22-15:47:19.841457] This looks like a URL for PyJWT
==> [2019-08-22-15:47:19.842703] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:19.915762] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:19.917359] Warning: Spack will not check SSL certificates. You need to update your Python to enable certificate verification.
==> [2019-08-22-15:47:20.027930] HTTPError: HTTP Error 404: Not Found
==> [2019-08-22-15:47:20.043323] Found 1 version of pyjwt:
1.7.1 https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:20.043999] How many would you like to checksum? (default is 1, q to abort)
==> [2019-08-22-15:47:22.017241] Downloading...
==> [2019-08-22-15:47:22.021265] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquiring]
==> [2019-08-22-15:47:22.021765] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Acquired]
==> [2019-08-22-15:47:22.022706] Fetching https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz
==> [2019-08-22-15:47:22.025637] '/usr/bin/curl' '-C' '-' '-o' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz.part' '-f' '-D' '-' '-L' 'https://pypi.io/packages/source/P/PyJWT/PyJWT-1.7.1.tar.gz' '-#'
######################################################################## 100.0%
==> [2019-08-22-15:47:22.314185] '/usr/bin/tar' '--exclude=*/*/*' '-tf' '/cache/spack-stage/sinan/spack-stage-1RRl5T/PyJWT-1.7.1.tar.gz'
==> [2019-08-22-15:47:22.320293] WRITE LOCK: /cache/spack-stage/sinan/.lock[2324237983255820602:1] [Released]
==> [2019-08-22-15:47:22.320423] Checksummed 1 version of pyjwt
==> [2019-08-22-15:47:22.320472] This package looks like it uses the python build system
Traceback (most recent call last):
File "/disk/home/sinan/Downloads/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 697, in main
return _invoke_command(command, parser, args, unknown)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/main.py", line 447, in _invoke_command
return_val = command(parser, args)
File "/disk/home/sinan/Downloads/spack/lib/spack/spack/cmd/create.py", line 719, in create
package = package_class(**constr_args)
TypeError: __init__() got an unexpected keyword argument 'url'
|
TypeError
|
def read_cle_release_file():
"""Read the CLE release file and return a dict with its attributes.
This file is present on newer versions of Cray.
The release file looks something like this::
RELEASE=6.0.UP07
BUILD=6.0.7424
...
The dictionary we produce looks like this::
{
"RELEASE": "6.0.UP07",
"BUILD": "6.0.7424",
...
}
Returns:
dict: dictionary of release attributes
"""
with open(_cle_release_file) as release_file:
result = {}
for line in release_file:
# use partition instead of split() to ensure we only split on
# the first '=' in the line.
key, _, value = line.partition("=")
result[key] = value.strip()
return result
|
def read_cle_release_file():
"""Read the CLE release file and return a dict with its attributes.
The release file looks something like this::
RELEASE=6.0.UP07
BUILD=6.0.7424
...
The dictionary we produce looks like this::
{
"RELEASE": "6.0.UP07",
"BUILD": "6.0.7424",
...
}
"""
with open(_cle_release_file) as release_file:
result = {}
for line in release_file:
# use partition instead of split() to ensure we only split on
# the first '=' in the line.
key, _, value = line.partition("=")
result[key] = value.strip()
return result
|
https://github.com/spack/spack/issues/12187
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 704, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 627, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 558, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 554, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 609, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 562, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 184, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 388, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 76, in __init__
back_distro = Cnl()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 57, in __init__
version = self._detect_crayos_version()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 66, in _detect_crayos_version
release_attrs = read_cle_release_file()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 37, in read_cle_release_file
with open(_cle_release_file) as release_file:
IOError: [Errno 2] No such file or directory: '/etc/opt/cray/release/cle-release'
|
IOError
|
def _detect_crayos_version(cls):
if os.path.isfile(_cle_release_file):
release_attrs = read_cle_release_file()
v = spack.version.Version(release_attrs["RELEASE"])
return v[0]
elif os.path.isfile(_clerelease_file):
v = read_clerelease_file()
return spack.version.Version(v)[0]
else:
raise spack.error.UnsupportedPlatformError("Unable to detect Cray OS version")
|
def _detect_crayos_version(cls):
release_attrs = read_cle_release_file()
v = spack.version.Version(release_attrs["RELEASE"])
return v[0]
|
https://github.com/spack/spack/issues/12187
|
Traceback (most recent call last):
File "/u/sciteam/stewart1/spack/bin/spack", line 48, in <module>
sys.exit(spack.main.main())
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/main.py", line 704, in main
if spack.config.get('config:debug'):
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 627, in get
return config.get(path, default, scope)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 558, in __getattr__
return getattr(self.instance, name)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 554, in instance
self._instance = self.factory()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 609, in _config
_add_platform_scope(cfg, ConfigScope, name, path)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/config.py", line 562, in _add_platform_scope
platform = spack.architecture.platform().name
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/llnl/util/lang.py", line 184, in _memoized_function
func.cache[args] = func(*args)
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/architecture.py", line 388, in platform
return platform_cls()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/platforms/cray.py", line 76, in __init__
back_distro = Cnl()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 57, in __init__
version = self._detect_crayos_version()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 66, in _detect_crayos_version
release_attrs = read_cle_release_file()
File "/mnt/a/u/sciteam/stewart1/spack/lib/spack/spack/operating_systems/cnl.py", line 37, in read_cle_release_file
with open(_cle_release_file) as release_file:
IOError: [Errno 2] No such file or directory: '/etc/opt/cray/release/cle-release'
|
IOError
|
def setup_dependent_package(self, module, dependent_spec):
"""Called before perl modules' install() methods.
In most cases, extensions will only need to have one line:
perl('Makefile.PL','INSTALL_BASE=%s' % self.prefix)
"""
# If system perl is used through packages.yaml
# there cannot be extensions.
if dependent_spec.package.is_extension:
# perl extension builds can have a global perl
# executable function
module.perl = self.spec["perl"].command
# Add variables for library directory
module.perl_lib_dir = dependent_spec.prefix.lib.perl5
# Make the site packages directory for extensions,
# if it does not exist already.
mkdirp(module.perl_lib_dir)
|
def setup_dependent_package(self, module, dependent_spec):
"""Called before perl modules' install() methods.
In most cases, extensions will only need to have one line:
perl('Makefile.PL','INSTALL_BASE=%s' % self.prefix)
"""
# perl extension builds can have a global perl executable function
module.perl = self.spec["perl"].command
# Add variables for library directory
module.perl_lib_dir = dependent_spec.prefix.lib.perl5
# Make the site packages directory for extensions,
# if it does not exist already.
if dependent_spec.package.is_extension:
mkdirp(module.perl_lib_dir)
|
https://github.com/spack/spack/issues/11033
|
==> [2019-03-28-09:45:40.882685] Error: RuntimeError: Unable to locate perl command in None/bin
/scratch/gartung/work/spack/var/spack/repos/builtin/packages/perl/package.py:194, in setup_dependent_package:
191 """
192
193 # perl extension builds can have a global perl executable function
>> 194 module.perl = self.spec['perl'].command
195
196 # Add variables for library directory
197 module.perl_lib_dir = dependent_spec.prefix.lib.perl5
Traceback (most recent call last):
File "/scratch/gartung/work/spack/lib/spack/spack/build_environment.py", line 766, in child_process
setup_package(pkg, dirty=dirty)
File "/scratch/gartung/work/spack/lib/spack/spack/build_environment.py", line 687, in setup_package
dpkg.setup_dependent_package(pkg.module, spec)
File "/scratch/gartung/work/spack/var/spack/repos/builtin/packages/perl/package.py", line 195, in setup_dependent_package
File "/scratch/gartung/work/spack/lib/spack/spack/spec.py", line 810, in __get__
value = f()
File "/scratch/gartung/work/spack/lib/spack/spack/spec.py", line 802, in <lambda>
callbacks_chain.append(lambda: self.default(self, instance, cls))
File "/scratch/gartung/work/spack/lib/spack/spack/spec.py", line 654, in _command_default_handler
raise RuntimeError(msg.format(spec.name, spec.prefix.bin))
RuntimeError: Unable to locate perl command in None/bin
|
RuntimeError
|
def _read_from_file(self, stream, format="json"):
"""
Fill database from file, do not maintain old data
Translate the spec portions from node-dict form to spec form
Does not do any locking.
"""
if format.lower() == "json":
load = sjson.load
elif format.lower() == "yaml":
load = syaml.load
else:
raise ValueError("Invalid database format: %s" % format)
try:
if isinstance(stream, basestring):
with open(stream, "r") as f:
fdata = load(f)
else:
fdata = load(stream)
except MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e))
if fdata is None:
return
def check(cond, msg):
if not cond:
raise CorruptDatabaseError(
"Spack database is corrupt: %s" % msg, self._index_path
)
check("database" in fdata, "No 'database' attribute in YAML.")
# High-level file checks
db = fdata["database"]
check("installs" in db, "No 'installs' in YAML DB.")
check("version" in db, "No 'version' in YAML DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
self.reindex(spack.store.layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
msg = "Invalid record in Spack database: hash: %s, cause: %s: %s"
msg %= (hash_key, type(e).__name__, str(e))
raise CorruptDatabaseError(msg, self._index_path)
# Build up the database in three passes:
#
# 1. Read in all specs without dependencies.
# 2. Hook dependencies up among specs.
# 3. Mark all specs concrete.
#
# The database is built up so that ALL specs in it share nodes
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
# Pass 1: Iterate through database and build specs w/o dependencies
data = {}
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
# TODO: would a more immmutable spec implementation simplify
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
except Exception as e:
invalid_record(hash_key, e)
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
except Exception as e:
invalid_record(hash_key, e)
# Pass 3: Mark all specs concrete. Specs representing real
# installations must be explicitly marked.
# We do this *after* all dependencies are connected because if we
# do it *while* we're constructing specs,it causes hashes to be
# cached prematurely.
for hash_key, rec in data.items():
rec.spec._mark_concrete()
rec.spec.package.spec._mark_concrete()
self._data = data
|
def _read_from_file(self, stream, format="json"):
"""
Fill database from file, do not maintain old data
Translate the spec portions from node-dict form to spec form
Does not do any locking.
"""
if format.lower() == "json":
load = sjson.load
elif format.lower() == "yaml":
load = syaml.load
else:
raise ValueError("Invalid database format: %s" % format)
try:
if isinstance(stream, basestring):
with open(stream, "r") as f:
fdata = load(f)
else:
fdata = load(stream)
except MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e))
if fdata is None:
return
def check(cond, msg):
if not cond:
raise CorruptDatabaseError(
"Spack database is corrupt: %s" % msg, self._index_path
)
check("database" in fdata, "No 'database' attribute in YAML.")
# High-level file checks
db = fdata["database"]
check("installs" in db, "No 'installs' in YAML DB.")
check("version" in db, "No 'version' in YAML DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
self.reindex(spack.store.layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
msg = "Invalid record in Spack database: hash: %s, cause: %s: %s"
msg %= (hash_key, type(e).__name__, str(e))
raise CorruptDatabaseError(msg, self._index_path)
# Build up the database in three passes:
#
# 1. Read in all specs without dependencies.
# 2. Hook dependencies up among specs.
# 3. Mark all specs concrete.
#
# The database is built up so that ALL specs in it share nodes
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
# Pass 1: Iterate through database and build specs w/o dependencies
data = {}
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
# TODO: would a more immmutable spec implementation simplify
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
except Exception as e:
invalid_record(hash_key, e)
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
except Exception as e:
invalid_record(hash_key, e)
# Pass 3: Mark all specs concrete. Specs representing real
# installations must be explicitly marked.
# We do this *after* all dependencies are connected because if we
# do it *while* we're constructing specs,it causes hashes to be
# cached prematurely.
for hash_key, rec in data.items():
rec.spec._mark_concrete()
self._data = data
|
https://github.com/spack/spack/issues/2911
|
Traceback (most recent call last):
File "/soft/spack-0.10.0/bin/spack", line 212, in <module>
main(sys.argv)
File "/soft/spack-0.10.0/bin/spack", line 208, in main
_main(args, unknown)
File "/soft/spack-0.10.0/bin/spack", line 174, in _main
return_val = command(parser, args)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/cmd/module.py", line 259, in module
callbacks[args.subparser_name](module_type, specs, args)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/cmd/module.py", line 243, in refresh
x.write(overwrite=True)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/modules.py", line 380, in write
set_module_variables_for_package(package, mod)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 380, in set_module_variables_for_package
m.std_cmake_args = spack.CMakePackage._std_args(pkg)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_systems/cmake.py", line 95, in _std_args
rpaths = ':'.join(spack.build_environment.get_rpaths(pkg))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 424, in get_rpaths
rpaths.extend(d.prefix.lib for d in deps
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 425, in <genexpr>
if os.path.isdir(d.prefix.lib))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/spec.py", line 1156, in prefix
return Prefix(spack.store.layout.path_for_spec(self))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/directory_layout.py", line 124, in path_for_spec
_check_concrete(spec)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/directory_layout.py", line 42, in _check_concrete
raise ValueError('Specs passed to a DirectoryLayout must be concrete!')
ValueError: Specs passed to a DirectoryLayout must be concrete!
|
ValueError
|
def _read_from_file(self, stream, format="json"):
"""
Fill database from file, do not maintain old data
Translate the spec portions from node-dict form to spec form
Does not do any locking.
"""
if format.lower() == "json":
load = sjson.load
elif format.lower() == "yaml":
load = syaml.load
else:
raise ValueError("Invalid database format: %s" % format)
try:
if isinstance(stream, basestring):
with open(stream, "r") as f:
fdata = load(f)
else:
fdata = load(stream)
except MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e))
if fdata is None:
return
def check(cond, msg):
if not cond:
raise CorruptDatabaseError(
"Spack database is corrupt: %s" % msg, self._index_path
)
check("database" in fdata, "No 'database' attribute in YAML.")
# High-level file checks
db = fdata["database"]
check("installs" in db, "No 'installs' in YAML DB.")
check("version" in db, "No 'version' in YAML DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
self.reindex(spack.store.layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
msg = "Invalid record in Spack database: hash: %s, cause: %s: %s"
msg %= (hash_key, type(e).__name__, str(e))
raise CorruptDatabaseError(msg, self._index_path)
# Build up the database in three passes:
#
# 1. Read in all specs without dependencies.
# 2. Hook dependencies up among specs.
# 3. Mark all specs concrete.
#
# The database is built up so that ALL specs in it share nodes
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
# Pass 1: Iterate through database and build specs w/o dependencies
data = {}
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
# TODO: would a more immmutable spec implementation simplify
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
except Exception as e:
invalid_record(hash_key, e)
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
except Exception as e:
invalid_record(hash_key, e)
# Pass 3: Mark all specs concrete. Specs representing real
# installations must be explicitly marked.
# We do this *after* all dependencies are connected because if we
# do it *while* we're constructing specs,it causes hashes to be
# cached prematurely.
for hash_key, rec in data.items():
rec.spec._mark_concrete()
self._data = data
|
def _read_from_file(self, stream, format="json"):
"""
Fill database from file, do not maintain old data
Translate the spec portions from node-dict form to spec form
Does not do any locking.
"""
if format.lower() == "json":
load = sjson.load
elif format.lower() == "yaml":
load = syaml.load
else:
raise ValueError("Invalid database format: %s" % format)
try:
if isinstance(stream, basestring):
with open(stream, "r") as f:
fdata = load(f)
else:
fdata = load(stream)
except MarkedYAMLError as e:
raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e))
if fdata is None:
return
def check(cond, msg):
if not cond:
raise CorruptDatabaseError(
"Spack database is corrupt: %s" % msg, self._index_path
)
check("database" in fdata, "No 'database' attribute in YAML.")
# High-level file checks
db = fdata["database"]
check("installs" in db, "No 'installs' in YAML DB.")
check("version" in db, "No 'version' in YAML DB.")
installs = db["installs"]
# TODO: better version checking semantics.
version = Version(db["version"])
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
self.reindex(spack.store.layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
msg = "Invalid record in Spack database: hash: %s, cause: %s: %s"
msg %= (hash_key, type(e).__name__, str(e))
raise CorruptDatabaseError(msg, self._index_path)
# Build up the database in three passes:
#
# 1. Read in all specs without dependencies.
# 2. Hook dependencies up among specs.
# 3. Mark all specs concrete.
#
# The database is built up so that ALL specs in it share nodes
# (i.e., its specs are a true Merkle DAG, unlike most specs.)
# Pass 1: Iterate through database and build specs w/o dependencies
data = {}
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
# TODO: would a more immmutable spec implementation simplify
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
except Exception as e:
invalid_record(hash_key, e)
# Pass 2: Assign dependencies once all specs are created.
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
except Exception as e:
invalid_record(hash_key, e)
# Pass 3: Mark all specs concrete. Specs representing real
# installations must be explicitly marked.
# We do this *after* all dependencies are connected because if we
# do it *while* we're constructing specs,it causes hashes to be
# cached prematurely.
for hash_key, rec in data.items():
rec.spec._mark_concrete()
rec.spec.package.spec._mark_concrete()
self._data = data
|
https://github.com/spack/spack/issues/2911
|
Traceback (most recent call last):
File "/soft/spack-0.10.0/bin/spack", line 212, in <module>
main(sys.argv)
File "/soft/spack-0.10.0/bin/spack", line 208, in main
_main(args, unknown)
File "/soft/spack-0.10.0/bin/spack", line 174, in _main
return_val = command(parser, args)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/cmd/module.py", line 259, in module
callbacks[args.subparser_name](module_type, specs, args)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/cmd/module.py", line 243, in refresh
x.write(overwrite=True)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/modules.py", line 380, in write
set_module_variables_for_package(package, mod)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 380, in set_module_variables_for_package
m.std_cmake_args = spack.CMakePackage._std_args(pkg)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_systems/cmake.py", line 95, in _std_args
rpaths = ':'.join(spack.build_environment.get_rpaths(pkg))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 424, in get_rpaths
rpaths.extend(d.prefix.lib for d in deps
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/build_environment.py", line 425, in <genexpr>
if os.path.isdir(d.prefix.lib))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/spec.py", line 1156, in prefix
return Prefix(spack.store.layout.path_for_spec(self))
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/directory_layout.py", line 124, in path_for_spec
_check_concrete(spec)
File "/blues/gpfs/home/software/spack-0.10.0/lib/spack/spack/directory_layout.py", line 42, in _check_concrete
raise ValueError('Specs passed to a DirectoryLayout must be concrete!')
ValueError: Specs passed to a DirectoryLayout must be concrete!
|
ValueError
|
def setup_parser(subparser):
subparser.add_argument(
"--only",
default="package,dependencies",
dest="things_to_install",
choices=["package", "dependencies"],
help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
the dependencies""",
)
subparser.add_argument(
"-j",
"--jobs",
action="store",
type=int,
help="explicitly set number of make jobs. default is #cpus",
)
subparser.add_argument(
"--keep-prefix",
action="store_true",
dest="keep_prefix",
help="don't remove the install prefix if installation fails",
)
subparser.add_argument(
"--keep-stage",
action="store_true",
dest="keep_stage",
help="don't remove the build stage if installation succeeds",
)
subparser.add_argument(
"--restage",
action="store_true",
dest="restage",
help="if a partial install is detected, delete prior state",
)
subparser.add_argument(
"-n",
"--no-checksum",
action="store_true",
dest="no_checksum",
help="do not check packages against checksum",
)
subparser.add_argument(
"-v",
"--verbose",
action="store_true",
dest="verbose",
help="display verbose build output while installing",
)
subparser.add_argument(
"--fake",
action="store_true",
dest="fake",
help="fake install. just remove prefix and create a fake file",
)
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ["clean", "dirty"])
subparser.add_argument(
"package", nargs=argparse.REMAINDER, help="spec of the package to install"
)
subparser.add_argument(
"--run-tests",
action="store_true",
dest="run_tests",
help="run package level tests during installation",
)
subparser.add_argument(
"--log-format",
default=None,
choices=["junit"],
help="format to be used for log files",
)
subparser.add_argument(
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
|
def setup_parser(subparser):
subparser.add_argument(
"--only",
default="package,dependencies",
dest="things_to_install",
choices=["package", "dependencies"],
help="""select the mode of installation.
the default is to install the package along with all its dependencies.
alternatively one can decide to install only the package or only
the dependencies""",
)
subparser.add_argument(
"-j",
"--jobs",
action="store",
type=int,
help="explicitly set number of make jobs. default is #cpus",
)
subparser.add_argument(
"--keep-prefix",
action="store_true",
dest="keep_prefix",
help="don't remove the install prefix if installation fails",
)
subparser.add_argument(
"--keep-stage",
action="store_true",
dest="keep_stage",
help="don't remove the build stage if installation succeeds",
)
subparser.add_argument(
"-n",
"--no-checksum",
action="store_true",
dest="no_checksum",
help="do not check packages against checksum",
)
subparser.add_argument(
"-v",
"--verbose",
action="store_true",
dest="verbose",
help="display verbose build output while installing",
)
subparser.add_argument(
"--fake",
action="store_true",
dest="fake",
help="fake install. just remove prefix and create a fake file",
)
cd_group = subparser.add_mutually_exclusive_group()
arguments.add_common_arguments(cd_group, ["clean", "dirty"])
subparser.add_argument(
"package", nargs=argparse.REMAINDER, help="spec of the package to install"
)
subparser.add_argument(
"--run-tests",
action="store_true",
dest="run_tests",
help="run package level tests during installation",
)
subparser.add_argument(
"--log-format",
default=None,
choices=["junit"],
help="format to be used for log files",
)
subparser.add_argument(
"--log-file",
default=None,
help="filename for the log file. if not passed a default will be used",
)
|
https://github.com/spack/spack/issues/2794
|
amklinv@edison10:~/edison/spack/bin> ./spack -vd install xsdk arch=cray-CNL-ivybridge
==> Reading config file /global/u1/a/amklinv/edison/spack/etc/spack/defaults/packages.yaml
==> Reading config file /global/homes/a/amklinv/.spack/cray/packages.yaml
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Acquiring]
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Released]
==> '/usr/bin/ibv_devices'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> Reading config file /global/homes/a/amklinv/.spack/cray/compilers.yaml
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Released]
==> Installing xsdk
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Acquiring]
==> hypre is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/hypre-develop-jwh6ty3izsk6ukytgs2i23qxgo3i4lrq
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Acquiring]
==> superlu-dist is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/superlu-dist-develop-c5jaojzxkpmqdyxz7xts4crqshkhaxd4
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Acquiring]
==> trilinos is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/trilinos-develop-qaujvdhjder4h77x3td7z7jkfi27rcop
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Released]
Traceback (most recent call last):
File "./spack", line 212, in <module>
main(sys.argv)
File "./spack", line 208, in main
_main(args, unknown)
File "./spack", line 174, in _main
return_val = command(parser, args)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/cmd/install.py", line 346, in install
package.do_install(**kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1185, in do_install
**kwargs
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1157, in do_install
rec = spack.store.db.get_record(self.spec)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 81, in converter
return function(self, spec_like, *args, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 554, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 549, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! trilinos@develop%gcc@6.1.0+boost~debug+hdf5+hypre+metis~mumps~python+shared~suite-sparse~superlu+superlu-dist+xsdkflags arch=cray-CNL-ivybridge^boost@1.63.0%gcc@6.1.0+atomic+chrono+date_time~debug+filesystem~graph~icu+iostreams+locale+log+math~mpi+multithreaded+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer+wave arch=cray-CNL-ivybridge^bzip2@1.0.6%gcc@6.1.0 arch=cray-CNL-ivybridge^cmake@3.7.1%gcc@6.1.0~doc+ncurses+openssl+ownlibs~qt arch=cray-CNL-ivybridge^glm@0.9.7.1%gcc@6.1.0 arch=cray-CNL-ivybridge^hdf5@1.8.12%gcc@6.1.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=cray-CNL-ivybridge^hypre@develop%gcc@6.1.0~internal-superlu+shared arch=cray-CNL-ivybridge^libsigsegv@2.10%gcc@6.1.0 arch=cray-CNL-ivybridge^m4@1.4.17%gcc@6.1.0+sigsegv arch=cray-CNL-ivybridge^matio@1.5.9%gcc@6.1.0+hdf5+zlib arch=cray-CNL-ivybridge^metis@5.1.0%gcc@6.1.0~debug~gdb~idx64~real64+shared arch=cray-CNL-ivybridge^mpich@7.4.1%gcc@6.1.0+hydra+pmi+romio~verbs arch=cray-CNL-ivybridge^ncurses@6.0%gcc@6.1.0 arch=cray-CNL-ivybridge^netcdf@4.4.1%gcc@6.1.0~cdmremote~dap~hdf4 maxdims=1024 maxvars=8192+mpi~parallel-netcdf+shared arch=cray-CNL-ivybridge^openblas@0.2.19%gcc@6.1.0~openmp+pic+shared arch=cray-CNL-ivybridge^openssl@1.0.2j%gcc@6.1.0 arch=cray-CNL-ivybridge^parmetis@4.0.3%gcc@6.1.0~debug~gdb+shared arch=cray-CNL-ivybridge^superlu-dist@develop%gcc@6.1.0 arch=cray-CNL-ivybridge^zlib@1.2.10%gcc@6.1.0+pic arch=cray-CNL-ivybridge'
|
KeyError
|
def install(parser, args, **kwargs):
if not args.package:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
# Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API
kwargs.update(
{
"keep_prefix": args.keep_prefix,
"keep_stage": args.keep_stage,
"restage": args.restage,
"install_deps": "dependencies" in args.things_to_install,
"make_jobs": args.jobs,
"run_tests": args.run_tests,
"verbose": args.verbose,
"fake": args.fake,
"dirty": args.dirty,
}
)
# Spec from cli
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) == 0:
tty.error("The `spack install` command requires a spec to install.")
for spec in specs:
# Check if we were asked to produce some log for dashboards
if args.log_format is not None:
# Compute the filename for logging
log_filename = args.log_file
if not log_filename:
log_filename = default_log_file(spec)
# Create the test suite in which to log results
test_suite = TestSuite(spec)
# Decorate PackageBase.do_install to get installation status
PackageBase.do_install = junit_output(spec, test_suite)(
PackageBase.do_install
)
# Do the actual installation
if args.things_to_install == "dependencies":
# Install dependencies as-if they were installed
# for root (explicit=False in the DB)
kwargs["explicit"] = False
for s in spec.dependencies():
p = spack.repo.get(s)
p.do_install(**kwargs)
else:
package = spack.repo.get(spec)
kwargs["explicit"] = True
package.do_install(**kwargs)
# Dump log file if asked to
if args.log_format is not None:
test_suite.dump(log_filename)
|
def install(parser, args, **kwargs):
if not args.package:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
# Parse cli arguments and construct a dictionary
# that will be passed to Package.do_install API
kwargs.update(
{
"keep_prefix": args.keep_prefix,
"keep_stage": args.keep_stage,
"install_deps": "dependencies" in args.things_to_install,
"make_jobs": args.jobs,
"run_tests": args.run_tests,
"verbose": args.verbose,
"fake": args.fake,
"dirty": args.dirty,
}
)
# Spec from cli
specs = spack.cmd.parse_specs(args.package, concretize=True)
if len(specs) == 0:
tty.error("The `spack install` command requires a spec to install.")
for spec in specs:
# Check if we were asked to produce some log for dashboards
if args.log_format is not None:
# Compute the filename for logging
log_filename = args.log_file
if not log_filename:
log_filename = default_log_file(spec)
# Create the test suite in which to log results
test_suite = TestSuite(spec)
# Decorate PackageBase.do_install to get installation status
PackageBase.do_install = junit_output(spec, test_suite)(
PackageBase.do_install
)
# Do the actual installation
if args.things_to_install == "dependencies":
# Install dependencies as-if they were installed
# for root (explicit=False in the DB)
kwargs["explicit"] = False
for s in spec.dependencies():
p = spack.repo.get(s)
p.do_install(**kwargs)
else:
package = spack.repo.get(spec)
kwargs["explicit"] = True
package.do_install(**kwargs)
# Dump log file if asked to
if args.log_format is not None:
test_suite.dump(log_filename)
|
https://github.com/spack/spack/issues/2794
|
amklinv@edison10:~/edison/spack/bin> ./spack -vd install xsdk arch=cray-CNL-ivybridge
==> Reading config file /global/u1/a/amklinv/edison/spack/etc/spack/defaults/packages.yaml
==> Reading config file /global/homes/a/amklinv/.spack/cray/packages.yaml
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Acquiring]
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Released]
==> '/usr/bin/ibv_devices'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> Reading config file /global/homes/a/amklinv/.spack/cray/compilers.yaml
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Released]
==> Installing xsdk
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Acquiring]
==> hypre is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/hypre-develop-jwh6ty3izsk6ukytgs2i23qxgo3i4lrq
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Acquiring]
==> superlu-dist is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/superlu-dist-develop-c5jaojzxkpmqdyxz7xts4crqshkhaxd4
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Acquiring]
==> trilinos is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/trilinos-develop-qaujvdhjder4h77x3td7z7jkfi27rcop
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Released]
Traceback (most recent call last):
File "./spack", line 212, in <module>
main(sys.argv)
File "./spack", line 208, in main
_main(args, unknown)
File "./spack", line 174, in _main
return_val = command(parser, args)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/cmd/install.py", line 346, in install
package.do_install(**kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1185, in do_install
**kwargs
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1157, in do_install
rec = spack.store.db.get_record(self.spec)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 81, in converter
return function(self, spec_like, *args, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 554, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 549, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! trilinos@develop%gcc@6.1.0+boost~debug+hdf5+hypre+metis~mumps~python+shared~suite-sparse~superlu+superlu-dist+xsdkflags arch=cray-CNL-ivybridge^boost@1.63.0%gcc@6.1.0+atomic+chrono+date_time~debug+filesystem~graph~icu+iostreams+locale+log+math~mpi+multithreaded+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer+wave arch=cray-CNL-ivybridge^bzip2@1.0.6%gcc@6.1.0 arch=cray-CNL-ivybridge^cmake@3.7.1%gcc@6.1.0~doc+ncurses+openssl+ownlibs~qt arch=cray-CNL-ivybridge^glm@0.9.7.1%gcc@6.1.0 arch=cray-CNL-ivybridge^hdf5@1.8.12%gcc@6.1.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=cray-CNL-ivybridge^hypre@develop%gcc@6.1.0~internal-superlu+shared arch=cray-CNL-ivybridge^libsigsegv@2.10%gcc@6.1.0 arch=cray-CNL-ivybridge^m4@1.4.17%gcc@6.1.0+sigsegv arch=cray-CNL-ivybridge^matio@1.5.9%gcc@6.1.0+hdf5+zlib arch=cray-CNL-ivybridge^metis@5.1.0%gcc@6.1.0~debug~gdb~idx64~real64+shared arch=cray-CNL-ivybridge^mpich@7.4.1%gcc@6.1.0+hydra+pmi+romio~verbs arch=cray-CNL-ivybridge^ncurses@6.0%gcc@6.1.0 arch=cray-CNL-ivybridge^netcdf@4.4.1%gcc@6.1.0~cdmremote~dap~hdf4 maxdims=1024 maxvars=8192+mpi~parallel-netcdf+shared arch=cray-CNL-ivybridge^openblas@0.2.19%gcc@6.1.0~openmp+pic+shared arch=cray-CNL-ivybridge^openssl@1.0.2j%gcc@6.1.0 arch=cray-CNL-ivybridge^parmetis@4.0.3%gcc@6.1.0~debug~gdb+shared arch=cray-CNL-ivybridge^superlu-dist@develop%gcc@6.1.0 arch=cray-CNL-ivybridge^zlib@1.2.10%gcc@6.1.0+pic arch=cray-CNL-ivybridge'
|
KeyError
|
def _read(self):
"""Re-read Database from the data in the set location.
This does no locking, with one exception: it will automatically
migrate an index.yaml to an index.json if possible. This requires
taking a write lock.
"""
if os.path.isfile(self._index_path):
# Read from JSON file if a JSON database exists
self._read_from_file(self._index_path, format="json")
elif os.path.isfile(self._old_yaml_index_path):
if os.access(self._db_dir, os.R_OK | os.W_OK):
# if we can write, then read AND write a JSON file.
self._read_from_file(self._old_yaml_index_path, format="yaml")
with WriteTransaction(self.lock, timeout=_db_lock_timeout):
self._write(None, None, None)
else:
# Read chck for a YAML file if we can't find JSON.
self._read_from_file(self._old_yaml_index_path, format="yaml")
else:
# The file doesn't exist, try to traverse the directory.
# reindex() takes its own write lock, so no lock here.
with WriteTransaction(self.lock, timeout=_db_lock_timeout):
self._write(None, None, None)
self.reindex(spack.store.layout)
|
def _read(self):
"""Re-read Database from the data in the set location.
This does no locking, with one exception: it will automatically
migrate an index.yaml to an index.json if possible. This requires
taking a write lock.
"""
if os.path.isfile(self._index_path):
# Read from JSON file if a JSON database exists
self._read_from_file(self._index_path, format="json")
elif os.path.isfile(self._old_yaml_index_path):
if os.access(self._db_dir, os.R_OK | os.W_OK):
# if we can write, then read AND write a JSON file.
self._read_from_file(self._old_yaml_index_path, format="yaml")
with WriteTransaction(self.lock, timeout=_db_lock_timeout):
self._write(None, None, None)
else:
# Read chck for a YAML file if we can't find JSON.
self._read_from_file(self._old_yaml_index_path, format="yaml")
else:
# The file doesn't exist, try to traverse the directory.
# reindex() takes its own write lock, so no lock here.
self.reindex(spack.store.layout)
|
https://github.com/spack/spack/issues/2794
|
amklinv@edison10:~/edison/spack/bin> ./spack -vd install xsdk arch=cray-CNL-ivybridge
==> Reading config file /global/u1/a/amklinv/edison/spack/etc/spack/defaults/packages.yaml
==> Reading config file /global/homes/a/amklinv/.spack/cray/packages.yaml
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Acquiring]
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Released]
==> '/usr/bin/ibv_devices'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> Reading config file /global/homes/a/amklinv/.spack/cray/compilers.yaml
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Released]
==> Installing xsdk
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Acquiring]
==> hypre is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/hypre-develop-jwh6ty3izsk6ukytgs2i23qxgo3i4lrq
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Acquiring]
==> superlu-dist is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/superlu-dist-develop-c5jaojzxkpmqdyxz7xts4crqshkhaxd4
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Acquiring]
==> trilinos is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/trilinos-develop-qaujvdhjder4h77x3td7z7jkfi27rcop
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Released]
Traceback (most recent call last):
File "./spack", line 212, in <module>
main(sys.argv)
File "./spack", line 208, in main
_main(args, unknown)
File "./spack", line 174, in _main
return_val = command(parser, args)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/cmd/install.py", line 346, in install
package.do_install(**kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1185, in do_install
**kwargs
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1157, in do_install
rec = spack.store.db.get_record(self.spec)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 81, in converter
return function(self, spec_like, *args, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 554, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 549, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! trilinos@develop%gcc@6.1.0+boost~debug+hdf5+hypre+metis~mumps~python+shared~suite-sparse~superlu+superlu-dist+xsdkflags arch=cray-CNL-ivybridge^boost@1.63.0%gcc@6.1.0+atomic+chrono+date_time~debug+filesystem~graph~icu+iostreams+locale+log+math~mpi+multithreaded+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer+wave arch=cray-CNL-ivybridge^bzip2@1.0.6%gcc@6.1.0 arch=cray-CNL-ivybridge^cmake@3.7.1%gcc@6.1.0~doc+ncurses+openssl+ownlibs~qt arch=cray-CNL-ivybridge^glm@0.9.7.1%gcc@6.1.0 arch=cray-CNL-ivybridge^hdf5@1.8.12%gcc@6.1.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=cray-CNL-ivybridge^hypre@develop%gcc@6.1.0~internal-superlu+shared arch=cray-CNL-ivybridge^libsigsegv@2.10%gcc@6.1.0 arch=cray-CNL-ivybridge^m4@1.4.17%gcc@6.1.0+sigsegv arch=cray-CNL-ivybridge^matio@1.5.9%gcc@6.1.0+hdf5+zlib arch=cray-CNL-ivybridge^metis@5.1.0%gcc@6.1.0~debug~gdb~idx64~real64+shared arch=cray-CNL-ivybridge^mpich@7.4.1%gcc@6.1.0+hydra+pmi+romio~verbs arch=cray-CNL-ivybridge^ncurses@6.0%gcc@6.1.0 arch=cray-CNL-ivybridge^netcdf@4.4.1%gcc@6.1.0~cdmremote~dap~hdf4 maxdims=1024 maxvars=8192+mpi~parallel-netcdf+shared arch=cray-CNL-ivybridge^openblas@0.2.19%gcc@6.1.0~openmp+pic+shared arch=cray-CNL-ivybridge^openssl@1.0.2j%gcc@6.1.0 arch=cray-CNL-ivybridge^parmetis@4.0.3%gcc@6.1.0~debug~gdb+shared arch=cray-CNL-ivybridge^superlu-dist@develop%gcc@6.1.0 arch=cray-CNL-ivybridge^zlib@1.2.10%gcc@6.1.0+pic arch=cray-CNL-ivybridge'
|
KeyError
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=None,
**kwargs,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
Args:
keep_prefix (bool): Keep install prefix on failure. By default,
destroys it.
keep_stage (bool): By default, stage is destroyed only if there
are no exceptions during build. Set to True to keep the stage
even with exceptions.
install_deps (bool): Install dependencies before installing this
package
skip_patch (bool): Skip patch stage of build if True.
verbose (bool): Display verbose build output (by default,
suppresses it)
make_jobs (int): Number of make jobs to use for install. Default
is ncpus
run_tests (bool): Run tests within the package's install()
fake (bool): Don't really build; install fake stub files instead.
explicit (bool): True if package was explicitly installed, False
if package was implicitly installed (as a dependency).
dirty (bool): Don't clean the build environment before installing.
force (bool): Install again, even if already installed.
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# For external packages the workflow is simplified, and basically
# consists in module file generation and registration in the DB
if self.spec.external:
return self._process_external_package(explicit)
restage = kwargs.get("restage", False)
partial = self.check_for_unfinished_installation(keep_prefix, restage)
# Ensure package is not already installed
layout = spack.store.layout
with spack.store.db.prefix_read_lock(self.spec):
if partial:
tty.msg("Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = "{0.name} is already installed in {0.prefix}"
tty.msg(msg.format(self))
rec = spack.store.db.get_record(self.spec)
return self._update_explicit_entry_in_db(rec, explicit)
# Dirty argument takes precedence over dirty config setting.
if dirty is None:
dirty = spack.dirty
self._do_install_pop_kwargs(kwargs)
# First, install dependencies recursively.
if install_deps:
tty.debug("Installing {0} dependencies".format(self.name))
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
**kwargs,
)
tty.msg("Installing %s" % self.name)
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# Then install the package itself.
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self.spec)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(
self.phases, self._InstallPhase_phases
):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self.spec)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
try:
# Create the install prefix and fork the build process.
if not os.path.exists(self.prefix):
spack.store.layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
spack.build_environment.fork(self, build_process, dirty=dirty)
# If we installed then we should keep the prefix
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(self.spec, spack.store.layout, explicit=explicit)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)
tty.warn("Keeping existing install prefix in place.")
raise
except StopIteration as e:
# A StopIteration exception means that do_install
# was asked to stop early from clients
tty.msg(e.message)
tty.msg("Package stage directory : {0}".format(self.stage.source_path))
finally:
# Remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=None,
**kwargs,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
Args:
keep_prefix (bool): Keep install prefix on failure. By default,
destroys it.
keep_stage (bool): By default, stage is destroyed only if there
are no exceptions during build. Set to True to keep the stage
even with exceptions.
install_deps (bool): Install dependencies before installing this
package
skip_patch (bool): Skip patch stage of build if True.
verbose (bool): Display verbose build output (by default,
suppresses it)
make_jobs (int): Number of make jobs to use for install. Default
is ncpus
run_tests (bool): Run tests within the package's install()
fake (bool): Don't really build; install fake stub files instead.
explicit (bool): True if package was explicitly installed, False
if package was implicitly installed (as a dependency).
dirty (bool): Don't clean the build environment before installing.
force (bool): Install again, even if already installed.
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# For external packages the workflow is simplified, and basically
# consists in module file generation and registration in the DB
if self.spec.external:
return self._process_external_package(explicit)
# Ensure package is not already installed
layout = spack.store.layout
with spack.store.db.prefix_read_lock(self.spec):
if keep_prefix and os.path.isdir(self.prefix) and (not self.installed):
tty.msg("Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = "{0.name} is already installed in {0.prefix}"
tty.msg(msg.format(self))
rec = spack.store.db.get_record(self.spec)
return self._update_explicit_entry_in_db(rec, explicit)
# Dirty argument takes precedence over dirty config setting.
if dirty is None:
dirty = spack.dirty
self._do_install_pop_kwargs(kwargs)
# First, install dependencies recursively.
if install_deps:
tty.debug("Installing {0} dependencies".format(self.name))
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
**kwargs,
)
tty.msg("Installing %s" % self.name)
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# Then install the package itself.
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self.spec)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(
self.phases, self._InstallPhase_phases
):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self.spec)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
try:
# Create the install prefix and fork the build process.
spack.store.layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
spack.build_environment.fork(self, build_process, dirty=dirty)
# If we installed then we should keep the prefix
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(self.spec, spack.store.layout, explicit=explicit)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)
tty.warn("Keeping existing install prefix in place.")
raise
except StopIteration as e:
# A StopIteration exception means that do_install
# was asked to stop early from clients
tty.msg(e.message)
tty.msg("Package stage directory : {0}".format(self.stage.source_path))
finally:
# Remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
|
https://github.com/spack/spack/issues/2794
|
amklinv@edison10:~/edison/spack/bin> ./spack -vd install xsdk arch=cray-CNL-ivybridge
==> Reading config file /global/u1/a/amklinv/edison/spack/etc/spack/defaults/packages.yaml
==> Reading config file /global/homes/a/amklinv/.spack/cray/packages.yaml
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Acquiring]
==> READ LOCK: /global/homes/a/amklinv/.spack/cache/providers/.builtin-index.yaml.lock[0:0] [Released]
==> '/usr/bin/ibv_devices'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> Reading config file /global/homes/a/amklinv/.spack/cray/compilers.yaml
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-mpich/7.4.1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x1'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-x2'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-cray'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-intel'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pgi'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'PrgEnv-pathscale'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'PrgEnv-gnu'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'unload' 'gcc-cross-aarch64'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'load' 'gcc/6.1.0'
==> '/opt/modules/3.2.10.4/bin/modulecmd' 'python' 'show' 'cray-hdf5-parallel/1.8.12'
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[6653323931864697468:1] [Released]
==> Installing xsdk
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Acquiring]
==> hypre is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/hypre-develop-jwh6ty3izsk6ukytgs2i23qxgo3i4lrq
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[2794471412757580533:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Acquiring]
==> superlu-dist is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/superlu-dist-develop-c5jaojzxkpmqdyxz7xts4crqshkhaxd4
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[840206737987988608:1] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Acquiring]
==> trilinos is already installed in /global/u1/a/amklinv/edison/spack/opt/spack/cray-CNL-ivybridge/gcc-6.1.0/trilinos-develop-qaujvdhjder4h77x3td7z7jkfi27rcop
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Acquiring]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/lock[0:0] [Released]
==> READ LOCK: /global/u1/a/amklinv/edison/spack/opt/spack/.spack-db/prefix_lock[4617400482960019937:1] [Released]
Traceback (most recent call last):
File "./spack", line 212, in <module>
main(sys.argv)
File "./spack", line 208, in main
_main(args, unknown)
File "./spack", line 174, in _main
return_val = command(parser, args)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/cmd/install.py", line 346, in install
package.do_install(**kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1185, in do_install
**kwargs
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/package.py", line 1157, in do_install
rec = spack.store.db.get_record(self.spec)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 81, in converter
return function(self, spec_like, *args, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 554, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/global/u1/a/amklinv/edison/spack/lib/spack/spack/database.py", line 549, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! trilinos@develop%gcc@6.1.0+boost~debug+hdf5+hypre+metis~mumps~python+shared~suite-sparse~superlu+superlu-dist+xsdkflags arch=cray-CNL-ivybridge^boost@1.63.0%gcc@6.1.0+atomic+chrono+date_time~debug+filesystem~graph~icu+iostreams+locale+log+math~mpi+multithreaded+program_options~python+random+regex+serialization+shared+signals~singlethreaded+system~taggedlayout+test+thread+timer+wave arch=cray-CNL-ivybridge^bzip2@1.0.6%gcc@6.1.0 arch=cray-CNL-ivybridge^cmake@3.7.1%gcc@6.1.0~doc+ncurses+openssl+ownlibs~qt arch=cray-CNL-ivybridge^glm@0.9.7.1%gcc@6.1.0 arch=cray-CNL-ivybridge^hdf5@1.8.12%gcc@6.1.0+cxx~debug+fortran+mpi+pic+shared~szip~threadsafe arch=cray-CNL-ivybridge^hypre@develop%gcc@6.1.0~internal-superlu+shared arch=cray-CNL-ivybridge^libsigsegv@2.10%gcc@6.1.0 arch=cray-CNL-ivybridge^m4@1.4.17%gcc@6.1.0+sigsegv arch=cray-CNL-ivybridge^matio@1.5.9%gcc@6.1.0+hdf5+zlib arch=cray-CNL-ivybridge^metis@5.1.0%gcc@6.1.0~debug~gdb~idx64~real64+shared arch=cray-CNL-ivybridge^mpich@7.4.1%gcc@6.1.0+hydra+pmi+romio~verbs arch=cray-CNL-ivybridge^ncurses@6.0%gcc@6.1.0 arch=cray-CNL-ivybridge^netcdf@4.4.1%gcc@6.1.0~cdmremote~dap~hdf4 maxdims=1024 maxvars=8192+mpi~parallel-netcdf+shared arch=cray-CNL-ivybridge^openblas@0.2.19%gcc@6.1.0~openmp+pic+shared arch=cray-CNL-ivybridge^openssl@1.0.2j%gcc@6.1.0 arch=cray-CNL-ivybridge^parmetis@4.0.3%gcc@6.1.0~debug~gdb+shared arch=cray-CNL-ivybridge^superlu-dist@develop%gcc@6.1.0 arch=cray-CNL-ivybridge^zlib@1.2.10%gcc@6.1.0+pic arch=cray-CNL-ivybridge'
|
KeyError
|
def get_number(prompt, **kwargs):
default = kwargs.get("default", None)
abort = kwargs.get("abort", None)
if default is not None and abort is not None:
prompt += " (default is %s, %s to abort) " % (default, abort)
elif default is not None:
prompt += " (default is %s) " % default
elif abort is not None:
prompt += " (%s to abort) " % abort
number = None
while number is None:
msg(prompt, newline=False)
ans = input()
if ans == str(abort):
return None
if ans:
try:
number = int(ans)
if number < 1:
msg("Please enter a valid number.")
number = None
except ValueError:
msg("Please enter a valid number.")
elif default is not None:
number = default
return number
|
def get_number(prompt, **kwargs):
default = kwargs.get("default", None)
abort = kwargs.get("abort", None)
if default is not None and abort is not None:
prompt += " (default is %s, %s to abort) " % (default, abort)
elif default is not None:
prompt += " (default is %s) " % default
elif abort is not None:
prompt += " (%s to abort) " % abort
number = None
while number is None:
msg(prompt, newline=False)
ans = raw_input()
if ans == str(abort):
return None
if ans:
try:
number = int(ans)
if number < 1:
msg("Please enter a valid number.")
number = None
except ValueError:
msg("Please enter a valid number.")
elif default is not None:
number = default
return number
|
https://github.com/spack/spack/issues/3960
|
$ spack uninstall --all
...
==> Do you want to proceed? [y/N] Traceback (most recent call last):
File "/home/christoph/spack/bin/spack", line 220, in <module>
main(sys.argv)
File "/home/christoph/spack/bin/spack", line 216, in main
_main(args, unknown)
File "/home/christoph/spack/bin/spack", line 183, in _main
return_val = command(parser, args)
File "/home/christoph/spack/lib/spack/spack/cmd/uninstall.py", line 213, in uninstall
answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
File "/home/christoph/spack/lib/spack/llnl/util/tty/__init__.py", line 200, in get_yes_or_no
ans = raw_input().lower()
NameError: name 'raw_input' is not defined
|
NameError
|
def get_yes_or_no(prompt, **kwargs):
default_value = kwargs.get("default", None)
if default_value is None:
prompt += " [y/n] "
elif default_value is True:
prompt += " [Y/n] "
elif default_value is False:
prompt += " [y/N] "
else:
raise ValueError("default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
msg(prompt, newline=False)
ans = input().lower()
if not ans:
result = default_value
if result is None:
print("Please enter yes or no.")
else:
if ans == "y" or ans == "yes":
result = True
elif ans == "n" or ans == "no":
result = False
return result
|
def get_yes_or_no(prompt, **kwargs):
default_value = kwargs.get("default", None)
if default_value is None:
prompt += " [y/n] "
elif default_value is True:
prompt += " [Y/n] "
elif default_value is False:
prompt += " [y/N] "
else:
raise ValueError("default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
msg(prompt, newline=False)
ans = raw_input().lower()
if not ans:
result = default_value
if result is None:
print("Please enter yes or no.")
else:
if ans == "y" or ans == "yes":
result = True
elif ans == "n" or ans == "no":
result = False
return result
|
https://github.com/spack/spack/issues/3960
|
$ spack uninstall --all
...
==> Do you want to proceed? [y/N] Traceback (most recent call last):
File "/home/christoph/spack/bin/spack", line 220, in <module>
main(sys.argv)
File "/home/christoph/spack/bin/spack", line 216, in main
_main(args, unknown)
File "/home/christoph/spack/bin/spack", line 183, in _main
return_val = command(parser, args)
File "/home/christoph/spack/lib/spack/spack/cmd/uninstall.py", line 213, in uninstall
answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
File "/home/christoph/spack/lib/spack/llnl/util/tty/__init__.py", line 200, in get_yes_or_no
ans = raw_input().lower()
NameError: name 'raw_input' is not defined
|
NameError
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=None,
**kwargs,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
:param bool keep_prefix: Keep install prefix on failure. By default,
destroys it.
:param bool keep_stage: By default, stage is destroyed only if there
are no exceptions during build. Set to True to keep the stage
even with exceptions.
:param bool install_deps: Install dependencies before installing this
package
:param bool skip_patch: Skip patch stage of build if True.
:param bool verbose: Display verbose build output (by default,
suppresses it)
:param int make_jobs: Number of make jobs to use for install. Default
is ncpus
:param bool run_tests: Run tests within the package's install()
:param bool fake: Don't really build; install fake stub files instead.
:param bool explicit: True if package was explicitly installed, False
if package was implicitly installed (as a dependency).
:param bool dirty: Don't clean the build environment before installing.
:param bool force: Install again, even if already installed.
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# For external packages the workflow is simplified, and basically
# consists in module file generation and registration in the DB
if self.spec.external:
return self._process_external_package(explicit)
# Ensure package is not already installed
layout = spack.store.layout
with spack.store.db.prefix_read_lock(self.spec):
if keep_prefix and os.path.isdir(self.prefix) and (not self.installed):
tty.msg("Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = "{0.name} is already installed in {0.prefix}"
tty.msg(msg.format(self))
rec = spack.store.db.get_record(self.spec)
return self._update_explicit_entry_in_db(rec, explicit)
# Dirty argument takes precedence over dirty config setting.
if dirty is None:
dirty = spack.dirty
self._do_install_pop_kwargs(kwargs)
# First, install dependencies recursively.
if install_deps:
tty.debug("Installing {0} dependencies".format(self.name))
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
**kwargs,
)
tty.msg("Installing %s" % self.name)
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# Then install the package itself.
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(
self.phases, self._InstallPhase_phases
):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
try:
# Create the install prefix and fork the build process.
spack.store.layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
spack.build_environment.fork(self, build_process, dirty=dirty)
# If we installed then we should keep the prefix
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(self.spec, spack.store.layout, explicit=explicit)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)
tty.warn("Keeping existing install prefix in place.")
raise
except StopIteration as e:
# A StopIteration exception means that do_install
# was asked to stop early from clients
tty.msg(e.message)
tty.msg("Package stage directory : {0}".format(self.stage.source_path))
finally:
# Remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=None,
**kwargs,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
:param bool keep_prefix: Keep install prefix on failure. By default,
destroys it.
:param bool keep_stage: By default, stage is destroyed only if there
are no exceptions during build. Set to True to keep the stage
even with exceptions.
:param bool install_deps: Install dependencies before installing this
package
:param bool skip_patch: Skip patch stage of build if True.
:param bool verbose: Display verbose build output (by default,
suppresses it)
:param int make_jobs: Number of make jobs to use for install. Default
is ncpus
:param bool run_tests: Run tests within the package's install()
:param bool fake: Don't really build; install fake stub files instead.
:param bool explicit: True if package was explicitly installed, False
if package was implicitly installed (as a dependency).
:param bool dirty: Don't clean the build environment before installing.
:param bool force: Install again, even if already installed.
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# For external packages the workflow is simplified, and basically
# consists in module file generation and registration in the DB
if self.spec.external:
return self._process_external_package(explicit)
# Ensure package is not already installed
layout = spack.store.layout
with spack.store.db.prefix_read_lock(self.spec):
if keep_prefix and os.path.isdir(self.prefix) and (not self.installed):
tty.msg("Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = "{0.name} is already installed in {0.prefix}"
tty.msg(msg.format(self))
rec = spack.store.db.get_record(self.spec)
return self._update_explicit_entry_in_db(rec, explicit)
# Dirty argument takes precedence over dirty config setting.
if dirty is None:
dirty = spack.dirty
self._do_install_pop_kwargs(kwargs)
# First, install dependencies recursively.
if install_deps:
tty.debug("Installing {0} dependencies".format(self.name))
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
**kwargs,
)
tty.msg("Installing %s" % self.name)
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# Then install the package itself.
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. raw_input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(
self.phases, self._InstallPhase_phases
):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
try:
# Create the install prefix and fork the build process.
spack.store.layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
spack.build_environment.fork(self, build_process, dirty=dirty)
# If we installed then we should keep the prefix
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.store.db.add(self.spec, spack.store.layout, explicit=explicit)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone else's stuff)
tty.warn("Keeping existing install prefix in place.")
raise
except StopIteration as e:
# A StopIteration exception means that do_install
# was asked to stop early from clients
tty.msg(e.message)
tty.msg("Package stage directory : {0}".format(self.stage.source_path))
finally:
# Remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
|
https://github.com/spack/spack/issues/3960
|
$ spack uninstall --all
...
==> Do you want to proceed? [y/N] Traceback (most recent call last):
File "/home/christoph/spack/bin/spack", line 220, in <module>
main(sys.argv)
File "/home/christoph/spack/bin/spack", line 216, in main
_main(args, unknown)
File "/home/christoph/spack/bin/spack", line 183, in _main
return_val = command(parser, args)
File "/home/christoph/spack/lib/spack/spack/cmd/uninstall.py", line 213, in uninstall
answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
File "/home/christoph/spack/lib/spack/llnl/util/tty/__init__.py", line 200, in get_yes_or_no
ans = raw_input().lower()
NameError: name 'raw_input' is not defined
|
NameError
|
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(self.phases, self._InstallPhase_phases):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
|
def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
# We are in the child process. This means that our sys.stdin is
# equal to open(os.devnull). Python did this to prevent our process
# and the parent process from possible simultaneous reading from
# the original standard input. But we assume that the parent
# process is not going to read from it till we are done here,
# otherwise it should not have passed us the copy of the stream.
# Thus, we are free to work with the the copy (input_stream)
# however we want. For example, we might want to call functions
# (e.g. raw_input()) that implicitly read from whatever stream is
# assigned to sys.stdin. Since we want them to work with the
# original input stream, we are making the following assignment:
sys.stdin = input_stream
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building {0} [{1}]".format(self.name, self.build_system_class))
self.stage.keep = keep_stage
with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
# FIXME : refactor this assignment
self.log_path = log_path
self.env_path = env_path
dump_environment(env_path)
# Spawn a daemon that reads from a pipe and redirects
# everything to log_path
redirection_context = log_output(
log_path,
echo=verbose,
force_color=sys.stdout.isatty(),
debug=True,
input_stream=input_stream,
)
with redirection_context as log_redirection:
for phase_name, phase in zip(self.phases, self._InstallPhase_phases):
tty.msg("Executing phase : '{0}'".format(phase_name))
# Redirect stdout and stderr to daemon pipe
with log_redirection:
getattr(self, phase)(self.spec, self.prefix)
self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
|
https://github.com/spack/spack/issues/3960
|
$ spack uninstall --all
...
==> Do you want to proceed? [y/N] Traceback (most recent call last):
File "/home/christoph/spack/bin/spack", line 220, in <module>
main(sys.argv)
File "/home/christoph/spack/bin/spack", line 216, in main
_main(args, unknown)
File "/home/christoph/spack/bin/spack", line 183, in _main
return_val = command(parser, args)
File "/home/christoph/spack/lib/spack/spack/cmd/uninstall.py", line 213, in uninstall
answer = tty.get_yes_or_no('Do you want to proceed?', default=False)
File "/home/christoph/spack/lib/spack/llnl/util/tty/__init__.py", line 200, in get_yes_or_no
ans = raw_input().lower()
NameError: name 'raw_input' is not defined
|
NameError
|
def install(self, spec, prefix):
env["GEOS_DIR"] = spec["geos"].prefix
setup_py("install", "--prefix=%s" % prefix)
# We are not sure if this fix is needed before Python 3.5.2.
# If it is needed, this test should be changed.
# See: https://github.com/LLNL/spack/pull/1964
if spec["python"].version >= Version("3.5.2"):
# Use symlinks to join the two mpl_toolkits/ directories into
# one, inside of basemap. This is because Basemap tries to
# "add to" an existing package in Matplotlib, which is only
# legal Python for "Implicit Namespace Packages":
# https://www.python.org/dev/peps/pep-0420/
# https://github.com/Homebrew/homebrew-python/issues/112
# In practice, Python will see only the basemap version of
# mpl_toolkits
path_m = find_package_dir(spec["py-matplotlib"].prefix, "mpl_toolkits")
path_b = find_package_dir(spec.prefix, "mpl_toolkits")
link_dir(path_m, path_b)
|
def install(self, spec, prefix):
env["GEOS_DIR"] = spec["geos"].prefix
setup_py("install", "--prefix=%s" % prefix)
|
https://github.com/spack/spack/issues/1948
|
import mpl_toolkits.basemap
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/home2/rpfische/spack3/opt/spack/linux-centos7-x86_64/gcc-4.9.3/py-basemap-1.0.7-ruhltkr62akc3yp2llvsqupikvhkq5b5/lib/python3.5/site-packages/mpl_toolkits/basemap/__init__.py", line 31, in <module>
from mpl_toolkits.axes_grid1 import make_axes_locatable
ImportError: No module named 'mpl_toolkits.axes_grid1'
|
ImportError
|
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
architecture is not a string type, then check if either the platform,
target or operating system are concretized. If any of the fields are
changed then return True. If everything is concretized (i.e the
architecture attribute is a namedtuple of classes) then return False.
If the target is a string type, then convert the string into a
concretized architecture. If it has no architecture and the root of the
DAG has an architecture, then use the root otherwise use the defaults
on the platform.
"""
root_arch = spec.root.architecture
sys_arch = spack.spec.ArchSpec(spack.architecture.sys_type())
spec_changed = False
if spec.architecture is None:
spec.architecture = spack.spec.ArchSpec(sys_arch)
spec_changed = True
default_archs = list(x for x in [root_arch, sys_arch] if x)
for arch in default_archs:
if spec.architecture.concrete:
break
replacement_fields = [
k
for k, v in iteritems(arch.to_cmp_dict())
if v and not getattr(spec.architecture, k)
]
for field in replacement_fields:
setattr(spec.architecture, field, getattr(arch, field))
spec_changed = True
if not spec.architecture.concrete:
raise InsufficientArchitectureInfoError(spec, default_archs)
return spec_changed
|
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
architecture is not a string type, then check if either the platform,
target or operating system are concretized. If any of the fields are
changed then return True. If everything is concretized (i.e the
architecture attribute is a namedtuple of classes) then return False.
If the target is a string type, then convert the string into a
concretized architecture. If it has no architecture and the root of the
DAG has an architecture, then use the root otherwise use the defaults
on the platform.
"""
root_arch = spec.root.architecture
sys_arch = spack.spec.ArchSpec(spack.architecture.sys_type())
spec_changed = False
if spec.architecture is None:
spec.architecture = spack.spec.ArchSpec(sys_arch)
spec_changed = True
default_archs = [root_arch, sys_arch]
while not spec.architecture.concrete and default_archs:
arch = default_archs.pop(0)
replacement_fields = [
k
for k, v in iteritems(arch.to_cmp_dict())
if v and not getattr(spec.architecture, k)
]
for field in replacement_fields:
setattr(spec.architecture, field, getattr(arch, field))
spec_changed = True
return spec_changed
|
https://github.com/spack/spack/issues/2587
|
$ spack arch
cray-CNL-mic_knl
$ spack spec tau
Input spec
--------------------------------
tau
Normalized
--------------------------------
tau
^pdt
Concretized
--------------------------------
Traceback (most recent call last):
File "/home/kumbhar/spack/bin/spack", line 198, in <module>
main()
File "/home/kumbhar/spack/bin/spack", line 175, in main
return_val = command(parser, args)
File "/xxxx/home/kumbhar/spack/lib/spack/spack/cmd/spec.py", line 83, in spec
spec.concretize()
File "/xxxx/home/kumbhar/spack/lib/spack/spack/spec.py", line 1543, in concretize
self._concretize_helper())
File "/xxxx/home/kumbhar/spack/lib/spack/spack/spec.py", line 1362, in _concretize_helper
changed |= dep.spec._concretize_helper(presets, visited)
File "/xxxx/home/kumbhar/spack/lib/spack/spack/spec.py", line 1362, in _concretize_helper
changed |= dep.spec._concretize_helper(presets, visited)
File "/xxxx/home/kumbhar/spack/lib/spack/spack/spec.py", line 1362, in _concretize_helper
changed |= dep.spec._concretize_helper(presets, visited)
File "/xxxx/home/kumbhar/spack/lib/spack/spack/spec.py", line 1372, in _concretize_helper
(spack.concretizer.concretize_architecture(self),
File "/xxxx/home/kumbhar/spack/lib/spack/spack/concretize.py", line 270, in concretize_architecture
replacement_fields = [k for k, v in arch.to_cmp_dict().iteritems()
AttributeError: 'NoneType' object has no attribute 'to_cmp_dict'
|
AttributeError
|
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(spack.store.root)
transform_args = []
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(spack.store.root)
with working_dir(wd):
files = [spack.store.db._index_path]
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
|
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(spack.install_path)
transform_args = []
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(spack.install_path)
with working_dir(wd):
files = [spack.installed_db._index_path]
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
|
https://github.com/spack/spack/issues/2123
|
$ spack install eigen
==> Installing eigen
==> metis is already installed in /blues/gpfs/home/software/spack-0.9.1/opt/spack/linux-centos6-x86_64/gcc-6.1.0/metis-5.1.0-k4pqzigdgdsuk2r3xls7eyhk6uwvxp46
Traceback (most recent call last):
File "/soft/spack-0.9.1/bin/spack", line 202, in <module>
main()
File "/soft/spack-0.9.1/bin/spack", line 179, in main
return_val = command(parser, args)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/cmd/install.py", line 117, in install
package.do_install(**kwargs)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/package.py", line 1183, in do_install
dirty=dirty)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/package.py", line 1160, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/database.py", line 79, in converter
return function(self, spec_like, *args, **kwargs)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/database.py", line 527, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/database.py", line 519, in _get_matching_spec_key
match = self.query_one(spec, **kwargs)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/database.py", line 653, in query_one
assert len(concrete_specs) <= 1
AssertionError
|
AssertionError
|
def graph(parser, args):
concretize = not args.normalize
if args.installed:
if args.specs:
tty.die("Can't specify specs with --installed")
args.dot = True
specs = spack.store.db.query()
else:
specs = spack.cmd.parse_specs(args.specs, normalize=True, concretize=concretize)
if not specs:
setup_parser.parser.print_help()
return 1
deptype = nobuild
if args.deptype:
deptype = tuple(args.deptype.split(","))
validate_deptype(deptype)
deptype = canonical_deptype(deptype)
if args.dot: # Dot graph only if asked for.
graph_dot(specs, static=args.static, deptype=deptype)
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)
|
def graph(parser, args):
concretize = not args.normalize
if args.installed:
if args.specs:
tty.die("Can't specify specs with --installed")
args.dot = True
specs = spack.installed_db.query()
else:
specs = spack.cmd.parse_specs(args.specs, normalize=True, concretize=concretize)
if not specs:
setup_parser.parser.print_help()
return 1
deptype = nobuild
if args.deptype:
deptype = tuple(args.deptype.split(","))
validate_deptype(deptype)
deptype = canonical_deptype(deptype)
if args.dot: # Dot graph only if asked for.
graph_dot(specs, static=args.static, deptype=deptype)
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)
|
https://github.com/spack/spack/issues/2316
|
$ spack setup pism@dev
Traceback (most recent call last):
File "/home2/rpfische/spack4/bin/spack", line 203, in <module>
main()
File "/home2/rpfische/spack4/bin/spack", line 180, in main
return_val = command(parser, args)
File "/home2/rpfische/spack4/lib/spack/spack/cmd/setup.py", line 133, in setup
with spack.installed_db.write_transaction():
AttributeError: 'module' object has no attribute 'installed_db'
|
AttributeError
|
def setup(self, args):
if not args.spec:
tty.die("spack setup requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack setup only takes one spec.")
# Take a write lock before checking for existence.
with spack.store.db.write_transaction():
spec = specs[0]
if not spack.repo.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, spack.repo.first_repo(), None, True)
return
if not spec.versions.concrete:
tty.die(
"spack setup spec must have a single, concrete version. "
"Did you forget a package version number?"
)
spec.concretize()
package = spack.repo.get(spec)
if not isinstance(package, spack.CMakePackage):
tty.die(
"Support for {0} derived packages not yet implemented".format(
package.build_system_class
)
)
# It's OK if the package is already installed.
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
# Install dependencies if requested to do so
if not args.ignore_deps:
parser = argparse.ArgumentParser()
install.setup_parser(parser)
inst_args = copy.deepcopy(args)
inst_args = parser.parse_args(
["--only=dependencies"] + args.spec, namespace=inst_args
)
install.install(parser, inst_args)
# Generate spconfig.py
tty.msg("Generating spconfig.py [{0}]".format(package.spec.cshort_spec))
write_spconfig(package)
# Install this package to register it in the DB and permit
# module file regeneration
inst_args = copy.deepcopy(args)
inst_args = parser.parse_args(
["--only=package", "--fake"] + args.spec, namespace=inst_args
)
install.install(parser, inst_args)
|
def setup(self, args):
if not args.spec:
tty.die("spack setup requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack setup only takes one spec.")
# Take a write lock before checking for existence.
with spack.installed_db.write_transaction():
spec = specs[0]
if not spack.repo.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, spack.repo.first_repo(), None, True)
return
if not spec.versions.concrete:
tty.die(
"spack setup spec must have a single, concrete version. "
"Did you forget a package version number?"
)
spec.concretize()
package = spack.repo.get(spec)
if not isinstance(package, spack.CMakePackage):
tty.die(
"Support for {0} derived packages not yet implemented".format(
package.build_system_class
)
)
# It's OK if the package is already installed.
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
# Install dependencies if requested to do so
if not args.ignore_deps:
parser = argparse.ArgumentParser()
install.setup_parser(parser)
inst_args = copy.deepcopy(args)
inst_args = parser.parse_args(
["--only=dependencies"] + args.spec, namespace=inst_args
)
install.install(parser, inst_args)
# Generate spconfig.py
tty.msg("Generating spconfig.py [{0}]".format(package.spec.cshort_spec))
write_spconfig(package)
# Install this package to register it in the DB and permit
# module file regeneration
inst_args = copy.deepcopy(args)
inst_args = parser.parse_args(
["--only=package", "--fake"] + args.spec, namespace=inst_args
)
install.install(parser, inst_args)
|
https://github.com/spack/spack/issues/2316
|
$ spack setup pism@dev
Traceback (most recent call last):
File "/home2/rpfische/spack4/bin/spack", line 203, in <module>
main()
File "/home2/rpfische/spack4/bin/spack", line 180, in main
return_val = command(parser, args)
File "/home2/rpfische/spack4/lib/spack/spack/cmd/setup.py", line 133, in setup
with spack.installed_db.write_transaction():
AttributeError: 'module' object has no attribute 'installed_db'
|
AttributeError
|
def install(self, spec, prefix):
self.check_variants(spec)
base_components = "ALL" # when in doubt, install everything
mpi_components = ""
mkl_components = ""
daal_components = ""
ipp_components = ""
if not spec.satisfies("+all"):
all_components = get_all_components()
regex = "(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)"
base_components = filter_pick(all_components, re.compile(regex).search)
regex = "(icsxe|imb|mpi|itac|intel-ta|intel-tc|clck)"
mpi_components = filter_pick(all_components, re.compile(regex).search)
mkl_components = filter_pick(all_components, re.compile("(mkl)").search)
daal_components = filter_pick(all_components, re.compile("(daal)").search)
ipp_components = filter_pick(all_components, re.compile("(ipp)").search)
regex = "(gdb|vtune|inspector|advisor)"
tool_components = filter_pick(all_components, re.compile(regex).search)
components = base_components
if not spec.satisfies("+all"):
if spec.satisfies("+mpi"):
components += mpi_components
if spec.satisfies("+mkl"):
components += mkl_components
if spec.satisfies("+daal"):
components += daal_components
if spec.satisfies("+ipp"):
components += ipp_components
if spec.satisfies("+tools") and (
spec.satisfies("@cluster") or spec.satisfies("@professional")
):
components += tool_components
if spec.satisfies("+all"):
self.intel_components = "ALL"
else:
self.intel_components = ";".join(components)
IntelInstaller.install(self, spec, prefix)
absbindir = os.path.dirname(os.path.realpath(os.path.join(self.prefix.bin, "icc")))
abslibdir = os.path.dirname(
os.path.realpath(os.path.join(self.prefix.lib, "intel64", "libimf.a"))
)
os.symlink(self.global_license_file, os.path.join(absbindir, "license.lic"))
if spec.satisfies("+tools") and (
spec.satisfies("@cluster") or spec.satisfies("@professional")
):
inspector_dir = "inspector_xe/licenses"
advisor_dir = "advisor_xe/licenses"
vtune_amplifier_dir = "vtune_amplifier_xe/licenses"
year = int(str(self.version).split(".")[1])
if year >= 2017:
inspector_dir = "inspector/licenses"
advisor_dir = "advisor/licenses"
os.mkdir(os.path.join(self.prefix, inspector_dir))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, inspector_dir, "license.lic"),
)
os.mkdir(os.path.join(self.prefix, advisor_dir))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, advisor_dir, "license.lic"),
)
os.mkdir(os.path.join(self.prefix, vtune_amplifier_dir))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, vtune_amplifier_dir, "license.lic"),
)
if (spec.satisfies("+all") or spec.satisfies("+mpi")) and spec.satisfies(
"@cluster"
):
for ifile in os.listdir(os.path.join(self.prefix, "itac")):
if os.path.isdir(os.path.join(self.prefix, "itac", ifile)):
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "itac", ifile, "license.lic"),
)
if os.path.isdir(os.path.join(self.prefix, "itac", ifile, "intel64")):
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "itac", ifile, "intel64", "license.lic"),
)
if spec.satisfies("~newdtags"):
wrappers = [
"mpif77",
"mpif77",
"mpif90",
"mpif90",
"mpigcc",
"mpigcc",
"mpigxx",
"mpigxx",
"mpiicc",
"mpiicc",
"mpiicpc",
"mpiicpc",
"mpiifort",
"mpiifort",
]
wrapper_paths = []
for root, dirs, files in os.walk(spec.prefix):
for name in files:
if name in wrappers:
wrapper_paths.append(os.path.join(spec.prefix, root, name))
for wrapper in wrapper_paths:
filter_file(r"-Xlinker --enable-new-dtags", r" ", wrapper)
if spec.satisfies("+rpath"):
for compiler_command in ["icc", "icpc", "ifort"]:
cfgfilename = os.path.join(absbindir, "%s.cfg" % compiler_command)
with open(cfgfilename, "w") as f:
f.write("-Xlinker -rpath -Xlinker %s\n" % abslibdir)
os.symlink(
os.path.join(self.prefix.man, "common", "man1"),
os.path.join(self.prefix.man, "man1"),
)
|
def install(self, spec, prefix):
self.check_variants(spec)
base_components = "ALL" # when in doubt, install everything
mpi_components = ""
mkl_components = ""
daal_components = ""
ipp_components = ""
if not spec.satisfies("+all"):
all_components = get_all_components()
regex = "(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)"
base_components = filter_pick(all_components, re.compile(regex).search)
regex = "(icsxe|imb|mpi|itac|intel-ta|intel-tc|clck)"
mpi_components = filter_pick(all_components, re.compile(regex).search)
mkl_components = filter_pick(all_components, re.compile("(mkl)").search)
daal_components = filter_pick(all_components, re.compile("(daal)").search)
ipp_components = filter_pick(all_components, re.compile("(ipp)").search)
regex = "(gdb|vtune|inspector|advisor)"
tool_components = filter_pick(all_components, re.compile(regex).search)
components = base_components
if not spec.satisfies("+all"):
if spec.satisfies("+mpi"):
components += mpi_components
if spec.satisfies("+mkl"):
components += mkl_components
if spec.satisfies("+daal"):
components += daal_components
if spec.satisfies("+ipp"):
components += ipp_components
if spec.satisfies("+tools") and (
spec.satisfies("@cluster") or spec.satisfies("@professional")
):
components += tool_components
if spec.satisfies("+all"):
self.intel_components = "ALL"
else:
self.intel_components = ";".join(components)
IntelInstaller.install(self, spec, prefix)
absbindir = os.path.dirname(os.path.realpath(os.path.join(self.prefix.bin, "icc")))
abslibdir = os.path.dirname(
os.path.realpath(os.path.join(self.prefix.lib, "intel64", "libimf.a"))
)
os.symlink(self.global_license_file, os.path.join(absbindir, "license.lic"))
if spec.satisfies("+tools") and (
spec.satisfies("@cluster") or spec.satisfies("@professional")
):
os.mkdir(os.path.join(self.prefix, "inspector_xe/licenses"))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "inspector_xe/licenses", "license.lic"),
)
os.mkdir(os.path.join(self.prefix, "advisor_xe/licenses"))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "advisor_xe/licenses", "license.lic"),
)
os.mkdir(os.path.join(self.prefix, "vtune_amplifier_xe/licenses"))
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "vtune_amplifier_xe/licenses", "license.lic"),
)
if (spec.satisfies("+all") or spec.satisfies("+mpi")) and spec.satisfies(
"@cluster"
):
for ifile in os.listdir(os.path.join(self.prefix, "itac")):
if os.path.isdir(os.path.join(self.prefix, "itac", ifile)):
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "itac", ifile, "license.lic"),
)
if os.path.isdir(os.path.join(self.prefix, "itac", ifile, "intel64")):
os.symlink(
self.global_license_file,
os.path.join(self.prefix, "itac", ifile, "intel64", "license.lic"),
)
if spec.satisfies("~newdtags"):
wrappers = [
"mpif77",
"mpif77",
"mpif90",
"mpif90",
"mpigcc",
"mpigcc",
"mpigxx",
"mpigxx",
"mpiicc",
"mpiicc",
"mpiicpc",
"mpiicpc",
"mpiifort",
"mpiifort",
]
wrapper_paths = []
for root, dirs, files in os.walk(spec.prefix):
for name in files:
if name in wrappers:
wrapper_paths.append(os.path.join(spec.prefix, root, name))
for wrapper in wrapper_paths:
filter_file(r"-Xlinker --enable-new-dtags", r" ", wrapper)
if spec.satisfies("+rpath"):
for compiler_command in ["icc", "icpc", "ifort"]:
cfgfilename = os.path.join(absbindir, "%s.cfg" % compiler_command)
with open(cfgfilename, "w") as f:
f.write("-Xlinker -rpath -Xlinker %s\n" % abslibdir)
os.symlink(
os.path.join(self.prefix.man, "common", "man1"),
os.path.join(self.prefix.man, "man1"),
)
|
https://github.com/spack/spack/issues/2153
|
Traceback (most recent call last):
File "/soft/spack-0.9.1/bin/spack", line 202, in <module>
main()
File "/soft/spack-0.9.1/bin/spack", line 179, in main
return_val = command(parser, args)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/cmd/install.py", line 340, in install
package.do_install(**kwargs)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/package.py", line 1270, in do_install
spack.build_environment.fork(self, build_process, dirty=dirty)
File "/blues/gpfs/home/software/spack-0.9.1/lib/spack/spack/build_environment.py", line 547, in fork
raise exception
OSError: [Errno 2] No such file or directory: '/blues/gpfs/home/software/spack-0.9.1/opt/spack/linux-centos6-x86_64/gcc-4.4.7/intel-parallel-studio-professional.2017.0-7o7tfdlavk3ibijg7mnqwpnutao5busx/inspector_xe/licenses'
|
OSError
|
def __init__(self, path, start=0, length=0):
"""Construct a new lock on the file at ``path``.
By default, the lock applies to the whole file. Optionally,
caller can specify a byte range beginning ``start`` bytes from
the start of the file and extending ``length`` bytes from there.
This exposes a subset of fcntl locking functionality. It does
not currently expose the ``whence`` parameter -- ``whence`` is
always os.SEEK_SET and ``start`` is always evaluated from the
beginning of the file.
"""
self.path = path
self._file = None
self._reads = 0
self._writes = 0
# byte range parameters
self._start = start
self._length = length
# PID and host of lock holder
self.pid = self.old_pid = None
self.host = self.old_host = None
|
def __init__(self, file_path):
self._file_path = file_path
self._fd = None
self._reads = 0
self._writes = 0
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def _lock(self, op, timeout=_default_timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
The lock is implemented as a spin lock using a nonblocking call
to lockf().
On acquiring an exclusive lock, the lock writes this process's
pid and host to the lock file, in case the holding process needs
to be killed later.
If the lock times out, it raises a ``LockError``.
"""
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
# If we could write the file, we'd have opened it 'r+'.
# Raise an error when we attempt to upgrade to a write lock.
if op == fcntl.LOCK_EX:
if self._file and self._file.mode == "r":
raise LockError(
"Can't take exclusive lock on read-only file: %s" % self.path
)
# Create file and parent directories if they don't exist.
if self._file is None:
self._ensure_parent_directory()
# Prefer to open 'r+' to allow upgrading to write
# lock later if possible. Open read-only if we can't
# write the lock file at all.
os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), "r+"
if os.path.exists(self.path) and not os.access(self.path, os.W_OK):
os_mode, fd_mode = os.O_RDONLY, "r"
fd = os.open(self.path, os_mode)
self._file = os.fdopen(fd, fd_mode)
# Try to get the lock (will raise if not available.)
fcntl.lockf(
self._file, op | fcntl.LOCK_NB, self._length, self._start, os.SEEK_SET
)
# All locks read the owner PID and host
self._read_lock_data()
# Exclusive locks write their PID/host
if op == fcntl.LOCK_EX:
self._write_lock_data()
return
except IOError as error:
if error.errno == errno.EAGAIN or error.errno == errno.EACCES:
pass
else:
raise
time.sleep(_sleep_time)
raise LockError("Timed out waiting for lock.")
|
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
The lock is implemented as a spin lock using a nonblocking
call to lockf().
On acquiring an exclusive lock, the lock writes this process's
pid and host to the lock file, in case the holding process
needs to be killed later.
If the lock times out, it raises a ``LockError``.
"""
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
# If this is already open read-only and we want to
# upgrade to an exclusive write lock, close first.
if self._fd is not None:
flags = fcntl.fcntl(self._fd, fcntl.F_GETFL)
if op == fcntl.LOCK_EX and flags | os.O_RDONLY:
os.close(self._fd)
self._fd = None
if self._fd is None:
mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
self._fd = os.open(self._file_path, mode)
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
if error.errno == errno.EAGAIN or error.errno == errno.EACCES:
pass
else:
raise
time.sleep(_sleep_time)
raise LockError("Timed out waiting for lock.")
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
Releases the lock regardless of mode. Note that read locks may
be masquerading as write locks, but this removes either.
"""
fcntl.lockf(self._file, fcntl.LOCK_UN, self._length, self._start, os.SEEK_SET)
self._file.close()
self._file = None
|
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
Releases the lock regardless of mode. Note that read locks may
be masquerading as write locks, but this removes either.
"""
fcntl.lockf(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
Read and write locks can be acquired and released in arbitrary
order, but the POSIX lock is held until all local read and
write locks are released.
Returns True if it is the first acquire and actually acquires
the POSIX lock, False if it is a nested transaction.
"""
if self._reads == 0 and self._writes == 0:
tty.debug(
"READ LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]".format(self)
)
self._lock(fcntl.LOCK_SH, timeout=timeout) # can raise LockError.
self._reads += 1
return True
else:
self._reads += 1
return False
|
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
Read and write locks can be acquired and released in arbitrary
order, but the POSIX lock is held until all local read and
write locks are released.
Returns True if it is the first acquire and actually acquires
the POSIX lock, False if it is a nested transaction.
"""
if self._reads == 0 and self._writes == 0:
self._lock(fcntl.LOCK_SH, timeout) # can raise LockError.
self._reads += 1
return True
else:
self._reads += 1
return False
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
Read and write locks can be acquired and released in arbitrary
order, but the POSIX lock is held until all local read and
write locks are released.
Returns True if it is the first acquire and actually acquires
the POSIX lock, False if it is a nested transaction.
"""
if self._writes == 0:
tty.debug(
"WRITE LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]".format(self)
)
self._lock(fcntl.LOCK_EX, timeout=timeout) # can raise LockError.
self._writes += 1
return True
else:
self._writes += 1
return False
|
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
Read and write locks can be acquired and released in arbitrary
order, but the POSIX lock is held until all local read and
write locks are released.
Returns True if it is the first acquire and actually acquires
the POSIX lock, False if it is a nested transaction.
"""
if self._writes == 0:
self._lock(fcntl.LOCK_EX, timeout) # can raise LockError.
self._writes += 1
return True
else:
self._writes += 1
return False
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def release_read(self):
"""Releases a read lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Does limited correctness checking: if a read lock is released
when none are held, this will raise an assertion error.
"""
assert self._reads > 0
if self._reads == 1 and self._writes == 0:
tty.debug("READ LOCK: {0.path}[{0._start}:{0._length}] [Released]".format(self))
self._unlock() # can raise LockError.
self._reads -= 1
return True
else:
self._reads -= 1
return False
|
def release_read(self):
"""Releases a read lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Does limited correctness checking: if a read lock is released
when none are held, this will raise an assertion error.
"""
assert self._reads > 0
if self._reads == 1 and self._writes == 0:
self._unlock() # can raise LockError.
self._reads -= 1
return True
else:
self._reads -= 1
return False
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def release_write(self):
"""Releases a write lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Does limited correctness checking: if a read lock is released
when none are held, this will raise an assertion error.
"""
assert self._writes > 0
if self._writes == 1 and self._reads == 0:
tty.debug(
"WRITE LOCK: {0.path}[{0._start}:{0._length}] [Released]".format(self)
)
self._unlock() # can raise LockError.
self._writes -= 1
return True
else:
self._writes -= 1
return False
|
def release_write(self):
"""Releases a write lock.
Returns True if the last recursive lock was released, False if
there are still outstanding locks.
Does limited correctness checking: if a read lock is released
when none are held, this will raise an assertion error.
"""
assert self._writes > 0
if self._writes == 1 and self._reads == 0:
self._unlock() # can raise LockError.
self._writes -= 1
return True
else:
self._writes -= 1
return False
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = which("git")
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.spack_root):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
# Get symbolic branch name and strip any special chars (mainly '/')
symbolic = git(
"rev-parse", "--abbrev-ref", "--short", "HEAD", output=str
).strip()
symbolic = re.sub(r"[^\w.-]", "-", symbolic)
# Get the commit hash too.
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
|
def _debug_tarball_suffix():
now = datetime.now()
suffix = now.strftime("%Y-%m-%d-%H%M%S")
git = which("git")
if not git:
return "nobranch-nogit-%s" % suffix
with working_dir(spack.spack_root):
if not os.path.isdir(".git"):
return "nobranch.nogit.%s" % suffix
symbolic = git(
"rev-parse", "--abbrev-ref", "--short", "HEAD", output=str
).strip()
commit = git("rev-parse", "--short", "HEAD", output=str).strip()
if symbolic == commit:
return "nobranch.%s.%s" % (commit, suffix)
else:
return "%s.%s.%s" % (symbolic, commit, suffix)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
base = os.path.basename(spack.install_path)
transform_args = []
if "GNU" in tar("--version", output=str):
transform_args = ["--transform", "s/^%s/%s/" % (base, tarball_name)]
else:
transform_args = ["-s", "/^%s/%s/" % (base, tarball_name)]
wd = os.path.dirname(spack.install_path)
with working_dir(wd):
files = [spack.installed_db._index_path]
files += glob("%s/*/*/*/.spack/spec.yaml" % base)
files = [os.path.relpath(f) for f in files]
args = ["-czf", tarball_path]
args += transform_args
args += files
tar(*args)
tty.msg("Created %s" % tarball_name)
|
def create_db_tarball(args):
tar = which("tar")
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
with working_dir(spack.spack_root):
files = [spack.installed_db._index_path]
files += glob("%s/*/*/*/.spack/spec.yaml" % spack.install_path)
files = [os.path.relpath(f) for f in files]
tar("-czf", tarball_path, *files)
tty.msg("Created %s" % tarball_name)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def diy(self, args):
if not args.spec:
tty.die("spack diy requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack diy only takes one spec.")
spec = specs[0]
if not spack.repo.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, spack.repo.first_repo(), None, True)
return
if not spec.versions.concrete:
tty.die(
"spack diy spec must have a single, concrete version. "
"Did you forget a package version number?"
)
spec.concretize()
package = spack.repo.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
package.do_install(
keep_prefix=args.keep_prefix,
install_deps=not args.ignore_deps,
verbose=not args.quiet,
keep_stage=True, # don't remove source dir for DIY.
dirty=args.dirty,
)
|
def diy(self, args):
if not args.spec:
tty.die("spack diy requires a package spec argument.")
specs = spack.cmd.parse_specs(args.spec)
if len(specs) > 1:
tty.die("spack diy only takes one spec.")
# Take a write lock before checking for existence.
with spack.installed_db.write_transaction():
spec = specs[0]
if not spack.repo.exists(spec.name):
tty.warn("No such package: %s" % spec.name)
create = tty.get_yes_or_no("Create this package?", default=False)
if not create:
tty.msg("Exiting without creating.")
sys.exit(1)
else:
tty.msg("Running 'spack edit -f %s'" % spec.name)
edit_package(spec.name, spack.repo.first_repo(), None, True)
return
if not spec.versions.concrete:
tty.die(
"spack diy spec must have a single, concrete version. "
"Did you forget a package version number?"
)
spec.concretize()
package = spack.repo.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
tty.msg("Uninstall or try adding a version suffix for this DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.
package.stage = DIYStage(os.getcwd())
# TODO: make this an argument, not a global.
spack.do_checksum = False
package.do_install(
keep_prefix=args.keep_prefix,
install_deps=not args.ignore_deps,
verbose=not args.quiet,
keep_stage=True, # don't remove source dir for DIY.
dirty=args.dirty,
)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.repo.get(spec)
package.do_install(
keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
install_deps=not args.ignore_deps,
install_self=not args.deps_only,
make_jobs=args.jobs,
run_tests=args.run_tests,
verbose=args.verbose,
fake=args.fake,
dirty=args.dirty,
explicit=True,
)
|
def install(parser, args):
if not args.packages:
tty.die("install requires at least one package argument")
if args.jobs is not None:
if args.jobs <= 0:
tty.die("The -j option must be a positive integer!")
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.repo.get(spec)
with spack.installed_db.write_transaction():
package.do_install(
keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
install_deps=not args.ignore_deps,
install_self=not args.deps_only,
make_jobs=args.jobs,
run_tests=args.run_tests,
verbose=args.verbose,
fake=args.fake,
dirty=args.dirty,
explicit=True,
)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def uninstall(parser, args):
if not args.packages and not args.all:
tty.die("uninstall requires at least one package argument.")
uninstall_list = get_uninstall_list(args)
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print("")
spack.cmd.display_specs(uninstall_list, **display_args)
print("")
spack.cmd.ask_for_confirmation("Do you want to proceed ? ")
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
|
def uninstall(parser, args):
if not args.packages and not args.all:
tty.die("uninstall requires at least one package argument.")
with spack.installed_db.write_transaction():
uninstall_list = get_uninstall_list(args)
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print("")
spack.cmd.display_specs(uninstall_list, **display_args)
print("")
spack.cmd.ask_for_confirmation("Do you want to proceed ? ")
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def __init__(self, root, db_dir=None):
"""Create a Database for Spack installations under ``root``.
A Database is a cache of Specs data from ``$prefix/spec.yaml``
files in Spack installation directories.
By default, Database files (data and lock files) are stored
under ``root/.spack-db``, which is created if it does not
exist. This is the ``db_dir``.
The Database will attempt to read an ``index.yaml`` file in
``db_dir``. If it does not find one, it will be created when
needed by scanning the entire Database root for ``spec.yaml``
files according to Spack's ``DirectoryLayout``.
Caller may optionally provide a custom ``db_dir`` parameter
where data will be stored. This is intended to be used for
testing the Database class.
"""
self.root = root
if db_dir is None:
# If the db_dir is not provided, default to within the db root.
self._db_dir = join_path(self.root, _db_dirname)
else:
# Allow customizing the database directory location for testing.
self._db_dir = db_dir
# Set up layout of database files within the db dir
self._index_path = join_path(self._db_dir, "index.yaml")
self._lock_path = join_path(self._db_dir, "lock")
# This is for other classes to use to lock prefix directories.
self.prefix_lock_path = join_path(self._db_dir, "prefix_lock")
# Create needed directories and files
if not os.path.exists(self._db_dir):
mkdirp(self._db_dir)
# initialize rest of state.
self.lock = Lock(self._lock_path)
self._data = {}
# whether there was an error at the start of a read transaction
self._error = None
|
def __init__(self, root, db_dir=None):
"""Create a Database for Spack installations under ``root``.
A Database is a cache of Specs data from ``$prefix/spec.yaml``
files in Spack installation directories.
By default, Database files (data and lock files) are stored
under ``root/.spack-db``, which is created if it does not
exist. This is the ``db_dir``.
The Database will attempt to read an ``index.yaml`` file in
``db_dir``. If it does not find one, it will be created when
needed by scanning the entire Database root for ``spec.yaml``
files according to Spack's ``DirectoryLayout``.
Caller may optionally provide a custom ``db_dir`` parameter
where data will be stored. This is intended to be used for
testing the Database class.
"""
self.root = root
if db_dir is None:
# If the db_dir is not provided, default to within the db root.
self._db_dir = join_path(self.root, _db_dirname)
else:
# Allow customizing the database directory location for testing.
self._db_dir = db_dir
# Set up layout of database files within the db dir
self._index_path = join_path(self._db_dir, "index.yaml")
self._lock_path = join_path(self._db_dir, "lock")
# Create needed directories and files
if not os.path.exists(self._db_dir):
mkdirp(self._db_dir)
if not os.path.exists(self._lock_path):
touch(self._lock_path)
# initialize rest of state.
self.lock = Lock(self._lock_path)
self._data = {}
# whether there was an error at the start of a read transaction
self._error = None
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def _get_lock(self, key):
"""Create a lock for a key, if necessary, and return a lock object."""
if key not in self._locks:
self._locks[key] = Lock(self._lock_path(key))
return self._locks[key]
|
def _get_lock(self, key):
"""Create a lock for a key, if necessary, and return a lock object."""
if key not in self._locks:
lock_file = self._lock_path(key)
if not os.path.exists(lock_file):
touch(lock_file)
self._locks[key] = Lock(lock_file)
return self._locks[key]
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
install_self=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=False,
install_phases=install_phases,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
:param keep_prefix: Keep install prefix on failure. By default, \
destroys it.
:param keep_stage: By default, stage is destroyed only if there are \
no exceptions during build. Set to True to keep the stage
even with exceptions.
:param install_deps: Install dependencies before installing this \
package
:param install_self: Install this package once dependencies have \
been installed.
:param fake: Don't really build; install fake stub files instead.
:param skip_patch: Skip patch stage of build if True.
:param verbose: Display verbose build output (by default, suppresses \
it)
:param dirty: Don't clean the build environment before installing.
:param make_jobs: Number of make jobs to use for install. Default is \
ncpus
:param force: Install again, even if already installed.
:param run_tests: Run tests within the package's install()
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# No installation needed if package is external
if self.spec.external:
tty.msg("%s is externally installed in %s" % (self.name, self.spec.external))
return
# Ensure package is not already installed
layout = spack.install_layout
with self._prefix_read_lock():
if "install" in install_phases and layout.check_installed(self.spec):
tty.msg("%s is already installed in %s" % (self.name, self.prefix))
rec = spack.installed_db.get_record(self.spec)
if (not rec.explicit) and explicit:
with spack.installed_db.write_transaction():
rec = spack.installed_db.get_record(self.spec)
rec.explicit = True
return
tty.msg("Installing %s" % self.name)
# First, install dependencies recursively.
if install_deps:
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
install_self=True,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
)
# The rest of this function is to install ourself,
# once deps have been installed.
if not install_self:
return
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# ------------------- BEGIN def build_process()
# Then install the package itself.
def build_process():
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building %s" % self.name)
self.stage.keep = keep_stage
self.install_phases = install_phases
self.build_directory = join_path(self.stage.path, "spack-build")
self.source_directory = self.stage.source_path
with contextlib.nested(self.stage, self._prefix_write_lock()):
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
try:
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
log_file = open(log_path, "w")
with log_output(log_file, verbose, sys.stdout.isatty(), True):
dump_environment(env_path)
self.install(self.spec, self.prefix)
except ProcessError as e:
# Annotate ProcessErrors with the location of
# the build log
e.build_log = log_path
raise e
# Ensure that something was actually installed.
if "install" in self.install_phases:
self.sanity_check_prefix()
# Copy provenance into the install directory on success
if "provenance" in self.install_phases:
log_install_path = layout.build_log_path(self.spec)
env_install_path = layout.build_env_path(self.spec)
packages_dir = layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
try:
# log_install_path and env_install_path are here
shutil.rmtree(packages_dir)
except:
pass
install(log_path, log_install_path)
install(env_path, env_install_path)
dump_packages(self.spec, packages_dir)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
# ------------------- END def build_process()
try:
# Create the install prefix and fork the build process.
spack.install_layout.create_install_directory(self.spec)
except directory_layout.InstallDirectoryAlreadyExistsError:
if "install" in install_phases:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone's data)
tty.warn("Keeping existing install prefix in place.")
raise
else:
# We're not installing anyway, so don't worry if someone
# else has already written in the install directory
pass
try:
spack.build_environment.fork(self, build_process, dirty=dirty)
except:
# remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
else:
tty.warn(
"Keeping install prefix in place despite error.",
"Spack will think this package is installed. "
+ "Manually remove this directory to fix:",
self.prefix,
wrap=False,
)
raise
# Parent of the build process adds the new package to
# the database, so that we don't need to re-read from file.
# NOTE: add() implicitly acquires a write-lock
spack.installed_db.add(self.spec, spack.install_layout, explicit=explicit)
|
def do_install(
self,
keep_prefix=False,
keep_stage=False,
install_deps=True,
install_self=True,
skip_patch=False,
verbose=False,
make_jobs=None,
run_tests=False,
fake=False,
explicit=False,
dirty=False,
install_phases=install_phases,
):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
:param keep_prefix: Keep install prefix on failure. By default, \
destroys it.
:param keep_stage: By default, stage is destroyed only if there are \
no exceptions during build. Set to True to keep the stage
even with exceptions.
:param install_deps: Install dependencies before installing this \
package
:param install_self: Install this package once dependencies have \
been installed.
:param fake: Don't really build; install fake stub files instead.
:param skip_patch: Skip patch stage of build if True.
:param verbose: Display verbose build output (by default, suppresses \
it)
:param dirty: Don't clean the build environment before installing.
:param make_jobs: Number of make jobs to use for install. Default is \
ncpus
:param force: Install again, even if already installed.
:param run_tests: Run tests within the package's install()
"""
if not self.spec.concrete:
raise ValueError("Can only install concrete packages: %s." % self.spec.name)
# No installation needed if package is external
if self.spec.external:
tty.msg("%s is externally installed in %s" % (self.name, self.spec.external))
return
# Ensure package is not already installed
layout = spack.install_layout
if "install" in install_phases and layout.check_installed(self.spec):
tty.msg("%s is already installed in %s" % (self.name, self.prefix))
rec = spack.installed_db.get_record(self.spec)
if (not rec.explicit) and explicit:
with spack.installed_db.write_transaction():
rec = spack.installed_db.get_record(self.spec)
rec.explicit = True
return
tty.msg("Installing %s" % self.name)
# First, install dependencies recursively.
if install_deps:
for dep in self.spec.dependencies():
dep.package.do_install(
keep_prefix=keep_prefix,
keep_stage=keep_stage,
install_deps=install_deps,
install_self=True,
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
make_jobs=make_jobs,
run_tests=run_tests,
dirty=dirty,
)
# The rest of this function is to install ourself,
# once deps have been installed.
if not install_self:
return
# Set run_tests flag before starting build.
self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# ------------------- BEGIN def build_process()
# Then install the package itself.
def build_process():
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building %s" % self.name)
self.stage.keep = keep_stage
self.install_phases = install_phases
self.build_directory = join_path(self.stage.path, "spack-build")
self.source_directory = self.stage.source_path
with self.stage:
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
try:
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
log_file = open(log_path, "w")
with log_output(log_file, verbose, sys.stdout.isatty(), True):
dump_environment(env_path)
self.install(self.spec, self.prefix)
except ProcessError as e:
# Annotate ProcessErrors with the location of
# the build log
e.build_log = log_path
raise e
# Ensure that something was actually installed.
if "install" in self.install_phases:
self.sanity_check_prefix()
# Copy provenance into the install directory on success
if "provenance" in self.install_phases:
log_install_path = layout.build_log_path(self.spec)
env_install_path = layout.build_env_path(self.spec)
packages_dir = layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
try:
# log_install_path and env_install_path are here
shutil.rmtree(packages_dir)
except:
pass
install(log_path, log_install_path)
install(env_path, env_install_path)
dump_packages(self.spec, packages_dir)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
# ------------------- END def build_process()
try:
# Create the install prefix and fork the build process.
spack.install_layout.create_install_directory(self.spec)
except directory_layout.InstallDirectoryAlreadyExistsError:
if "install" in install_phases:
# Abort install if install directory exists.
# But do NOT remove it (you'd be overwriting someone's data)
tty.warn("Keeping existing install prefix in place.")
raise
else:
# We're not installing anyway, so don't worry if someone
# else has already written in the install directory
pass
try:
spack.build_environment.fork(self, build_process, dirty=dirty)
except:
# remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
else:
tty.warn(
"Keeping install prefix in place despite error.",
"Spack will think this package is installed. "
+ "Manually remove this directory to fix:",
self.prefix,
wrap=False,
)
raise
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
spack.installed_db.add(self.spec, spack.install_layout, explicit=explicit)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def build_process():
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building %s" % self.name)
self.stage.keep = keep_stage
self.install_phases = install_phases
self.build_directory = join_path(self.stage.path, "spack-build")
self.source_directory = self.stage.source_path
with contextlib.nested(self.stage, self._prefix_write_lock()):
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
try:
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
log_file = open(log_path, "w")
with log_output(log_file, verbose, sys.stdout.isatty(), True):
dump_environment(env_path)
self.install(self.spec, self.prefix)
except ProcessError as e:
# Annotate ProcessErrors with the location of
# the build log
e.build_log = log_path
raise e
# Ensure that something was actually installed.
if "install" in self.install_phases:
self.sanity_check_prefix()
# Copy provenance into the install directory on success
if "provenance" in self.install_phases:
log_install_path = layout.build_log_path(self.spec)
env_install_path = layout.build_env_path(self.spec)
packages_dir = layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
try:
# log_install_path and env_install_path are here
shutil.rmtree(packages_dir)
except:
pass
install(log_path, log_install_path)
install(env_path, env_install_path)
dump_packages(self.spec, packages_dir)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
|
def build_process():
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
start_time = time.time()
if not fake:
if not skip_patch:
self.do_patch()
else:
self.do_stage()
tty.msg("Building %s" % self.name)
self.stage.keep = keep_stage
self.install_phases = install_phases
self.build_directory = join_path(self.stage.path, "spack-build")
self.source_directory = self.stage.source_path
with self.stage:
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
if fake:
self.do_fake_install()
else:
# Do the real install in the source directory.
self.stage.chdir_to_source()
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), "spack-build.env")
try:
# Redirect I/O to a build log (and optionally to
# the terminal)
log_path = join_path(os.getcwd(), "spack-build.out")
log_file = open(log_path, "w")
with log_output(log_file, verbose, sys.stdout.isatty(), True):
dump_environment(env_path)
self.install(self.spec, self.prefix)
except ProcessError as e:
# Annotate ProcessErrors with the location of
# the build log
e.build_log = log_path
raise e
# Ensure that something was actually installed.
if "install" in self.install_phases:
self.sanity_check_prefix()
# Copy provenance into the install directory on success
if "provenance" in self.install_phases:
log_install_path = layout.build_log_path(self.spec)
env_install_path = layout.build_env_path(self.spec)
packages_dir = layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
try:
# log_install_path and env_install_path are here
shutil.rmtree(packages_dir)
except:
pass
install(log_path, log_install_path)
install(env_path, env_install_path)
dump_packages(self.spec, packages_dir)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
# Stop timer.
self._total_time = time.time() - start_time
build_time = self._total_time - self._fetch_time
tty.msg(
"Successfully installed %s" % self.name,
"Fetch: %s. Build: %s. Total: %s."
% (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)),
)
print_pkg(self.prefix)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def do_uninstall(self, force=False):
if not self.installed:
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
specs = spack.installed_db.query(self.spec, installed=True)
if specs:
spack.installed_db.remove(specs[0])
tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
return
else:
raise InstallError(str(self.spec) + " is not installed.")
if not force:
dependents = self.installed_dependents
if dependents:
raise PackageStillNeededError(self.spec, dependents)
# Pre-uninstall hook runs first.
with self._prefix_write_lock():
spack.hooks.pre_uninstall(self)
# Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
#
spack.installed_db.remove(self.spec)
tty.msg("Successfully uninstalled %s" % self.spec.short_spec)
# Once everything else is done, run post install hooks
spack.hooks.post_uninstall(self)
|
def do_uninstall(self, force=False):
if not self.installed:
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
specs = spack.installed_db.query(self.spec, installed=True)
if specs:
spack.installed_db.remove(specs[0])
tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
return
else:
raise InstallError(str(self.spec) + " is not installed.")
if not force:
dependents = self.installed_dependents
if dependents:
raise PackageStillNeededError(self.spec, dependents)
# Pre-uninstall hook runs first.
spack.hooks.pre_uninstall(self)
# Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
spack.installed_db.remove(self.spec)
tty.msg("Successfully uninstalled %s" % self.spec.short_spec)
# Once everything else is done, run post install hooks
spack.hooks.post_uninstall(self)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def dag_hash(self, length=None):
"""Return a hash of the entire spec DAG, including connectivity."""
if self._hash:
return self._hash[:length]
else:
yaml_text = syaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint
)
sha = hashlib.sha1(yaml_text)
b32_hash = base64.b32encode(sha.digest()).lower()[:length]
if self.concrete:
self._hash = b32_hash
return b32_hash
|
def dag_hash(self, length=None):
"""
Return a hash of the entire spec DAG, including connectivity.
"""
if self._hash:
return self._hash[:length]
else:
# XXX(deptype): ignore 'build' dependencies here
yaml_text = syaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint
)
sha = hashlib.sha1(yaml_text)
b32_hash = base64.b32encode(sha.digest()).lower()[:length]
if self.concrete:
self._hash = b32_hash
return b32_hash
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def to_node_dict(self):
d = syaml_dict()
if self.versions:
d.update(self.versions.to_dict())
if self.compiler:
d.update(self.compiler.to_dict())
if self.namespace:
d["namespace"] = self.namespace
params = syaml_dict(sorted((name, v.value) for name, v in self.variants.items()))
params.update(sorted(self.compiler_flags.items()))
if params:
d["parameters"] = params
if self.architecture:
d["arch"] = self.architecture.to_dict()
# TODO: restore build dependencies here once we have less picky
# TODO: concretization.
deps = self.dependencies_dict(deptype=("link", "run"))
if deps:
d["dependencies"] = syaml_dict(
[
(
name,
syaml_dict(
[
("hash", dspec.spec.dag_hash()),
("type", sorted(str(s) for s in dspec.deptypes)),
]
),
)
for name, dspec in sorted(deps.items())
]
)
return syaml_dict([(self.name, d)])
|
def to_node_dict(self):
d = syaml_dict()
if self.versions:
d.update(self.versions.to_dict())
if self.compiler:
d.update(self.compiler.to_dict())
if self.namespace:
d["namespace"] = self.namespace
params = syaml_dict(sorted((name, v.value) for name, v in self.variants.items()))
params.update(sorted(self.compiler_flags.items()))
if params:
d["parameters"] = params
if self.architecture:
d["arch"] = self.architecture.to_dict()
deps = self.dependencies_dict(deptype=("link", "run"))
if deps:
d["dependencies"] = syaml_dict(
[
(
name,
syaml_dict(
[
("hash", dspec.spec.dag_hash()),
("type", sorted(str(s) for s in dspec.deptypes)),
]
),
)
for name, dspec in sorted(deps.items())
]
)
return syaml_dict([(self.name, d)])
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def __init__(
self,
url_or_fetch_strategy,
name=None,
mirror_path=None,
keep=False,
path=None,
lock=True,
):
"""Create a stage object.
Parameters:
url_or_fetch_strategy
URL of the archive to be downloaded into this stage, OR
a valid FetchStrategy.
name
If a name is provided, then this stage is a named stage
and will persist between runs (or if you construct another
stage object later). If name is not provided, then this
stage will be given a unique name automatically.
mirror_path
If provided, Stage will search Spack's mirrors for
this archive at the mirror_path, before using the
default fetch strategy.
keep
By default, when used as a context manager, the Stage
is deleted on exit when no exceptions are raised.
Pass True to keep the stage intact even if no
exceptions are raised.
"""
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, basestring):
self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
raise ValueError("Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self)
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
# used for mirrored archives of repositories.
self.skip_checksum_for_mirror = True
# TODO : this uses a protected member of tempfile, but seemed the only
# TODO : way to get a temporary name besides, the temporary link name
# TODO : won't be the same as the temporary stage area in tmp_root
self.name = name
if name is None:
self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
self.mirror_path = mirror_path
self.tmp_root = find_tmp_root()
# Try to construct here a temporary name for the stage directory
# If this is a named stage, then construct a named path.
if path is not None:
self.path = path
else:
self.path = join_path(spack.stage_path, self.name)
# Flag to decide whether to delete the stage folder on exit or not
self.keep = keep
# File lock for the stage directory. We use one file for all
# stage locks. See Spec.prefix_lock for details on this approach.
self._lock = None
if lock:
if self.name not in Stage.stage_locks:
sha1 = hashlib.sha1(self.name).digest()
lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
stage_lock_path = join_path(spack.stage_path, ".lock")
Stage.stage_locks[self.name] = llnl.util.lock.Lock(
stage_lock_path, lock_id, 1
)
self._lock = Stage.stage_locks[self.name]
|
def __init__(
self, url_or_fetch_strategy, name=None, mirror_path=None, keep=False, path=None
):
"""Create a stage object.
Parameters:
url_or_fetch_strategy
URL of the archive to be downloaded into this stage, OR
a valid FetchStrategy.
name
If a name is provided, then this stage is a named stage
and will persist between runs (or if you construct another
stage object later). If name is not provided, then this
stage will be given a unique name automatically.
mirror_path
If provided, Stage will search Spack's mirrors for
this archive at the mirror_path, before using the
default fetch strategy.
keep
By default, when used as a context manager, the Stage
is deleted on exit when no exceptions are raised.
Pass True to keep the stage intact even if no
exceptions are raised.
"""
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, basestring):
self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
raise ValueError("Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self)
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
# used for mirrored archives of repositories.
self.skip_checksum_for_mirror = True
# TODO : this uses a protected member of tempfile, but seemed the only
# TODO : way to get a temporary name besides, the temporary link name
# TODO : won't be the same as the temporary stage area in tmp_root
self.name = name
if name is None:
self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
self.mirror_path = mirror_path
self.tmp_root = find_tmp_root()
# Try to construct here a temporary name for the stage directory
# If this is a named stage, then construct a named path.
if path is not None:
self.path = path
else:
self.path = join_path(spack.stage_path, self.name)
# Flag to decide whether to delete the stage folder on exit or not
self.keep = keep
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def __enter__(self):
"""
Entering a stage context will create the stage directory
Returns:
self
"""
if self._lock is not None:
self._lock.acquire_write(timeout=60)
self.create()
return self
|
def __enter__(self):
"""
Entering a stage context will create the stage directory
Returns:
self
"""
self.create()
return self
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Exiting from a stage context will delete the stage directory unless:
- it was explicitly requested not to do so
- an exception has been raised
Args:
exc_type: exception type
exc_val: exception value
exc_tb: exception traceback
Returns:
Boolean
"""
# Delete when there are no exceptions, unless asked to keep.
if exc_type is None and not self.keep:
self.destroy()
if self._lock is not None:
self._lock.release_write()
|
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Exiting from a stage context will delete the stage directory unless:
- it was explicitly requested not to do so
- an exception has been raised
Args:
exc_type: exception type
exc_val: exception value
exc_tb: exception traceback
Returns:
Boolean
"""
# Delete when there are no exceptions, unless asked to keep.
if exc_type is None and not self.keep:
self.destroy()
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def create(self):
"""
Creates the stage directory
If self.tmp_root evaluates to False, the stage directory is
created directly under spack.stage_path, otherwise this will
attempt to create a stage in a temporary directory and link it
into spack.stage_path.
Spack will use the first writable location in spack.tmp_dirs
to create a stage. If there is no valid location in tmp_dirs,
fall back to making the stage inside spack.stage_path.
"""
# Create the top-level stage directory
mkdirp(spack.stage_path)
remove_if_dead_link(self.path)
# If a tmp_root exists then create a directory there and then link it
# in the stage area, otherwise create the stage directory in self.path
if self._need_to_create_path():
if self.tmp_root:
tmp_dir = tempfile.mkdtemp("", STAGE_PREFIX, self.tmp_root)
os.symlink(tmp_dir, self.path)
else:
mkdirp(self.path)
# Make sure we can actually do something with the stage we made.
ensure_access(self.path)
|
def create(self):
"""
Creates the stage directory
If self.tmp_root evaluates to False, the stage directory is
created directly under spack.stage_path, otherwise this will
attempt to create a stage in a temporary directory and link it
into spack.stage_path.
Spack will use the first writable location in spack.tmp_dirs
to create a stage. If there is no valid location in tmp_dirs,
fall back to making the stage inside spack.stage_path.
"""
# Create the top-level stage directory
mkdirp(spack.stage_path)
remove_dead_links(spack.stage_path)
# If a tmp_root exists then create a directory there and then link it
# in the stage area, otherwise create the stage directory in self.path
if self._need_to_create_path():
if self.tmp_root:
tmp_dir = tempfile.mkdtemp("", STAGE_PREFIX, self.tmp_root)
os.symlink(tmp_dir, self.path)
else:
mkdirp(self.path)
# Make sure we can actually do something with the stage we made.
ensure_access(self.path)
|
https://github.com/spack/spack/issues/1266
|
==> Installing cactusext
==> openmpi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openmpi-2.0.0-5l3pomaqonrx3phvbm4kblzsms56ljtj
==> hwloc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hwloc-1.11.3-xgdwohjb2o26uftzaqtcmwf2ejxlhidw
==> git is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/git-2.8.1-jh72meyfuritl2wexqmckrshmufe7az4
==> papi is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/papi-5.4.3-tnovpyennp4cerqns4xnfscfvl6d3zlz
==> hdf5-blosc is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-blosc-master-6m7xtyqubxsb2merc36i2oyhtdvno3zp
==> lmod is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lmod-6.4.1-i2xu3l63aipam7bdlu3ig4eriqtjnbbd
==> hdf5 is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/hdf5-1.10.0-patch1-jfa4zar7ysebuadia7zlhoebrlaeo5lv
==> python is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/python-2.7.11-y6bfneoxof25qt6wgdsbfsra62grfdnz
==> lua is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/lua-5.3.2-xe6niqb4cwgkdpq33qjoi3rby3vcdnew
==> fftw is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/fftw-3.3.4-ojokm7aahpro46e4tvoqgsc57lzbfdum
==> openssl is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/openssl-1.0.2h-ookfh4fsbv5ijqjsewerc5vvf4hzfme6
==> julia is already installed in /home/ux452368/comet/src/spack/opt/spack/linux-centos6-x86_64/gcc-6.1.0-spack/julia-master-bzqdzwekzl3geaeyuexssnelbnwfiwsf
Traceback (most recent call last):
File "/home/ux452368/comet/src/spack/bin/spack", line 179, in <module>
main()
File "/home/ux452368/comet/src/spack/bin/spack", line 157, in main
return_val = command(parser, args)
File "/home/ux452368/comet/src/spack/lib/spack/spack/cmd/install.py", line 91, in install
explicit=True)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 943, in do_install
run_tests=run_tests)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 1096, in do_install_dependencies
dep.package.do_install(**kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/package.py", line 925, in do_install
rec = spack.installed_db.get_record(self.spec)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 76, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 461, in get_record
key = self._get_matching_spec_key(spec, **kwargs)
File "/home/ux452368/comet/src/spack/lib/spack/spack/database.py", line 456, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! julia@master%gcc@6.1.0-spack+binutils~cxx+hdf5+mpi~plot~python arch=linux-centos6-x86_64^binutils@2.25%gcc@6.1.0-spack+gold~krellpatch~libiberty arch=linux-centos6-x86_64^bzip2@1.0.6%gcc@6.1.0-spack arch=linux-centos6-x86_64^cmake@3.5.2%gcc@6.1.0-spack~doc+ncurses+openssl~qt arch=linux-centos6-x86_64^curl@7.49.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^expat@2.1.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^git@2.8.1%gcc@6.1.0-spack arch=linux-centos6-x86_64^hdf5@1.10.0-patch1%gcc@6.1.0-spack+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-centos6-x86_64^hwloc@1.11.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^libpciaccess@0.13.4%gcc@6.1.0-spack arch=linux-centos6-x86_64^libsigsegv@2.10%gcc@6.1.0-spack arch=linux-centos6-x86_64^m4@1.4.17%gcc@6.1.0-spack+sigsegv arch=linux-centos6-x86_64^ncurses@6.0%gcc@6.1.0-spack arch=linux-centos6-x86_64^openmpi@2.0.0%gcc@6.1.0-spack~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-centos6-x86_64^openssl@1.0.2h%gcc@6.1.0-spack arch=linux-centos6-x86_64^python@2.7.11%gcc@6.1.0-spack~ucs4 arch=linux-centos6-x86_64^readline@6.3%gcc@6.1.0-spack arch=linux-centos6-x86_64^sqlite@3.8.5%gcc@6.1.0-spack arch=linux-centos6-x86_64^zlib@1.2.8%gcc@6.1.0-spack arch=linux-centos6-x86_64'
|
KeyError
|
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
The lock is implemented as a spin lock using a nonblocking
call to lockf().
On acquiring an exclusive lock, the lock writes this process's
pid and host to the lock file, in case the holding process
needs to be killed later.
If the lock times out, it raises a ``LockError``.
"""
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
# If this is already open read-only and we want to
# upgrade to an exclusive write lock, close first.
if self._fd is not None:
flags = fcntl.fcntl(self._fd, fcntl.F_GETFL)
if op == fcntl.LOCK_EX and flags | os.O_RDONLY:
os.close(self._fd)
self._fd = None
if self._fd is None:
mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
self._fd = os.open(self._file_path, mode)
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
if error.errno == errno.EAGAIN or error.errno == errno.EACCES:
pass
else:
raise
time.sleep(_sleep_time)
raise LockError("Timed out waiting for lock.")
|
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
The lock is implemented as a spin lock using a nonblocking
call to lockf().
On acquiring an exclusive lock, the lock writes this process's
pid and host to the lock file, in case the holding process
needs to be killed later.
If the lock times out, it raises a ``LockError``.
"""
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
if self._fd is None:
mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY
self._fd = os.open(self._file_path, mode)
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
if error.errno == errno.EAGAIN or error.errno == errno.EACCES:
pass
else:
raise
time.sleep(_sleep_time)
raise LockError("Timed out waiting for lock.")
|
https://github.com/spack/spack/issues/1904
|
$ ./spack/bin/spack find
Traceback (most recent call last):
File "./spack/bin/spack", line 192, in <module>
main()
File "./spack/bin/spack", line 169, in main
return_val = command(parser, args)
File "/users/kumbhar/spack/lib/spack/spack/cmd/find.py", line 148, in find
specs = set(spack.installed_db.query(**q_args))
File "/users/kumbhar/spack/lib/spack/spack/database.py", line 630, in query
with self.read_transaction():
File "/users/kumbhar/spack/lib/spack/llnl/util/lock.py", line 211, in __enter__
self._as = self._acquire_fn()
self._file_path = file_path
File "/users/kumbhar/spack/lib/spack/spack/database.py", line 447, in _read
self.reindex(spack.install_layout)
File "/users/kumbhar/spack/lib/spack/spack/database.py", line 351, in reindex
with transaction:
File "/users/kumbhar/spack/lib/spack/llnl/util/lock.py", line 210, in __enter__
if self._enter() and self._acquire_fn:
File "/users/kumbhar/spack/lib/spack/llnl/util/lock.py", line 241, in _enter
return self._lock.acquire_write(self._timeout)
File "/users/kumbhar/spack/lib/spack/llnl/util/lock.py", line 134, in acquire_write
self._lock(fcntl.LOCK_EX, timeout) # can raise LockError.
File "/users/kumbhar/spack/lib/spack/llnl/util/lock.py", line 76, in _lock
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
IOError: [Errno 9] Bad file descriptor
|
IOError
|
def __init__(self, spec, path, installed, ref_count=0):
self.spec = spec
self.path = str(path)
self.installed = bool(installed)
self.ref_count = ref_count
|
def __init__(self, spec, path, installed, ref_count=0):
self.spec = spec
self.path = path
self.installed = installed
self.ref_count = ref_count
|
https://github.com/spack/spack/issues/150
|
paul ~/C/s/bin > ./spack install memaxes
==> Installing memaxes
==> cmake is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cmake-3.0.2-bv634arumx3m47mpynkw5ygmiqdlcykp.
==> Installing qt
==> glib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/glib-2.42.1-dwjk6vntaa4dextqivrup3odqfjfg6mh.
==> libtiff is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libtiff-4.0.3-n6k5fnztfshg3krijefks5qmnaptvigk.
==> libxml2 is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libxml2-2.9.2-lt2vnkj75wp2cvi2q75azleq3kjr2alr.
==> dbus is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/dbus-1.9.0-vk3dr7ps5csbudicoqgpk3hvl27wlz4z.
==> zlib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/zlib-1.2.8-5cowb3si2gbcqchtdewwxwhmypem4hdu.
==> jpeg is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/jpeg-9a-v6w3vn6ytc4owszkccwmolwhytnbvasw.
==> Installing gtkplus
==> gdk-pixbuf is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/gdk-pixbuf-2.31.2-n5bcojzjasynb3pn55lbyw5jebko3ptu.
==> pango is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/pango-1.36.8-boik7co2rayuuosdnv6qnzpkiklkl4ug.
==> atk is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/atk-2.14.0-6xy47vuatz52e3luykhccrcpm6bkptpw.
==> cairo is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cairo-1.14.0-rqplcapyohdgxjolv26ktt7esmqptstj.
==> Trying to fetch from http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz
######################################################################## 100.0%
==> Staging archive: /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y/gtk+-2.24.25.tar.xz
==> Created stage in /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y.
==> Patched gtkplus
==> Building gtkplus.
Traceback (most recent call last):
File "/home/paul/Clinic/spack/lib/spack/spack/build_environment.py", line 278, in fork
function()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 839, in real_work
cleanup()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 786, in cleanup
self.remove_prefix()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 614, in remove_prefix
spack.installed_db.remove(self.spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 74, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 502, in remove
return self._remove(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 472, in _remove
key = self._get_matching_spec_key(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 442, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! gtkplus@2.24.25%gcc@5.2.1=x86_64^atk@2.14.0%gcc@5.2.1=x86_64^cairo@1.14.0%gcc@5.2.1=x86_64^fontconfig@2.11.1%gcc@5.2.1=x86_64^freetype@2.5.3%gcc@5.2.1=x86_64^gdk-pixbuf@2.31.2%gcc@5.2.1=x86_64^glib@2.42.1%gcc@5.2.1=x86_64^harfbuzz@0.9.37%gcc@5.2.1=x86_64^icu@54.1%gcc@5.2.1=x86_64^jpeg@9a%gcc@5.2.1=x86_64^libffi@3.2.1%gcc@5.2.1=x86_64^libpng@1.6.16%gcc@5.2.1=x86_64^libtiff@4.0.3%gcc@5.2.1=x86_64^pango@1.36.8%gcc@5.2.1=x86_64^pixman@0.32.6%gcc@5.2.1=x86_64^xz@5.2.2%gcc@5.2.1=x86_64^zlib@1.2.8%gcc@5.2.1=x86_64'
==> Error: Installation process had nonzero exit code.
|
KeyError
|
def _write_to_yaml(self, stream):
"""Write out the databsae to a YAML file.
This function does not do any locking or transactions.
"""
# map from per-spec hash code to installation record.
installs = dict((k, v.to_dict()) for k, v in self._data.items())
# database includes installation list and version.
# NOTE: this DB version does not handle multiple installs of
# the same spec well. If there are 2 identical specs with
# different paths, it can't differentiate.
# TODO: fix this before we support multiple install locations.
database = {"database": {"installs": installs, "version": str(_db_version)}}
try:
return yaml.dump(database, stream=stream, default_flow_style=False)
except YAMLError as e:
raise SpackYAMLError("error writing YAML database:", str(e))
|
def _write_to_yaml(self, stream):
"""Write out the databsae to a YAML file.
This function does not do any locking or transactions.
"""
# map from per-spec hash code to installation record.
installs = dict((k, v.to_dict()) for k, v in self._data.items())
# databaes includes installation list and version.
# NOTE: this DB version does not handle multiple installs of
# the same spec well. If there are 2 identical specs with
# different paths, it can't differentiate.
# TODO: fix this before we support multiple install locations.
database = {"database": {"installs": installs, "version": str(_db_version)}}
try:
return yaml.dump(database, stream=stream, default_flow_style=False)
except YAMLError as e:
raise SpackYAMLError("error writing YAML database:", str(e))
|
https://github.com/spack/spack/issues/150
|
paul ~/C/s/bin > ./spack install memaxes
==> Installing memaxes
==> cmake is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cmake-3.0.2-bv634arumx3m47mpynkw5ygmiqdlcykp.
==> Installing qt
==> glib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/glib-2.42.1-dwjk6vntaa4dextqivrup3odqfjfg6mh.
==> libtiff is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libtiff-4.0.3-n6k5fnztfshg3krijefks5qmnaptvigk.
==> libxml2 is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libxml2-2.9.2-lt2vnkj75wp2cvi2q75azleq3kjr2alr.
==> dbus is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/dbus-1.9.0-vk3dr7ps5csbudicoqgpk3hvl27wlz4z.
==> zlib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/zlib-1.2.8-5cowb3si2gbcqchtdewwxwhmypem4hdu.
==> jpeg is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/jpeg-9a-v6w3vn6ytc4owszkccwmolwhytnbvasw.
==> Installing gtkplus
==> gdk-pixbuf is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/gdk-pixbuf-2.31.2-n5bcojzjasynb3pn55lbyw5jebko3ptu.
==> pango is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/pango-1.36.8-boik7co2rayuuosdnv6qnzpkiklkl4ug.
==> atk is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/atk-2.14.0-6xy47vuatz52e3luykhccrcpm6bkptpw.
==> cairo is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cairo-1.14.0-rqplcapyohdgxjolv26ktt7esmqptstj.
==> Trying to fetch from http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz
######################################################################## 100.0%
==> Staging archive: /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y/gtk+-2.24.25.tar.xz
==> Created stage in /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y.
==> Patched gtkplus
==> Building gtkplus.
Traceback (most recent call last):
File "/home/paul/Clinic/spack/lib/spack/spack/build_environment.py", line 278, in fork
function()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 839, in real_work
cleanup()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 786, in cleanup
self.remove_prefix()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 614, in remove_prefix
spack.installed_db.remove(self.spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 74, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 502, in remove
return self._remove(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 472, in _remove
key = self._get_matching_spec_key(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 442, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! gtkplus@2.24.25%gcc@5.2.1=x86_64^atk@2.14.0%gcc@5.2.1=x86_64^cairo@1.14.0%gcc@5.2.1=x86_64^fontconfig@2.11.1%gcc@5.2.1=x86_64^freetype@2.5.3%gcc@5.2.1=x86_64^gdk-pixbuf@2.31.2%gcc@5.2.1=x86_64^glib@2.42.1%gcc@5.2.1=x86_64^harfbuzz@0.9.37%gcc@5.2.1=x86_64^icu@54.1%gcc@5.2.1=x86_64^jpeg@9a%gcc@5.2.1=x86_64^libffi@3.2.1%gcc@5.2.1=x86_64^libpng@1.6.16%gcc@5.2.1=x86_64^libtiff@4.0.3%gcc@5.2.1=x86_64^pango@1.36.8%gcc@5.2.1=x86_64^pixman@0.32.6%gcc@5.2.1=x86_64^xz@5.2.2%gcc@5.2.1=x86_64^zlib@1.2.8%gcc@5.2.1=x86_64'
==> Error: Installation process had nonzero exit code.
|
KeyError
|
def _write(self):
"""Write the in-memory database index to its file path.
Does no locking.
"""
temp_file = self._index_path + (".%s.%s.temp" % (socket.getfqdn(), os.getpid()))
# Write a temporary database file them move it into place
try:
with open(temp_file, "w") as f:
self._write_to_yaml(f)
os.rename(temp_file, self._index_path)
except:
# Clean up temp file if something goes wrong.
if os.path.exists(temp_file):
os.remove(temp_file)
raise
|
def _write(self):
"""Write the in-memory database index to its file path.
Does no locking.
"""
temp_name = "%s.%s.temp" % (socket.getfqdn(), os.getpid())
temp_file = join_path(self._db_dir, temp_name)
# Write a temporary database file them move it into place
try:
with open(temp_file, "w") as f:
self._write_to_yaml(f)
os.rename(temp_file, self._index_path)
except:
# Clean up temp file if something goes wrong.
if os.path.exists(temp_file):
os.remove(temp_file)
raise
|
https://github.com/spack/spack/issues/150
|
paul ~/C/s/bin > ./spack install memaxes
==> Installing memaxes
==> cmake is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cmake-3.0.2-bv634arumx3m47mpynkw5ygmiqdlcykp.
==> Installing qt
==> glib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/glib-2.42.1-dwjk6vntaa4dextqivrup3odqfjfg6mh.
==> libtiff is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libtiff-4.0.3-n6k5fnztfshg3krijefks5qmnaptvigk.
==> libxml2 is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/libxml2-2.9.2-lt2vnkj75wp2cvi2q75azleq3kjr2alr.
==> dbus is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/dbus-1.9.0-vk3dr7ps5csbudicoqgpk3hvl27wlz4z.
==> zlib is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/zlib-1.2.8-5cowb3si2gbcqchtdewwxwhmypem4hdu.
==> jpeg is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/jpeg-9a-v6w3vn6ytc4owszkccwmolwhytnbvasw.
==> Installing gtkplus
==> gdk-pixbuf is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/gdk-pixbuf-2.31.2-n5bcojzjasynb3pn55lbyw5jebko3ptu.
==> pango is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/pango-1.36.8-boik7co2rayuuosdnv6qnzpkiklkl4ug.
==> atk is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/atk-2.14.0-6xy47vuatz52e3luykhccrcpm6bkptpw.
==> cairo is already installed in /home/paul/Clinic/spack/opt/spack/x86_64/gcc-5.2.1/cairo-1.14.0-rqplcapyohdgxjolv26ktt7esmqptstj.
==> Trying to fetch from http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz
######################################################################## 100.0%
==> Staging archive: /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y/gtk+-2.24.25.tar.xz
==> Created stage in /home/paul/Clinic/spack/var/spack/stage/gtkplus-2.24.25-dqc66tudnnl3qte7r67vinptia3dnu7y.
==> Patched gtkplus
==> Building gtkplus.
Traceback (most recent call last):
File "/home/paul/Clinic/spack/lib/spack/spack/build_environment.py", line 278, in fork
function()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 839, in real_work
cleanup()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 786, in cleanup
self.remove_prefix()
File "/home/paul/Clinic/spack/lib/spack/spack/package.py", line 614, in remove_prefix
spack.installed_db.remove(self.spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 74, in converter
return function(self, spec_like, *args, **kwargs)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 502, in remove
return self._remove(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 472, in _remove
key = self._get_matching_spec_key(spec)
File "/home/paul/Clinic/spack/lib/spack/spack/database.py", line 442, in _get_matching_spec_key
raise KeyError("No such spec in database! %s" % spec)
KeyError: 'No such spec in database! gtkplus@2.24.25%gcc@5.2.1=x86_64^atk@2.14.0%gcc@5.2.1=x86_64^cairo@1.14.0%gcc@5.2.1=x86_64^fontconfig@2.11.1%gcc@5.2.1=x86_64^freetype@2.5.3%gcc@5.2.1=x86_64^gdk-pixbuf@2.31.2%gcc@5.2.1=x86_64^glib@2.42.1%gcc@5.2.1=x86_64^harfbuzz@0.9.37%gcc@5.2.1=x86_64^icu@54.1%gcc@5.2.1=x86_64^jpeg@9a%gcc@5.2.1=x86_64^libffi@3.2.1%gcc@5.2.1=x86_64^libpng@1.6.16%gcc@5.2.1=x86_64^libtiff@4.0.3%gcc@5.2.1=x86_64^pango@1.36.8%gcc@5.2.1=x86_64^pixman@0.32.6%gcc@5.2.1=x86_64^xz@5.2.2%gcc@5.2.1=x86_64^zlib@1.2.8%gcc@5.2.1=x86_64'
==> Error: Installation process had nonzero exit code.
|
KeyError
|
def find_working_dir(self):
# type: () -> Optional[str]
view = self._current_view()
window = view.window() if view else None
file_name = self._current_filename()
if file_name:
file_dir = os.path.dirname(file_name)
if os.path.isdir(file_dir):
return file_dir
if window:
folders = window.folders()
if folders and os.path.isdir(folders[0]):
return folders[0]
return None
|
def find_working_dir(self):
# type: () -> Optional[str]
view = self._current_view()
window = view.window() if view else None
if view and view.file_name():
file_dir = os.path.dirname(view.file_name())
if os.path.isdir(file_dir):
return file_dir
if window:
folders = window.folders()
if folders and os.path.isdir(folders[0]):
return folders[0]
return None
|
https://github.com/timbrel/GitSavvy/issues/1448
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 157, in git
working_dir = self.repo_path
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 437, in repo_path
return self.get_repo_path()
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 409, in get_repo_path
window = view.window()
AttributeError: 'NoneType' object has no attribute 'window'
|
AttributeError
|
def find_repo_path(self):
# type: () -> Optional[str]
"""
Similar to find_working_dir, except that it does not stop on the first
directory found, rather on the first git repository found.
"""
view = self._current_view()
window = view.window() if view else None
repo_path = None
file_name = self._current_filename()
if file_name:
file_dir = os.path.dirname(file_name)
if os.path.isdir(file_dir):
repo_path = self._find_git_toplevel(file_dir)
# fallback: use the first folder if the current file is not inside a git repo
if not repo_path:
if window:
folders = window.folders()
if folders and os.path.isdir(folders[0]):
repo_path = self._find_git_toplevel(folders[0])
return os.path.realpath(repo_path) if repo_path else None
|
def find_repo_path(self):
# type: () -> Optional[str]
"""
Similar to find_working_dir, except that it does not stop on the first
directory found, rather on the first git repository found.
"""
view = self._current_view()
window = view.window() if view else None
repo_path = None
# try the current file first
if view and view.file_name():
file_dir = os.path.dirname(view.file_name())
if os.path.isdir(file_dir):
repo_path = self._find_git_toplevel(file_dir)
# fallback: use the first folder if the current file is not inside a git repo
if not repo_path:
if window:
folders = window.folders()
if folders and os.path.isdir(folders[0]):
repo_path = self._find_git_toplevel(folders[0])
return os.path.realpath(repo_path) if repo_path else None
|
https://github.com/timbrel/GitSavvy/issues/1448
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 157, in git
working_dir = self.repo_path
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 437, in repo_path
return self.get_repo_path()
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 409, in get_repo_path
window = view.window()
AttributeError: 'NoneType' object has no attribute 'window'
|
AttributeError
|
def __init__(self, window=None):
# type: (sublime.Window) -> None
self._window = window or sublime.active_window()
self._global_settings = get_global_settings()
|
def __init__(self, parent=None):
self.parent = parent
self.global_settings = sublime.load_settings("GitSavvy.sublime-settings")
|
https://github.com/timbrel/GitSavvy/issues/1201
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 220, in git
stdout, stderr = self.decode_stdout(stdout), self.decode_stdout(stderr)
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 277, in decode_stdout
fallback_encoding = self.savvy_settings.get("fallback_encoding")
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\settings.py", line 12, in get
project_savvy_settings = view.settings().get("GitSavvy", {}) or {}
AttributeError: 'NoneType' object has no attribute 'settings'
|
AttributeError
|
def get(self, key, default=None):
try:
return get_project_settings(self._window)[key]
except KeyError:
return self._global_settings.get(key, default)
|
def get(self, key, default=None):
window = sublime.active_window()
view = window.active_view()
project_savvy_settings = view.settings().get("GitSavvy", {}) or {}
if key in project_savvy_settings:
return project_savvy_settings[key]
# fall back to old style project setting
project_data = window.project_data()
if project_data and "GitSavvy" in project_data:
project_savvy_settings = project_data["GitSavvy"]
if key in project_savvy_settings:
return project_savvy_settings.get(key)
return self.global_settings.get(key, default)
|
https://github.com/timbrel/GitSavvy/issues/1201
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 220, in git
stdout, stderr = self.decode_stdout(stdout), self.decode_stdout(stderr)
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 277, in decode_stdout
fallback_encoding = self.savvy_settings.get("fallback_encoding")
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\settings.py", line 12, in get
project_savvy_settings = view.settings().get("GitSavvy", {}) or {}
AttributeError: 'NoneType' object has no attribute 'settings'
|
AttributeError
|
def set(self, key, value):
self._global_settings.set(key, value)
|
def set(self, key, value):
self.global_settings.set(key, value)
|
https://github.com/timbrel/GitSavvy/issues/1201
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 220, in git
stdout, stderr = self.decode_stdout(stdout), self.decode_stdout(stderr)
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 277, in decode_stdout
fallback_encoding = self.savvy_settings.get("fallback_encoding")
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\settings.py", line 12, in get
project_savvy_settings = view.settings().get("GitSavvy", {}) or {}
AttributeError: 'NoneType' object has no attribute 'settings'
|
AttributeError
|
def savvy_settings(self):
if not self._savvy_settings:
window = (
maybe(lambda: self.window) # type: ignore[attr-defined]
or maybe(lambda: self.view.window()) # type: ignore[attr-defined]
or sublime.active_window()
)
self._savvy_settings = GitSavvySettings(window)
return self._savvy_settings
|
def savvy_settings(self):
if not self._savvy_settings:
self._savvy_settings = GitSavvySettings(self)
return self._savvy_settings
|
https://github.com/timbrel/GitSavvy/issues/1201
|
Traceback (most recent call last):
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 220, in git
stdout, stderr = self.decode_stdout(stdout), self.decode_stdout(stderr)
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\git_command.py", line 277, in decode_stdout
fallback_encoding = self.savvy_settings.get("fallback_encoding")
File "C:\Users\c-flo\AppData\Roaming\Sublime Text 3\Packages\GitSavvy\core\settings.py", line 12, in get
project_savvy_settings = view.settings().get("GitSavvy", {}) or {}
AttributeError: 'NoneType' object has no attribute 'settings'
|
AttributeError
|
def branch_relatives(self, branch):
# type: (str) -> List[str]
"""Get list of all relatives from ``git show-branch`` results"""
output = self.git("show-branch", "--no-color") # type: str
try:
prelude, body = re.split(r"^-+$", output, flags=re.M)
except ValueError:
# If there is only one branch, git changes the output format
# and omits the prelude and column indicator.
lines = filter(None, output.splitlines())
else:
match = re.search(r"^(\s+)\*", prelude, re.M)
if not match:
print("branch {} not found in header information".format(branch))
return []
branch_column = len(match.group(1))
lines = (
line
for line in filter(None, body.splitlines())
if line[branch_column] != " "
)
relatives = [] # type: List[str]
for line in lines:
match = EXTRACT_BRANCH_NAME.match(line)
if match:
branch_name = match.group(1)
if branch_name != branch and branch_name not in relatives:
relatives.append(branch_name)
return relatives
|
def branch_relatives(self, branch):
# type: (str) -> List[str]
"""Get list of all relatives from ``git show-branch`` results"""
output = self.git("show-branch", "--no-color")
prelude, body = re.split(r"^-+$", output, flags=re.M)
match = re.search(r"^(\s+)\*", prelude, re.M)
if not match:
print("branch {} not found in header information".format(branch))
return []
branch_column = len(match.group(1))
relatives = [] # type: List[str]
for line in filter(None, body.splitlines()): # type: str
if line[branch_column] != " ":
match = EXTRACT_BRANCH_NAME.match(line)
if match:
branch_name = match.group(1)
if branch_name != branch and branch_name not in relatives:
relatives.append(branch_name)
return relatives
|
https://github.com/timbrel/GitSavvy/issues/1261
|
Traceback (most recent call last):
File "/Applications/Sublime Text.app/Contents/MacOS/sublime_plugin.py", line 1052, in run_
return self.run()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 48, in run
RebaseInterface(repo_path=self.repo_path)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 134, in __init__
super().__init__(*args, **kwargs)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 87, in __init__
self.create_view(repo_path)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 115, in create_view
self.render()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 131, in render
rendered = self._render_template()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 149, in _render_template
keyed_content = self.get_keyed_content()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 187, in get_keyed_content
for key, render_fn in self.partials.items()
File "./python3.3/collections/__init__.py", line 56, in __init__
File "./python3.3/collections/abc.py", line 578, in update
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 187, in <genexpr>
for key, render_fn in self.partials.items()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 200, in render_diverged_commits
start=self.base_commit(),
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 356, in base_commit
base_ref = self.base_ref()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 329, in base_ref
default=remote_branch or "master")
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/git_mixins/rebase.py", line 60, in nearest_branch
relatives = self.branch_relatives(branch)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/git_mixins/rebase.py", line 23, in branch_relatives
prelude, body = re.split(r'^-+$', output, flags=re.M)
ValueError: need more than 1 value to unpack
|
ValueError
|
def nearest_branch(self, branch, default="master"):
# type: (str, str) -> str
"""
Find the nearest commit in current branch history that exists
on a different branch and return that branch name.
If no such branch is found, return the given default ("master" if not
specified).
"""
relatives = self.branch_relatives(branch)
if not relatives:
return default
return relatives[0]
|
def nearest_branch(self, branch, default="master"):
# type: (str, str) -> str
"""
Find the nearest commit in current branch history that exists
on a different branch and return that branch name.
We filter these branches through a list of known ancestors which have
an initial branch point with current branch, and pick the first one
that matches both.
If no such branch is found, returns the given default ("master" if not
specified).
Solution snagged from:
http://stackoverflow.com/a/17843908/484127
http://stackoverflow.com/questions/1527234
"""
try:
relatives = self.branch_relatives(branch)
except GitSavvyError:
return default
if not relatives:
util.debug.add_to_log(
"nearest_branch: No relatives found. Possibly on a root branch!"
)
return default
util.debug.add_to_log(
"nearest_branch: found {} relatives: {}".format(len(relatives), relatives)
)
return relatives[0]
|
https://github.com/timbrel/GitSavvy/issues/1261
|
Traceback (most recent call last):
File "/Applications/Sublime Text.app/Contents/MacOS/sublime_plugin.py", line 1052, in run_
return self.run()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 48, in run
RebaseInterface(repo_path=self.repo_path)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 134, in __init__
super().__init__(*args, **kwargs)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 87, in __init__
self.create_view(repo_path)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 115, in create_view
self.render()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 131, in render
rendered = self._render_template()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 149, in _render_template
keyed_content = self.get_keyed_content()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 187, in get_keyed_content
for key, render_fn in self.partials.items()
File "./python3.3/collections/__init__.py", line 56, in __init__
File "./python3.3/collections/abc.py", line 578, in update
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/common/ui.py", line 187, in <genexpr>
for key, render_fn in self.partials.items()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 200, in render_diverged_commits
start=self.base_commit(),
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 356, in base_commit
base_ref = self.base_ref()
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/interfaces/rebase.py", line 329, in base_ref
default=remote_branch or "master")
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/git_mixins/rebase.py", line 60, in nearest_branch
relatives = self.branch_relatives(branch)
File "/Users/pavel.savchenko/Library/Application Support/Sublime Text 3/Packages/GitSavvy/core/git_mixins/rebase.py", line 23, in branch_relatives
prelude, body = re.split(r'^-+$', output, flags=re.M)
ValueError: need more than 1 value to unpack
|
ValueError
|
def git(
self,
*args,
stdin=None,
working_dir=None,
show_panel=False,
throw_on_stderr=True,
decode=True,
encode=True,
stdin_encoding="UTF-8",
custom_environ=None,
):
"""
Run the git command specified in `*args` and return the output
of the git command as a string.
If stdin is provided, it should be a string and will be piped to
the git process. If `working_dir` is provided, set this as the
current working directory for the git process; otherwise,
the `repo_path` value will be used.
"""
args = self._include_global_flags(args)
command = (self.git_binary_path,) + tuple(arg for arg in args if arg)
command_str = " ".join(command)
show_panel_overrides = self.savvy_settings.get("show_panel_for")
show_panel = show_panel or args[0] in show_panel_overrides
close_panel_for = self.savvy_settings.get("close_panel_for") or []
if args[0] in close_panel_for:
sublime.active_window().run_command("hide_panel", {"cancel": True})
live_panel_output = self.savvy_settings.get("live_panel_output", False)
stdout, stderr = None, None
try:
if not working_dir:
working_dir = self.repo_path
except RuntimeError as e:
# do not show panel when the window does not exist
raise GitSavvyError(e, show_panel=False)
except Exception as e:
# offer initialization when "Not a git repository" is thrown from self.repo_path
if type(e) == ValueError and e.args and "Not a git repository" in e.args[0]:
sublime.set_timeout_async(
lambda: sublime.active_window().run_command("gs_offer_init")
)
raise GitSavvyError(e)
try:
startupinfo = None
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
environ = os.environ.copy()
environ.update(custom_environ or {})
start = time.time()
p = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir,
env=environ,
startupinfo=startupinfo,
)
def initialize_panel():
# clear panel
util.log.panel("")
if self.savvy_settings.get("show_stdin_in_output") and stdin is not None:
util.log.panel_append("STDIN\n{}\n".format(stdin))
if self.savvy_settings.get("show_input_in_output"):
util.log.panel_append("> {}\n".format(command_str))
if show_panel and live_panel_output:
wrapper = LoggingProcessWrapper(
p, self.savvy_settings.get("live_panel_output_timeout", 10000)
)
initialize_panel()
if stdin is not None and encode:
stdin = stdin.encode(encoding=stdin_encoding)
if show_panel and live_panel_output:
stdout, stderr = wrapper.communicate(stdin)
else:
stdout, stderr = p.communicate(stdin)
if decode:
stdout, stderr = self.decode_stdout(stdout), self.decode_stdout(stderr)
if show_panel and not live_panel_output:
initialize_panel()
if stdout:
util.log.panel_append(stdout)
if stderr:
if stdout:
util.log.panel_append("\n")
util.log.panel_append(stderr)
except Exception as e:
# this should never be reached
raise GitSavvyError(
"Please report this error to GitSavvy:\n\n{}\n\n{}".format(
e, traceback.format_exc()
)
)
finally:
end = time.time()
if decode:
util.debug.log_git(args, stdin, stdout, stderr, end - start)
else:
util.debug.log_git(
args,
stdin,
self.decode_stdout(stdout),
self.decode_stdout(stderr),
end - start,
)
if show_panel and self.savvy_settings.get("show_time_elapsed_in_output", True):
util.log.panel_append("\n[Done in {:.2f}s]".format(end - start))
if throw_on_stderr and not p.returncode == 0:
sublime.active_window().status_message(
"Failed to run `git {}`. See log for details.".format(command[1])
)
if "*** Please tell me who you are." in stderr:
sublime.set_timeout_async(
lambda: sublime.active_window().run_command("gs_setup_user")
)
if stdout or stderr:
raise GitSavvyError(
"`{}` failed with following output:\n{}\n{}".format(
command_str, stdout, stderr
)
)
else:
raise GitSavvyError("`{}` failed.".format(command_str))
return stdout
|
def git(
self,
*args,
stdin=None,
working_dir=None,
show_panel=False,
throw_on_stderr=True,
decode=True,
encode=True,
stdin_encoding="UTF-8",
custom_environ=None,
):
"""
Run the git command specified in `*args` and return the output
of the git command as a string.
If stdin is provided, it should be a string and will be piped to
the git process. If `working_dir` is provided, set this as the
current working directory for the git process; otherwise,
the `repo_path` value will be used.
"""
args = self._include_global_flags(args)
command = (self.git_binary_path,) + tuple(arg for arg in args if arg)
command_str = " ".join(command)
show_panel_overrides = self.savvy_settings.get("show_panel_for")
show_panel = show_panel or args[0] in show_panel_overrides
close_panel_for = self.savvy_settings.get("close_panel_for") or []
if args[0] in close_panel_for:
sublime.active_window().run_command("hide_panel", {"cancel": True})
live_panel_output = self.savvy_settings.get("live_panel_output", False)
stdout, stderr = None, None
try:
if not working_dir:
working_dir = self.repo_path
except RuntimeError as e:
# do not show panel when the window does not exist
raise GitSavvyError(e, show_panel=False)
except Exception as e:
# offer initialization when "Not a git repository" is thrown from self.repo_path
if type(e) == ValueError and e.args and "Not a git repository" in e.args[0]:
sublime.set_timeout_async(
lambda: sublime.active_window().run_command("gs_offer_init")
)
raise GitSavvyError(e)
try:
startupinfo = None
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
environ = os.environ.copy()
environ.update(custom_environ or {})
start = time.time()
p = subprocess.Popen(
command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir,
env=environ,
startupinfo=startupinfo,
)
def initialize_panel():
# clear panel
util.log.panel("")
if self.savvy_settings.get("show_stdin_in_output") and stdin is not None:
util.log.panel_append("STDIN\n{}\n".format(stdin))
if self.savvy_settings.get("show_input_in_output"):
util.log.panel_append("> {}\n".format(command_str))
if show_panel and live_panel_output:
wrapper = LoggingProcessWrapper(
p, self.savvy_settings.get("live_panel_output_timeout", 10000)
)
initialize_panel()
if stdin is not None and encode:
stdin = stdin.encode(encoding=stdin_encoding)
if show_panel and live_panel_output:
stdout, stderr = wrapper.communicate(stdin)
else:
stdout, stderr = p.communicate(stdin)
if decode:
stdout, stderr = self.decode_stdout(stdout), stderr.decode()
if show_panel and not live_panel_output:
initialize_panel()
if stdout:
util.log.panel_append(stdout)
if stderr:
if stdout:
util.log.panel_append("\n")
util.log.panel_append(stderr)
except Exception as e:
# this should never be reached
raise GitSavvyError(
"Please report this error to GitSavvy:\n\n{}\n\n{}".format(
e, traceback.format_exc()
)
)
finally:
end = time.time()
if decode:
util.debug.log_git(args, stdin, stdout, stderr, end - start)
else:
util.debug.log_git(
args, stdin, self.decode_stdout(stdout), stderr.decode(), end - start
)
if show_panel and self.savvy_settings.get("show_time_elapsed_in_output", True):
util.log.panel_append("\n[Done in {:.2f}s]".format(end - start))
if throw_on_stderr and not p.returncode == 0:
sublime.active_window().status_message(
"Failed to run `git {}`. See log for details.".format(command[1])
)
if "*** Please tell me who you are." in stderr:
sublime.set_timeout_async(
lambda: sublime.active_window().run_command("gs_setup_user")
)
if stdout or stderr:
raise GitSavvyError(
"`{}` failed with following output:\n{}\n{}".format(
command_str, stdout, stderr
)
)
else:
raise GitSavvyError("`{}` failed.".format(command_str))
return stdout
|
https://github.com/timbrel/GitSavvy/issues/966
|
'utf-8' codec can't decode byte 0x8a in position 16: invalid start byte
Traceback (most recent call last):
File "[...]Sublime\Data\Installed Packages\GitSavvy.sublime-package\core/git_command.py", line 215, in git
stdout, stderr = self.decode_stdout(stdout), stderr.decode()
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8a in position 16: invalid start byte
|
UnicodeDecodeError
|
def decode_stdout(self, stdout):
fallback_encoding = self.savvy_settings.get("fallback_encoding")
silent_fallback = self.savvy_settings.get("silent_fallback")
try:
return stdout.decode()
except UnicodeDecodeError:
try:
return stdout.decode("latin-1")
except UnicodeDecodeError as unicode_err:
if silent_fallback or sublime.ok_cancel_dialog(
UTF8_PARSE_ERROR_MSG, "Fallback?"
):
try:
return stdout.decode(fallback_encoding)
except UnicodeDecodeError as fallback_err:
sublime.error_message(FALLBACK_PARSE_ERROR_MSG)
raise fallback_err
raise unicode_err
|
def decode_stdout(self, stdout):
fallback_encoding = self.savvy_settings.get("fallback_encoding")
silent_fallback = self.savvy_settings.get("silent_fallback")
try:
return stdout.decode()
except UnicodeDecodeError as unicode_err:
try:
return stdout.decode("latin-1")
except UnicodeDecodeError as unicode_err:
if silent_fallback or sublime.ok_cancel_dialog(
UTF8_PARSE_ERROR_MSG, "Fallback?"
):
try:
return stdout.decode(fallback_encoding)
except UnicodeDecodeError as fallback_err:
sublime.error_message(FALLBACK_PARSE_ERROR_MSG)
raise fallback_err
raise unicode_err
|
https://github.com/timbrel/GitSavvy/issues/966
|
'utf-8' codec can't decode byte 0x8a in position 16: invalid start byte
Traceback (most recent call last):
File "[...]Sublime\Data\Installed Packages\GitSavvy.sublime-package\core/git_command.py", line 215, in git
stdout, stderr = self.decode_stdout(stdout), stderr.decode()
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8a in position 16: invalid start byte
|
UnicodeDecodeError
|
def do_action(self, commit_hash, **kwargs):
self.git("cherry-pick", commit_hash)
sublime.active_window().status_message(
"Commit %s cherry-picked successfully." % commit_hash
)
util.view.refresh_gitsavvy(self.window.active_view())
|
def do_action(self, commit_hash, **kwargs):
self.git("cherry-pick", commit_hash)
self.view.window().status_message(
"Commit %s cherry-picked successfully." % commit_hash
)
util.view.refresh_gitsavvy(self.window.active_view())
|
https://github.com/timbrel/GitSavvy/issues/941
|
Traceback (most recent call last):
File "/Users/koenlageveen/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package/core/ui_mixins/quick_panel.py", line 526, in <lambda>
File "/Users/koenlageveen/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package/core/ui_mixins/quick_panel.py", line 530, in on_selection_async
File "/Users/koenlageveen/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package/core/commands/log.py", line 32, in <lambda>
File "/Users/koenlageveen/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package/core/commands/log.py", line 37, in on_done
File "/Users/koenlageveen/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package/core/commands/cherry_pick.py", line 17, in do_action
AttributeError: 'GsCherryPickCommand' object has no attribute 'view'
|
AttributeError
|
def refresh_gitsavvy(
view, refresh_sidebar=False, refresh_status_bar=True, interface_reset_cursor=False
):
"""
Called after GitSavvy action was taken that may have effected the
state of the Git repo.
"""
if view is None:
return
if view.settings().get("git_savvy.interface") is not None:
view.run_command(
"gs_interface_refresh", {"nuke_cursors": interface_reset_cursor}
)
if view.settings().get("git_savvy.log_graph_view", False):
view.run_command("gs_log_graph_refresh")
if view.window() and refresh_status_bar:
view.run_command("gs_update_status_bar")
if view.window() and refresh_sidebar:
view.window().run_command("refresh_folder_list")
|
def refresh_gitsavvy(
view, refresh_sidebar=False, refresh_status_bar=True, interface_reset_cursor=False
):
"""
Called after GitSavvy action was taken that may have effected the
state of the Git repo.
"""
if view is None:
return
if view.settings().get("git_savvy.interface") is not None:
view.run_command(
"gs_interface_refresh", {"nuke_cursors": interface_reset_cursor}
)
if view.settings().get("git_savvy.log_graph_view", False):
view.run_command("gs_log_graph_refresh")
if refresh_status_bar:
view.run_command("gs_update_status_bar")
if view.window() and refresh_sidebar:
view.window().run_command("refresh_folder_list")
|
https://github.com/timbrel/GitSavvy/issues/772
|
Traceback (most recent call last):
File "core.git_command in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 103, in git
File "core.git_command in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 297, in repo_path
RuntimeError: Window does not exist.
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "core.commands.commit in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 142, in <lambda>
File "core.commands.commit in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 162, in run_async
File "core.git_command in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 106, in git
File "core.exceptions in /Users/user/Library/Application Support/Sublime Text 3/Installed Packages/GitSavvy.sublime-package", line 7, in __init__
TypeError: GitSavvyError does not take keyword arguments
|
RuntimeError
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.