code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
import pycurl
import select
import six
import logging
from threading import Lock
from grab.error import GrabTooManyRedirectsError
# Synthetic (non-libcurl) error code used when Grab follows too many
# <meta refresh> redirects in a row.
ERROR_TOO_MANY_REFRESH_REDIRECTS = -2
#ERROR_INTERNAL_GRAB_ERROR = -3
# Maps a numeric error code to a short human-readable abbreviation.
ERROR_ABBR = {
    ERROR_TOO_MANY_REFRESH_REDIRECTS: 'too-many-refresh-redirects',
    #ERROR_INTERNAL_GRAB_ERROR: 'internal-grab-error',
}
# Populate ERROR_ABBR with every pycurl E_* constant, turning e.g.
# E_COULDNT_CONNECT into 'couldnt-connect'.
for key in dir(pycurl):
    if key.startswith('E_'):
        abbr = key[2:].lower().replace('_', '-')
        ERROR_ABBR[getattr(pycurl, key)] = abbr
class MulticurlTransport(object):
    """Network transport built on pycurl's CurlMulti interface.

    Maintains a fixed-size pool of ``pycurl.Curl`` handles (``freelist``)
    and a ``registry`` mapping each in-flight curl handle's ``id()`` to the
    Grab objects associated with it.  ``network_op_lock`` serializes all
    multi-handle operations.
    """
    def __init__(self, socket_number):
        # socket_number: maximum number of simultaneous connections;
        # one Curl handle is pre-created per slot.
        self.socket_number = socket_number
        self.multi = pycurl.CurlMulti()
        self.multi.handles = []
        self.freelist = []
        self.registry = {}
        # Tracks how many requests each Curl handle has served, keyed by id().
        self.connection_count = {}
        self.network_op_lock = Lock()
        # Create curl instances
        for x in six.moves.range(self.socket_number):
            curl = pycurl.Curl()
            self.connection_count[id(curl)] = 0
            self.freelist.append(curl)
            # self.multi.handles.append(curl)
    def ready_for_task(self):
        # Truthy when at least one free Curl handle is available.
        return len(self.freelist)
    def get_free_threads_number(self):
        # Number of idle Curl handles.
        return len(self.freelist)
    def get_active_threads_number(self):
        # Number of Curl handles currently processing requests.
        return self.socket_number - len(self.freelist)
    def process_connection_count(self, curl):
        """Return a Curl handle to use, recycling *curl* after 100 uses.

        Replacing a long-lived handle with a fresh one avoids accumulating
        per-handle state (connection caches etc.) inside libcurl.
        """
        curl_id = id(curl)
        self.connection_count[curl_id] += 1
        if self.connection_count[curl_id] > 100:
            del self.connection_count[curl_id]
            del curl
            new_curl = pycurl.Curl()
            self.connection_count[id(new_curl)] = 1
            return new_curl
        else:
            return curl
    def start_task_processing(self, task, grab, grab_config_backup):
        """Bind a free Curl handle to *grab* and add it to the multi handle.

        On failure while preparing the request the handle is returned to
        the freelist and the exception is re-raised.
        """
        self.network_op_lock.acquire()
        try:
            curl = self.process_connection_count(self.freelist.pop())
            self.registry[id(curl)] = {
                'grab': grab,
                'grab_config_backup': grab_config_backup,
                'task': task,
            }
            grab.transport.curl = curl
            try:
                grab.prepare_request()
                grab.log_request()
            except Exception:
                # If some error occurred while processing the request arguments
                # then we should put curl object back to free list
                del self.registry[id(curl)]
                self.freelist.append(curl)
                raise
            else:
                # Add configured curl instance to multi-curl processor
                self.multi.add_handle(curl)
        finally:
            self.network_op_lock.release()
    def process_handlers(self):
        """Wait briefly for socket activity, then drive the multi handle.

        Calls ``perform()`` repeatedly until libcurl stops asking for
        another immediate call (E_CALL_MULTI_PERFORM).
        """
        # Ok, frankly I have really bad understanding of
        # how to deal with multicurl sockets ;-)
        # It is a sort of miracle that Grab actually works
        self.network_op_lock.acquire()
        rlist, wlist, xlist = self.multi.fdset()
        if rlist or wlist or xlist:
            timeout = self.multi.timeout()
            if timeout and timeout > 0:
                # multi.timeout() is in milliseconds; select wants seconds.
                select.select(rlist, wlist, xlist, timeout / 1000.0)
            else:
                pass
        while True:
            status, active_objects = self.multi.perform()
            if status != pycurl.E_CALL_MULTI_PERFORM:
                break
        self.network_op_lock.release()
    def iterate_results(self):
        """Yield a result dict for every finished transfer.

        Each yielded dict has keys: ok, ecode, emsg, error_abbr, grab,
        grab_config_backup, task.  Finished handles are removed from the
        multi processor, reset and returned to the freelist.
        """
        while True:
            #try:
            queued_messages, ok_list, fail_list = self.multi.info_read()
            #except Exception as ex:
            #    # Usually that should not happen
            #    logging.error('', exc_info=ex)
            #    continue
            results = []
            for curl in ok_list:
                results.append((True, curl, None, None))
            for curl, ecode, emsg in fail_list:
                # CURLE_WRITE_ERROR (23)
                # An error occurred when writing received data
                # to a local file, or
                # an error was returned to libcurl from a write callback.
                # This exception should be ignored if _callback_interrupted
                # flag
                # is enabled (this happens when nohead or
                # nobody options enabeld)
                #
                # Also this error is raised when curl receives
                # KeyboardInterrupt
                # while it is processing some callback function
                # (WRITEFUNCTION, HEADERFUNCTIO, etc)
                if ecode == 23:
                    if getattr(curl, '_callback_interrupted', None) is True:
                        curl._callback_interrupted = False
                        results.append((True, curl, None, None))
                    else:
                        results.append((False, curl, ecode, emsg))
                else:
                    results.append((False, curl, ecode, emsg))
            for ok, curl, ecode, emsg in results:
                # FORMAT: {ok, grab, grab_config_backup, task, emsg}
                curl_id = id(curl)
                task = self.registry[curl_id]['task']
                grab = self.registry[curl_id]['grab']
                grab_config_backup =\
                    self.registry[curl_id]['grab_config_backup']
                try:
                    grab.process_request_result()
                except GrabTooManyRedirectsError:
                    ecode = ERROR_TOO_MANY_REFRESH_REDIRECTS
                    emsg = 'Too many meta refresh redirects'
                    ok = False
                #except Exception as ex:
                #    logging.error('', exc_info=ex)
                #    ecode = ERROR_INTERNAL_GRAB_ERROR
                #    emsg = 'Internal grab error'
                #    ok = False
                grab.response.error_code = ecode
                grab.response.error_msg = emsg
                # Free resources
                del self.registry[curl_id]
                grab.transport.curl = None
                if ok:
                    error_abbr = None
                else:
                    error_abbr = ERROR_ABBR.get(ecode, 'unknown-%d' % ecode)
                yield {'ok': ok,
                       'ecode': ecode,
                       'emsg': emsg,
                       'error_abbr': error_abbr,
                       'grab': grab,
                       'grab_config_backup': grab_config_backup,
                       'task': task}
                self.multi.remove_handle(curl)
                curl.reset()
                self.freelist.append(curl)
            if not queued_messages:
                break
| [
"six.moves.range",
"threading.Lock",
"select.select",
"pycurl.CurlMulti",
"pycurl.Curl"
] | [((631, 649), 'pycurl.CurlMulti', 'pycurl.CurlMulti', ([], {}), '()\n', (647, 649), False, 'import pycurl\n'), ((802, 808), 'threading.Lock', 'Lock', ([], {}), '()\n', (806, 808), False, 'from threading import Lock\n'), ((859, 894), 'six.moves.range', 'six.moves.range', (['self.socket_number'], {}), '(self.socket_number)\n', (874, 894), False, 'import six\n'), ((915, 928), 'pycurl.Curl', 'pycurl.Curl', ([], {}), '()\n', (926, 928), False, 'import pycurl\n'), ((1556, 1569), 'pycurl.Curl', 'pycurl.Curl', ([], {}), '()\n', (1567, 1569), False, 'import pycurl\n'), ((3083, 3135), 'select.select', 'select.select', (['rlist', 'wlist', 'xlist', '(timeout / 1000.0)'], {}), '(rlist, wlist, xlist, timeout / 1000.0)\n', (3096, 3135), False, 'import select\n')] |
import pygame
import screen
import os
import socket
import protocol
import logging
from screen import *
clock = pygame.time.Clock()
# Load and scale the shared UI images once at import time.
BACKGROUND_IMAGE_PATH = os.path.join(PICTURES_PATH, 'opening_screen_picture.png')
bg_image = pygame.image.load(BACKGROUND_IMAGE_PATH)
bg_image = pygame.transform.scale(bg_image, (SCREEN_WIDTH, SCREEN_HEIGHT))
BACK_BUTTON_IMAGE_PATH = os.path.join(PICTURES_PATH, 'back_sign.png')
back_button_image = pygame.image.load(BACK_BUTTON_IMAGE_PATH)
back_button_image = pygame.transform.scale(back_button_image, (int(SCREEN_WIDTH/10), int(SCREEN_HEIGHT/10)))
REFRESH_BUTTON_IMAGE_PATH = os.path.join(PICTURES_PATH, 'refresh_button.png')
refresh_button_image = pygame.image.load(REFRESH_BUTTON_IMAGE_PATH)
# Refresh button is scaled to match the back button's dimensions.
refresh_button_image = pygame.transform.scale(refresh_button_image,
                                              (back_button_image.get_width(), back_button_image.get_height()))
SOUNDS_PATH = 'sounds'
PASSIVE_TEXTBOX_COLOR = colors.WHITE
ACTIVE_TEXTBOX_COLOR = colors.LIGHT_BLUE
# Game-mode identifiers stored in the module-level `game_type`.
BOT_GAME_TYPE = 1
ONLINE_GAME_TYPE = 2
TWO_PLAYERS_GAME_TYPE = 3
# Keys in rectangles dict
START_GAME = 0
NUMBER_OF_PLAYERS = 1
GAME_LENGTH = 2
BOT_LEVEL = 3
TEAM_SELECTION = 4
ONLINE_GAME = 5
BACK_BUTTON = 8
JOIN_GAME_RECTS = 9
REFRESH_BUTTON = 10
MAX_USERNAME_LENGTH = 10
# Groups whose dict value is a single rect rather than a text->rect dict.
ONE_RECT_GROUPS = [START_GAME, ONLINE_GAME, BACK_BUTTON, REFRESH_BUTTON]
# default values
is_one_players_playing = True
game_length = 5 # In minutes.
level = 3 # Bot Depth
is_white = True
username = ""
my_socket = None
opponent_player_name = ""
game_type = BOT_GAME_TYPE
TEXT_BOX_HEIGHT = REGULAR_FONT.get_height() + 20
TEXT_BOX_WIDTH = 600
class WaitingGame:
    """A hosted online game that is waiting for a second player to join.

    Stores the hosting player's name, the joining player's team (the
    opposite of the host's), and the game length in minutes.
    """

    def __init__(self, name, is_other_player_white, current_game_length):
        self.length = current_game_length
        self.opponent_player_name = name
        # The host already chose a team, so the joiner takes the other one.
        self.is_white = not is_other_player_white
def starting_screen():
    """Render the opening screen and run its event loop.

    Draws the background, title and option rectangles, then dispatches
    mouse/keyboard events via handle_event() until a flow-control
    exception (e.g. FinishStartingScreen) is raised.
    """
    global game_type
    game_type = BOT_GAME_TYPE if is_one_players_playing else TWO_PLAYERS_GAME_TYPE
    # Print background image.
    screen.blit(bg_image, (0, 0))
    # Print title.
    text = LARGE_FONT.render("BeCheZ", False, colors.YELLOW)
    screen.blit(text, (SCREEN_WIDTH / 2 - text.get_width() / 2, 50))
    rectangles = set_rectangles()
    while True:
        pygame.display.flip()
        for event in pygame.event.get():
            handle_event(event, rectangles)
def online_screen(*ignore):
    """Show the online-game screen: username entry plus JOIN/CREATE buttons.

    Runs its own event loop; updates the module-level `username` as the
    user types and routes to create_game()/join_game_screen() on click.
    Extra positional args are accepted (and ignored) so this can sit in
    the rect_group_to_function dispatch table.
    """
    global username
    global game_type
    global my_socket
    game_type = ONLINE_GAME_TYPE
    screen.blit(bg_image, (0, 0))
    back_button_rect = draw_and_get_back_button()
    # Print title.
    text = LARGE_FONT.render("ENTER YOUR NAME:", False, colors.DARK_BLUE)
    screen.blit(text, (SCREEN_WIDTH / 2 - text.get_width() / 2, 100))
    # Text box rectangle to get input from user.
    text_box = pygame.Rect(MIDDLE_HORIZONTAL - TEXT_BOX_WIDTH/2, SCREEN_HEIGHT/2, TEXT_BOX_WIDTH, TEXT_BOX_HEIGHT)
    create_game_rect, join_game_rect, create_game_text, join_game_text = get_join_create_rectangles(text_box)
    # Colors of text box
    is_active = False
    draw_text_box(username, text_box, is_active)
    while True:
        pygame.display.flip()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                raise exceptions.UserExitGame
            if event.type == pygame.MOUSEBUTTONDOWN:
                # Clicking inside the text box activates typing.
                if text_box.collidepoint(pygame.mouse.get_pos()):
                    is_active = True
                else:
                    is_active = False
                draw_text_box(username, text_box, is_active)
                if back_button_rect.collidepoint(pygame.mouse.get_pos()):
                    return starting_screen()
                if create_game_rect.collidepoint(pygame.mouse.get_pos()):
                    # A non-empty username is required to create a game.
                    if len(username) > 0:
                        create_game()
                    else:
                        pygame.mixer.Sound(os.path.join(SOUNDS_PATH, 'error.wav')).play()
                if join_game_rect.collidepoint(pygame.mouse.get_pos()):
                    if len(username) > 0:
                        join_game_screen()
                    else:
                        pygame.mixer.Sound(os.path.join(SOUNDS_PATH, 'error.wav')).play()
            if is_active and event.type == pygame.KEYDOWN:
                # Delete last letter.
                if event.key == pygame.K_BACKSPACE:
                    username = username[:-1]
                elif len(username) < MAX_USERNAME_LENGTH:
                    username += event.unicode
                draw_text_box(username, text_box, is_active)
                # deactivate create and join game rect.
                if len(username) == 0:
                    pygame.draw.rect(screen, colors.WHITE, create_game_rect)
                    pygame.draw.rect(screen, colors.WHITE, join_game_rect)
                # Activate create and join game rect.
                else:
                    pygame.draw.rect(screen, colors.LIGHT_BLUE, create_game_rect)
                    pygame.draw.rect(screen, colors.LIGHT_BLUE, join_game_rect)
                # The 25 is padding from rect and text
                screen.blit(create_game_text, (MIDDLE_HORIZONTAL - create_game_text.get_width() / 2, create_game_rect.top + 25))
                screen.blit(join_game_text, (MIDDLE_HORIZONTAL - join_game_text.get_width() / 2, join_game_rect.top + 25))
def get_join_create_rectangles(textbox: pygame.Rect) -> tuple:
    """Draw the JOIN GAME and CREATE GAME buttons below *textbox*.

    Returns (create_game_rect, join_game_rect, create_game_text,
    join_game_text) so the caller can redraw/recolor them later.
    """
    text = REGULAR_FONT.render("JOIN GAME", False, colors.BLACK)
    # 50/100 are horizontal padding; the button sits one text-height below the box.
    join_game_rect = pygame.Rect(MIDDLE_HORIZONTAL - (text.get_width() / 2 + 50),
                                 textbox.bottom + text.get_height() + 50, text.get_width() + 100, text.get_height() + 50)
    pygame.draw.rect(screen, colors.YELLOW, join_game_rect)
    screen.blit(text, (join_game_rect.centerx - text.get_width() / 2, join_game_rect.centery - text.get_height() / 2))
    join_game_text = text
    text = REGULAR_FONT.render("CREATE GAME", False, colors.BLACK)
    create_game_rect = pygame.Rect(MIDDLE_HORIZONTAL - (text.get_width() / 2 + 50),
                                   join_game_rect.bottom + text.get_height() + 50,
                                   text.get_width() + 100, text.get_height() + 50)
    pygame.draw.rect(screen, colors.WHITE, create_game_rect)
    screen.blit(text, (create_game_rect.centerx - text.get_width() / 2, create_game_rect.top + 25))
    create_game_text = text
    return create_game_rect, join_game_rect, create_game_text, join_game_text
def join_game_screen(*ignore):
    """List open games fetched from the server and let the user pick one.

    Connects to the server (if not already connected), requests the list
    of waiting games, renders one clickable rectangle per game, then
    loops dispatching events through handle_event().
    """
    global opponent_player_name
    rectangles = dict()
    # Print background.
    screen.blit(bg_image, (0, 0))
    rectangles[BACK_BUTTON] = draw_and_get_back_button()
    rectangles[REFRESH_BUTTON] = draw_and_get_refresh_button()
    # Join server.
    connect_to_server()
    final_request = protocol.Request(username, protocol.GET_GAMES).set_request_to_server()
    my_socket.send(final_request)
    games_list = get_games_list()
    logging.debug(f"Games list is: {games_list}")
    rectangles[JOIN_GAME_RECTS] = create_join_game_rectangles(games_list)
    while True:
        pygame.display.flip()
        for event in pygame.event.get():
            try:
                handle_event(event, rectangles)
            except exceptions.BackToLastScreen:
                online_screen()
            except exceptions.JoinGameError:
                # Joining failed (e.g. game already taken) — play error sound.
                pygame.mixer.Sound(os.path.join(SOUNDS_PATH, 'error.wav')).play()
def create_game():
    """Register a new game on the server and block until someone joins.

    Sends team choice + zero-padded game length, shows a waiting message,
    then blocks on the socket until the opponent's name arrives.  Raises
    FinishStartingScreen to hand control back to the main flow.
    """
    global opponent_player_name
    screen.blit(bg_image, (0, 0))
    connect_to_server()
    logging.debug("Creating game")
    # Wire format: '1'/'0' team flag followed by 2-digit game length.
    msg_content = "1" if is_white else "0"
    msg_content += str(game_length).zfill(2)
    my_socket.send(protocol.Request(username, protocol.CREATE_GAME, msg_content).set_request_to_server())
    text = LARGE_FONT.render("waiting for second player...", False, colors.DARK_BLUE)
    screen.blit(text, (SCREEN_WIDTH/2 - text.get_width()/2, SCREEN_HEIGHT/2 - text.get_height()/2))
    pygame.display.flip()
    # Length-prefixed name: 1 byte of length, then the name itself.
    opponent_player_name_length = int(my_socket.recv(1).decode())
    opponent_player_name = my_socket.recv(opponent_player_name_length).decode()
    raise exceptions.FinishStartingScreen
def create_join_game_rectangles(games_name: list):
    """Draw one clickable row per waiting game; return {WaitingGame: Rect}.

    Stops adding rows once the next one would run off the bottom of the
    screen.
    """
    rectangle_width = int(SCREEN_WIDTH * (3/4))
    rectangle_height = REGULAR_FONT.get_height() + 20
    last_rectangle_bottom = 0
    rectangles = dict()
    for game in games_name:
        text_string = f"opponent player is: {game.opponent_player_name} - "
        your_team = "white team" if game.is_white else "black team"
        text_string += f"your team: {your_team} - "
        text_string += f"game length: {str(game.length)}"
        text = REGULAR_FONT.render(text_string, False, colors.WHITE)
        # Rows are stacked with one row-height of spacing between them.
        current_game_rect = pygame.Rect(MIDDLE_HORIZONTAL - int(rectangle_width/2),
                                        last_rectangle_bottom + rectangle_height, rectangle_width, rectangle_height)
        last_rectangle_bottom = current_game_rect.bottom
        rectangles[game] = current_game_rect
        pygame.draw.rect(screen, colors.DARK_BLUE, current_game_rect)
        screen.blit(text, (MIDDLE_HORIZONTAL - text.get_width()/2, current_game_rect.top + 10))
        # Rect wouldn't be out of screen
        if last_rectangle_bottom + (rectangle_height*2) >= SCREEN_HEIGHT:
            break
    pygame.display.flip()
    return rectangles
def get_games_list() -> list:
    """Read the server's list of games waiting for a second player.

    Wire format: 1 byte game count, then per game: 1 byte name length,
    the name, 1 byte team flag ('1' if the host is white), 2 bytes game
    length in minutes.

    :return: A list of WaitingGame objects.
    """
    games = list()
    list_length = my_socket.recv(1).decode()
    logging.debug(f"number of players waiting for their games is {list_length}")
    for x in range(int(list_length)):
        name_length = int(my_socket.recv(1).decode())
        name = my_socket.recv(name_length).decode()
        is_opponent_player_white = int(my_socket.recv(1).decode())
        current_game_length = int(my_socket.recv(2).decode())
        games.append(WaitingGame(name, is_opponent_player_white, current_game_length))
    return games
def draw_text_box(username, text_box, is_active):
    """Redraw the username text box; highlighted when *is_active* is True."""
    text_box_color = ACTIVE_TEXTBOX_COLOR if is_active else PASSIVE_TEXTBOX_COLOR
    pygame.draw.rect(screen, text_box_color, text_box)
    text = REGULAR_FONT.render(username, False, colors.BLACK)
    screen.blit(text, (text_box.left + 10, text_box.top + 10))
def connect_to_server():
    """Open the module-level server socket; no-op if already connected."""
    global my_socket
    if my_socket is not None:
        return
    my_socket = socket.socket()
    my_socket.connect((protocol.SERVER_IP, protocol.SERVER_PORT))
def set_rectangles():
    """Draw all starting-screen widgets and return {group_key: rect(s)}.

    Keys are the module-level group constants (START_GAME, ONLINE_GAME,
    NUMBER_OF_PLAYERS, ...).  Single-rect groups map to a pygame.Rect;
    the rest map to a {label: Rect} dict.
    """
    rectangles = dict()
    # Print start game rect.
    text = REGULAR_FONT.render("START GAME", False, colors.BLACK)
    rect_high = text.get_height() + 50
    rect_width = text.get_width() + 50
    start_game_rect = pygame.Rect(MIDDLE_HORIZONTAL - rect_width/2, 550, rect_width, rect_high)
    pygame.draw.rect(screen, colors.YELLOW, start_game_rect)
    screen.blit(text, (start_game_rect.centerx - text.get_width() / 2, start_game_rect.centery - text.get_height() / 2))
    rectangles[START_GAME] = start_game_rect
    # Print online game rect
    text = REGULAR_FONT.render("ONLINE GAME", False, colors.BLACK)
    rect_width = text.get_width() + 50
    rect_high = text.get_height() + 50
    online_game_rect = pygame.Rect(MIDDLE_HORIZONTAL - rect_width / 2, start_game_rect.top+200, rect_width, rect_high)
    pygame.draw.rect(screen, colors.YELLOW, online_game_rect)
    screen.blit(text, (online_game_rect.centerx - text.get_width() / 2, online_game_rect.centery - text.get_height() / 2))
    rectangles[ONLINE_GAME] = online_game_rect
    rectangles[NUMBER_OF_PLAYERS] = create_players_count_rects()
    rectangles[GAME_LENGTH] = create_small_rects("GAME_LENGTH", GAME_LENGTH_OPTION, default=game_length,
                                                 color=colors.DARK_RED, chosen_color=colors.RED, is_left=True)
    rectangles[BOT_LEVEL] = create_small_rects("BOT LEVEL", range(1, 5), default=level,
                                               color=colors.DARK_BLUE, chosen_color=colors.LIGHT_BLUE, is_left=False)
    # Passing the 'one player' rect as argument to the function.
    rectangles[TEAM_SELECTION] = draw_team_selection_rects(rectangles[NUMBER_OF_PLAYERS]["One Player"].midright, is_white)
    return rectangles
def handle_event(event, rectangles):
    """Dispatch a pygame event: quit on QUIT, route clicks to handlers.

    A click is resolved via get_rect() and forwarded to the matching
    entry in rect_group_to_function; clicks outside any rect are ignored.
    """
    if event.type == pygame.QUIT:
        raise exceptions.UserExitGame
    elif event.type == pygame.MOUSEBUTTONDOWN:
        mouse_pos = pygame.mouse.get_pos()
        try:
            rect_group, rect_clicked, text_in_rect = get_rect(mouse_pos, rectangles)
            return rect_group_to_function[rect_group](rect_clicked, text_in_rect, rectangles)
        # The user clicked on something that not the rect
        except exceptions.NonReturnValue:
            pass
def get_rect(mouse_pos, rectangles):
    """Find the rect under *mouse_pos*; return (group, rect, label_or_None).

    Raises exceptions.NonReturnValue when the click hits no rectangle.
    """
    for rect_group in rectangles:
        if rect_group in ONE_RECT_GROUPS:
            # Group holds a single rect, no per-option label.
            rect = rectangles[rect_group]
            if rect.collidepoint(*mouse_pos):
                return rect_group, rect, None
        else:
            # Group holds a {label: rect} dict (or {WaitingGame: rect}).
            rects = rectangles[rect_group]
            for text in rects:
                rect = rects[text]
                if rect.collidepoint(*mouse_pos):
                    return rect_group, rect, text
    raise exceptions.NonReturnValue
def join_to(rect_clicked, game: WaitingGame, rectangles):
    """Ask the server to join *game*; on success adopt its settings.

    Raises FinishStartingScreen on success, JoinGameError on refusal.
    """
    final_request = protocol.Request(username, protocol.JOIN_GAME, game.opponent_player_name).set_request_to_server()
    my_socket.send(final_request)
    if my_socket.recv(1) == protocol.OK_MESSAGE:
        global is_white
        global opponent_player_name
        global game_length
        is_white = game.is_white
        game_length = game.length
        opponent_player_name = game.opponent_player_name
        raise exceptions.FinishStartingScreen
    else:
        raise exceptions.JoinGameError
def set_team(rect_clicked, text, rectangles):
    """Set the local player's team from the clicked label and recolor rects.

    Ignored in two-player mode, where team selection is hidden.
    """
    if not is_one_players_playing:
        return
    global is_white
    is_white = True if text == "WHITE TEAM" else False
    set_rects_color(rectangles[TEAM_SELECTION], rect_clicked,
                    colors.LIGHT_SILVER, colors.DARK_SILVER, colors.BLACK)
def back_to_last_screen(*ignore):
    """Signal the caller to return to the previous screen."""
    raise exceptions.BackToLastScreen
def finish_starting_screen(*ignore):
    """Signal that configuration is done and the game should start."""
    raise exceptions.FinishStartingScreen
def set_number_of_players(rect_clicked, text, rectangles):
    """Toggle one/two player mode; show or hide team-selection rects."""
    global is_one_players_playing
    global game_type
    is_one_players_playing = (text == 'One Player')
    if is_one_players_playing:
        # Passing the 'one player' rect as argument to the function.
        draw_team_selection_rects(rectangles[NUMBER_OF_PLAYERS]["One Player"].midright, is_white)
        game_type = BOT_GAME_TYPE
    else:
        # Erase team selection rectangles.
        screen.blit(bg_image, rectangles[TEAM_SELECTION]["WHITE TEAM"].topleft,
                    rectangles[TEAM_SELECTION]["WHITE TEAM"])
        screen.blit(bg_image, rectangles[TEAM_SELECTION]["BLACK TEAM"].topleft,
                    rectangles[TEAM_SELECTION]["BLACK TEAM"])
        game_type = TWO_PLAYERS_GAME_TYPE
    set_rects_color(rectangles[NUMBER_OF_PLAYERS], rect_clicked,
                    colors.LIGHT_SILVER, colors.DARK_SILVER, colors.BLACK)
def set_bot_level(rect_clicked, text, rectangles):
    """Set the bot search depth from the clicked option's label."""
    global level
    level = int(text)
    set_rects_color(rectangles[BOT_LEVEL], rect_clicked, colors.LIGHT_BLUE, colors.DARK_BLUE)
def set_game_length(rect_clicked, text, rectangles):
    """Set the game length (minutes) from the clicked option's label."""
    global game_length
    game_length = int(text)
    set_rects_color(rectangles[GAME_LENGTH], rect_clicked, colors.RED, colors.DARK_RED)
def create_small_rects(title, options, default, color, chosen_color, is_left):
    """Draw a titled column of small option rects on a screen edge.

    :param title: column caption drawn at the top.
    :param options: iterable of option values (stringified for labels).
    :param default: option drawn in *chosen_color* instead of *color*.
    :param is_left: True for the left screen edge, False for the right.
    :return: dict mapping option label text to its pygame.Rect.
    """
    # Draw the rectangles in the sides of the starting screen.
    # Return a dictionary. the key is the text and the value is the rect.
    rects = {}
    current_print_height = 100
    x_pos = 5 if is_left else (SCREEN_WIDTH - SMALL_RECT_WIDTH - 5)
    for option in options:
        # Set the color of the rect. the chosen option is in other color.
        rect_color = chosen_color if option == default else color
        rect = pygame.Rect(x_pos, current_print_height, SMALL_RECT_WIDTH,
                           SMALL_RECT_HEIGHT)
        pygame.draw.rect(screen, rect_color, rect)
        # print the text in rect
        text = f"{option}"
        text_surface = REGULAR_FONT.render(text, False, colors.WHITE)
        screen.blit(text_surface, (rect.centerx - text_surface.get_width() / 2
                                   , rect.centery - text_surface.get_height() / 2))
        rects[text] = rect
        current_print_height += (SMALL_RECT_HEIGHT * 2)
    # Print title.
    text_surface = REGULAR_FONT.render(title, True, color)
    if is_left:
        screen.blit(text_surface,
                    (max(rect.centerx - text_surface.get_width() / 2, 0), 10))  # Space from top.
    else:
        screen.blit(text_surface,
                    (min(rect.centerx - text_surface.get_width() / 2, SCREEN_WIDTH - text_surface.get_width() - 10),
                     10))  # Space from top.
    return rects
def create_players_count_rects():
    """Draw the 'One Player' / 'Two Players' buttons; return {label: Rect}."""
    # Return a dictionary. the key is the text and the value is the rect.
    rects = dict()
    current_print_height = 150
    one_player_rect = pygame.Rect(MIDDLE_HORIZONTAL - RECT_WIDTH / 2, current_print_height, RECT_WIDTH, RECT_HEIGHT)
    # One Player is the default, so it starts highlighted (light silver).
    pygame.draw.rect(screen, colors.LIGHT_SILVER, one_player_rect)
    text = "One Player"
    text_surface = REGULAR_FONT.render(text, False, colors.BLACK)
    screen.blit(text_surface, (one_player_rect.centerx - text_surface.get_width() / 2,
                               one_player_rect.centery - text_surface.get_height() / 2))
    current_print_height += 200
    rects[text] = one_player_rect
    two_player_rect = pygame.Rect(MIDDLE_HORIZONTAL - RECT_WIDTH / 2, current_print_height, RECT_WIDTH, RECT_HEIGHT)
    pygame.draw.rect(screen, colors.DARK_SILVER, two_player_rect)
    text = "Two Players"
    text_surface = REGULAR_FONT.render(text, False, colors.BLACK)
    screen.blit(text_surface, (two_player_rect.centerx - text_surface.get_width() / 2,
                               two_player_rect.centery - text_surface.get_height() / 2))
    rects[text] = two_player_rect
    return rects
def draw_team_selection_rects(one_player_rect_cords, isWhite=True):
    """Draw the WHITE TEAM / BLACK TEAM selection buttons.

    :param one_player_rect_cords: (x, y) midright of the 'One Player' rect;
        the team buttons are placed to its right, one above and one below.
    :param isWhite: when True the white-team button is highlighted.
    :return: dict mapping label text ("WHITE TEAM"/"BLACK TEAM") to its Rect.
    """
    x_pos, y_pos = one_player_rect_cords
    x_pos += SCREEN_WIDTH / 10
    white_team_y_pos = y_pos - SCREEN_WIDTH / 20
    black_team_y_pos = y_pos + SCREEN_WIDTH / 20
    # The currently-selected team is drawn in the lighter shade.
    white_team_color, black_team_color = (colors.LIGHT_SILVER, colors.DARK_SILVER) if isWhite else \
        (colors.DARK_SILVER, colors.LIGHT_SILVER)
    rects = {}
    rect = pygame.Rect(x_pos, white_team_y_pos, RECT_WIDTH, RECT_HEIGHT)
    pygame.draw.rect(screen, white_team_color, rect)
    text = "WHITE TEAM"
    text_surface = REGULAR_FONT.render(text, False, colors.BLACK)
    screen.blit(text_surface, (rect.centerx - text_surface.get_width() / 2,
                               rect.centery - text_surface.get_height() / 2))
    rects[text] = rect
    rect = pygame.Rect(x_pos, black_team_y_pos, RECT_WIDTH, RECT_HEIGHT)
    pygame.draw.rect(screen, black_team_color, rect)
    # BUG FIX: this label was a "<NAME>" placeholder; set_number_of_players
    # and set_team look up the "BLACK TEAM" key, which raised KeyError.
    text = "BLACK TEAM"
    text_surface = REGULAR_FONT.render(text, False, colors.BLACK)
    screen.blit(text_surface, (rect.centerx - text_surface.get_width() / 2,
                               rect.centery - text_surface.get_height() / 2))
    rects[text] = rect
    return rects
def set_rects_color(rects_and_texts: dict, chosen_rect, chosen_rect_color, unchosen_rect_color,
                    text_color=colors.WHITE):
    """Redraw every rect in *rects_and_texts*, highlighting *chosen_rect*.

    NOTE(review): both branches of the width check below blit identically;
    the `if rect.width == RECT_WIDTH` distinction currently has no effect.
    """
    for text, rect in rects_and_texts.items():
        color = chosen_rect_color if rect is chosen_rect else unchosen_rect_color
        pygame.draw.rect(screen, color, rect)
        text_surface = REGULAR_FONT.render(text, False, text_color)
        if rect.width == RECT_WIDTH:
            screen.blit(text_surface, (rect.centerx - text_surface.get_width() / 2,
                                       rect.centery - text_surface.get_height() / 2))
        else:
            screen.blit(text_surface, (rect.centerx - text_surface.get_width() / 2,
                                       rect.centery - text_surface.get_height() / 2))
def draw_and_get_back_button():
    """Blit the back button at the bottom-left; return its clickable Rect."""
    back_button_x_pos = 0
    back_button_y_pos = SCREEN_HEIGHT - back_button_image.get_height() - 20
    screen.blit(back_button_image, (back_button_x_pos, back_button_y_pos))
    tmp_rect = back_button_image.get_rect()
    tmp_rect.topleft = (back_button_x_pos, back_button_y_pos)
    return tmp_rect
def draw_and_get_refresh_button():
    """Blit the refresh button at the bottom-right; return its clickable Rect."""
    refresh_button_x_pos = SCREEN_WIDTH - refresh_button_image.get_width()
    refresh_button_y_pos = SCREEN_HEIGHT - refresh_button_image.get_height() - 20
    screen.blit(refresh_button_image, (refresh_button_x_pos, refresh_button_y_pos))
    # BUG FIX: the rect's height used get_width(), so the clickable area
    # did not match the drawn image unless the image happened to be square.
    return pygame.Rect(refresh_button_x_pos, refresh_button_y_pos,
                       refresh_button_image.get_width(), refresh_button_image.get_height())
# Click-dispatch table: maps a rectangle-group key (see constants above)
# to the handler invoked as handler(rect_clicked, text_in_rect, rectangles).
rect_group_to_function = dict()
rect_group_to_function[START_GAME] = finish_starting_screen
rect_group_to_function[NUMBER_OF_PLAYERS] = set_number_of_players
rect_group_to_function[GAME_LENGTH] = set_game_length
rect_group_to_function[BOT_LEVEL] = set_bot_level
rect_group_to_function[TEAM_SELECTION] = set_team
rect_group_to_function[ONLINE_GAME] = online_screen
rect_group_to_function[BACK_BUTTON] = back_to_last_screen
rect_group_to_function[JOIN_GAME_RECTS] = join_to
rect_group_to_function[REFRESH_BUTTON] = join_game_screen
| [
"logging.debug",
"pygame.draw.rect",
"pygame.event.get",
"pygame.Rect",
"socket.socket",
"screen.blit",
"pygame.display.flip",
"pygame.transform.scale",
"pygame.image.load",
"pygame.mouse.get_pos",
"protocol.Request",
"pygame.time.Clock",
"os.path.join"
] | [((113, 132), 'pygame.time.Clock', 'pygame.time.Clock', ([], {}), '()\n', (130, 132), False, 'import pygame\n'), ((158, 215), 'os.path.join', 'os.path.join', (['PICTURES_PATH', '"""opening_screen_picture.png"""'], {}), "(PICTURES_PATH, 'opening_screen_picture.png')\n", (170, 215), False, 'import os\n'), ((227, 267), 'pygame.image.load', 'pygame.image.load', (['BACKGROUND_IMAGE_PATH'], {}), '(BACKGROUND_IMAGE_PATH)\n', (244, 267), False, 'import pygame\n'), ((279, 342), 'pygame.transform.scale', 'pygame.transform.scale', (['bg_image', '(SCREEN_WIDTH, SCREEN_HEIGHT)'], {}), '(bg_image, (SCREEN_WIDTH, SCREEN_HEIGHT))\n', (301, 342), False, 'import pygame\n'), ((369, 413), 'os.path.join', 'os.path.join', (['PICTURES_PATH', '"""back_sign.png"""'], {}), "(PICTURES_PATH, 'back_sign.png')\n", (381, 413), False, 'import os\n'), ((434, 475), 'pygame.image.load', 'pygame.image.load', (['BACK_BUTTON_IMAGE_PATH'], {}), '(BACK_BUTTON_IMAGE_PATH)\n', (451, 475), False, 'import pygame\n'), ((614, 663), 'os.path.join', 'os.path.join', (['PICTURES_PATH', '"""refresh_button.png"""'], {}), "(PICTURES_PATH, 'refresh_button.png')\n", (626, 663), False, 'import os\n'), ((687, 731), 'pygame.image.load', 'pygame.image.load', (['REFRESH_BUTTON_IMAGE_PATH'], {}), '(REFRESH_BUTTON_IMAGE_PATH)\n', (704, 731), False, 'import pygame\n'), ((2032, 2061), 'screen.blit', 'screen.blit', (['bg_image', '(0, 0)'], {}), '(bg_image, (0, 0))\n', (2043, 2061), False, 'import screen\n'), ((2509, 2538), 'screen.blit', 'screen.blit', (['bg_image', '(0, 0)'], {}), '(bg_image, (0, 0))\n', (2520, 2538), False, 'import screen\n'), ((2818, 2925), 'pygame.Rect', 'pygame.Rect', (['(MIDDLE_HORIZONTAL - TEXT_BOX_WIDTH / 2)', '(SCREEN_HEIGHT / 2)', 'TEXT_BOX_WIDTH', 'TEXT_BOX_HEIGHT'], {}), '(MIDDLE_HORIZONTAL - TEXT_BOX_WIDTH / 2, SCREEN_HEIGHT / 2,\n TEXT_BOX_WIDTH, TEXT_BOX_HEIGHT)\n', (2829, 2925), False, 'import pygame\n'), ((5698, 5753), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.YELLOW', 
'join_game_rect'], {}), '(screen, colors.YELLOW, join_game_rect)\n', (5714, 5753), False, 'import pygame\n'), ((6221, 6277), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.WHITE', 'create_game_rect'], {}), '(screen, colors.WHITE, create_game_rect)\n', (6237, 6277), False, 'import pygame\n'), ((6602, 6631), 'screen.blit', 'screen.blit', (['bg_image', '(0, 0)'], {}), '(bg_image, (0, 0))\n', (6613, 6631), False, 'import screen\n'), ((6960, 7005), 'logging.debug', 'logging.debug', (['f"""Games list is: {games_list}"""'], {}), "(f'Games list is: {games_list}')\n", (6973, 7005), False, 'import logging\n'), ((7500, 7529), 'screen.blit', 'screen.blit', (['bg_image', '(0, 0)'], {}), '(bg_image, (0, 0))\n', (7511, 7529), False, 'import screen\n'), ((7559, 7589), 'logging.debug', 'logging.debug', (['"""Creating game"""'], {}), "('Creating game')\n", (7572, 7589), False, 'import logging\n'), ((7974, 7995), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (7993, 7995), False, 'import pygame\n'), ((9353, 9374), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (9372, 9374), False, 'import pygame\n'), ((9668, 9744), 'logging.debug', 'logging.debug', (['f"""number of players waiting for their games is {list_length}"""'], {}), "(f'number of players waiting for their games is {list_length}')\n", (9681, 9744), False, 'import logging\n'), ((10260, 10310), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'text_box_color', 'text_box'], {}), '(screen, text_box_color, text_box)\n', (10276, 10310), False, 'import pygame\n'), ((10377, 10435), 'screen.blit', 'screen.blit', (['text', '(text_box.left + 10, text_box.top + 10)'], {}), '(text, (text_box.left + 10, text_box.top + 10))\n', (10388, 10435), False, 'import screen\n'), ((10545, 10560), 'socket.socket', 'socket.socket', ([], {}), '()\n', (10558, 10560), False, 'import socket\n'), ((10870, 10945), 'pygame.Rect', 'pygame.Rect', (['(MIDDLE_HORIZONTAL - rect_width / 2)', '(550)', 'rect_width', 
'rect_high'], {}), '(MIDDLE_HORIZONTAL - rect_width / 2, 550, rect_width, rect_high)\n', (10881, 10945), False, 'import pygame\n'), ((10948, 11004), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.YELLOW', 'start_game_rect'], {}), '(screen, colors.YELLOW, start_game_rect)\n', (10964, 11004), False, 'import pygame\n'), ((11369, 11470), 'pygame.Rect', 'pygame.Rect', (['(MIDDLE_HORIZONTAL - rect_width / 2)', '(start_game_rect.top + 200)', 'rect_width', 'rect_high'], {}), '(MIDDLE_HORIZONTAL - rect_width / 2, start_game_rect.top + 200,\n rect_width, rect_high)\n', (11380, 11470), False, 'import pygame\n'), ((11469, 11526), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.YELLOW', 'online_game_rect'], {}), '(screen, colors.YELLOW, online_game_rect)\n', (11485, 11526), False, 'import pygame\n'), ((17440, 17538), 'pygame.Rect', 'pygame.Rect', (['(MIDDLE_HORIZONTAL - RECT_WIDTH / 2)', 'current_print_height', 'RECT_WIDTH', 'RECT_HEIGHT'], {}), '(MIDDLE_HORIZONTAL - RECT_WIDTH / 2, current_print_height,\n RECT_WIDTH, RECT_HEIGHT)\n', (17451, 17538), False, 'import pygame\n'), ((17539, 17601), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.LIGHT_SILVER', 'one_player_rect'], {}), '(screen, colors.LIGHT_SILVER, one_player_rect)\n', (17555, 17601), False, 'import pygame\n'), ((17957, 18055), 'pygame.Rect', 'pygame.Rect', (['(MIDDLE_HORIZONTAL - RECT_WIDTH / 2)', 'current_print_height', 'RECT_WIDTH', 'RECT_HEIGHT'], {}), '(MIDDLE_HORIZONTAL - RECT_WIDTH / 2, current_print_height,\n RECT_WIDTH, RECT_HEIGHT)\n', (17968, 18055), False, 'import pygame\n'), ((18056, 18117), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.DARK_SILVER', 'two_player_rect'], {}), '(screen, colors.DARK_SILVER, two_player_rect)\n', (18072, 18117), False, 'import pygame\n'), ((18853, 18914), 'pygame.Rect', 'pygame.Rect', (['x_pos', 'white_team_y_pos', 'RECT_WIDTH', 'RECT_HEIGHT'], {}), '(x_pos, white_team_y_pos, RECT_WIDTH, RECT_HEIGHT)\n', (18864, 18914), 
False, 'import pygame\n'), ((18919, 18967), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'white_team_color', 'rect'], {}), '(screen, white_team_color, rect)\n', (18935, 18967), False, 'import pygame\n'), ((19247, 19308), 'pygame.Rect', 'pygame.Rect', (['x_pos', 'black_team_y_pos', 'RECT_WIDTH', 'RECT_HEIGHT'], {}), '(x_pos, black_team_y_pos, RECT_WIDTH, RECT_HEIGHT)\n', (19258, 19308), False, 'import pygame\n'), ((19313, 19361), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'black_team_color', 'rect'], {}), '(screen, black_team_color, rect)\n', (19329, 19361), False, 'import pygame\n'), ((20563, 20633), 'screen.blit', 'screen.blit', (['back_button_image', '(back_button_x_pos, back_button_y_pos)'], {}), '(back_button_image, (back_button_x_pos, back_button_y_pos))\n', (20574, 20633), False, 'import screen\n'), ((20958, 21037), 'screen.blit', 'screen.blit', (['refresh_button_image', '(refresh_button_x_pos, refresh_button_y_pos)'], {}), '(refresh_button_image, (refresh_button_x_pos, refresh_button_y_pos))\n', (20969, 21037), False, 'import screen\n'), ((2271, 2292), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (2290, 2292), False, 'import pygame\n'), ((2315, 2333), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (2331, 2333), False, 'import pygame\n'), ((3150, 3171), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (3169, 3171), False, 'import pygame\n'), ((3193, 3211), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (3209, 3211), False, 'import pygame\n'), ((7105, 7126), 'pygame.display.flip', 'pygame.display.flip', ([], {}), '()\n', (7124, 7126), False, 'import pygame\n'), ((7148, 7166), 'pygame.event.get', 'pygame.event.get', ([], {}), '()\n', (7164, 7166), False, 'import pygame\n'), ((9056, 9117), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.DARK_BLUE', 'current_game_rect'], {}), '(screen, colors.DARK_BLUE, current_game_rect)\n', (9072, 9117), False, 'import pygame\n'), ((14906, 
15023), 'screen.blit', 'screen.blit', (['bg_image', "rectangles[TEAM_SELECTION]['WHITE TEAM'].topleft", "rectangles[TEAM_SELECTION]['WHITE TEAM']"], {}), "(bg_image, rectangles[TEAM_SELECTION]['WHITE TEAM'].topleft,\n rectangles[TEAM_SELECTION]['WHITE TEAM'])\n", (14917, 15023), False, 'import screen\n'), ((15048, 15165), 'screen.blit', 'screen.blit', (['bg_image', "rectangles[TEAM_SELECTION]['BLACK TEAM'].topleft", "rectangles[TEAM_SELECTION]['BLACK TEAM']"], {}), "(bg_image, rectangles[TEAM_SELECTION]['BLACK TEAM'].topleft,\n rectangles[TEAM_SELECTION]['BLACK TEAM'])\n", (15059, 15165), False, 'import screen\n'), ((16275, 16352), 'pygame.Rect', 'pygame.Rect', (['x_pos', 'current_print_height', 'SMALL_RECT_WIDTH', 'SMALL_RECT_HEIGHT'], {}), '(x_pos, current_print_height, SMALL_RECT_WIDTH, SMALL_RECT_HEIGHT)\n', (16286, 16352), False, 'import pygame\n'), ((16388, 16430), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'rect_color', 'rect'], {}), '(screen, rect_color, rect)\n', (16404, 16430), False, 'import pygame\n'), ((19926, 19963), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'color', 'rect'], {}), '(screen, color, rect)\n', (19942, 19963), False, 'import pygame\n'), ((6816, 6862), 'protocol.Request', 'protocol.Request', (['username', 'protocol.GET_GAMES'], {}), '(username, protocol.GET_GAMES)\n', (6832, 6862), False, 'import protocol\n'), ((12565, 12587), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (12585, 12587), False, 'import pygame\n'), ((13490, 13563), 'protocol.Request', 'protocol.Request', (['username', 'protocol.JOIN_GAME', 'game.opponent_player_name'], {}), '(username, protocol.JOIN_GAME, game.opponent_player_name)\n', (13506, 13563), False, 'import protocol\n'), ((4712, 4768), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.WHITE', 'create_game_rect'], {}), '(screen, colors.WHITE, create_game_rect)\n', (4728, 4768), False, 'import pygame\n'), ((4785, 4839), 'pygame.draw.rect', 'pygame.draw.rect', 
(['screen', 'colors.WHITE', 'join_game_rect'], {}), '(screen, colors.WHITE, join_game_rect)\n', (4801, 4839), False, 'import pygame\n'), ((4925, 4986), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.LIGHT_BLUE', 'create_game_rect'], {}), '(screen, colors.LIGHT_BLUE, create_game_rect)\n', (4941, 4986), False, 'import pygame\n'), ((5003, 5062), 'pygame.draw.rect', 'pygame.draw.rect', (['screen', 'colors.LIGHT_BLUE', 'join_game_rect'], {}), '(screen, colors.LIGHT_BLUE, join_game_rect)\n', (5019, 5062), False, 'import pygame\n'), ((7697, 7758), 'protocol.Request', 'protocol.Request', (['username', 'protocol.CREATE_GAME', 'msg_content'], {}), '(username, protocol.CREATE_GAME, msg_content)\n', (7713, 7758), False, 'import protocol\n'), ((3397, 3419), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (3417, 3419), False, 'import pygame\n'), ((3630, 3652), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (3650, 3652), False, 'import pygame\n'), ((3750, 3772), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (3770, 3772), False, 'import pygame\n'), ((4019, 4041), 'pygame.mouse.get_pos', 'pygame.mouse.get_pos', ([], {}), '()\n', (4039, 4041), False, 'import pygame\n'), ((7395, 7433), 'os.path.join', 'os.path.join', (['SOUNDS_PATH', '"""error.wav"""'], {}), "(SOUNDS_PATH, 'error.wav')\n", (7407, 7433), False, 'import os\n'), ((3924, 3962), 'os.path.join', 'os.path.join', (['SOUNDS_PATH', '"""error.wav"""'], {}), "(SOUNDS_PATH, 'error.wav')\n", (3936, 3962), False, 'import os\n'), ((4198, 4236), 'os.path.join', 'os.path.join', (['SOUNDS_PATH', '"""error.wav"""'], {}), "(SOUNDS_PATH, 'error.wav')\n", (4210, 4236), False, 'import os\n')] |
from dataclasses import dataclass
import dataclasses
import argparse
from hashlib import pbkdf2_hmac
from typing import Mapping, Any
from interstate_love_song.mapping.base import *
def hash_pass(s: str, salt="IGNORED"):
    """Derive a hex digest for password *s* via PBKDF2-HMAC-SHA256.

    The salt defaults to a fixed, publicly known value, so this is weaker
    than properly salted storage -- but still better than plaintext.
    """
    derived = pbkdf2_hmac(
        "sha256",
        s.encode("utf-8"),
        salt.encode("utf-8"),
        100000,
    )
    return derived.hex()
@dataclass
class SimpleMapperSettings:
    """Settings bundle consumed by SimpleMapper.create_from_dict."""

    # Username the mapper accepts.
    username: str = "test"
    # Output of hash_pass; "<PASSWORD>" is a placeholder, not a usable hash.
    password_hash: str = "<PASSWORD>"
    # Resources returned on successful authentication.
    # (Idiom fix: default_factory=list instead of a lambda wrapper.)
    resources: Sequence[Resource] = dataclasses.field(default_factory=list)
    # Domains considered valid.
    domains: Sequence[str] = dataclasses.field(default_factory=list)
class SimpleMapper(Mapper):
    """A very simple mapper that accepts one set of credentials and returns a given set of resources."""
    def __init__(
        self, username: str, password_hash: str, resources: Sequence[Resource], domains: Sequence[str],
    ):
        """
        :param username:
            The username to accept.
        :param password_hash:
            A password hash, output from hash_pass.
        :param resources:
            The resources handed out when the credentials match.
        :param domains:
            A list of valid domains.
        :raises TypeError:
        """
        super().__init__()
        # Coerce defensively so later comparisons are always str vs str.
        self._username = str(username)
        self._password_hash = str(password_hash)
        self._resources = list(resources)
        self._domains = list(domains)
    @property
    def username(self) -> str:
        # The single accepted username.
        return self._username
    @property
    def password_hash(self) -> str:
        # Hash the supplied password must match (see hash_pass).
        return self._password_hash
    @property
    def resources(self) -> Sequence[Resource]:
        return self._resources
    def map(self, credentials: Credentials, previous_host: Optional[str] = None) -> MapperResult:
        """Authenticate *credentials* and return a (status, resources) pair.

        On success the resources dict maps the stringified list index
        ("0", "1", ...) to the resource.  ``previous_host`` is accepted for
        interface compatibility but is not used by this mapper.

        :raises ValueError: if the username or password is not a string.
        """
        usr, psw = credentials
        if not isinstance(usr, str) or not isinstance(psw, str):
            raise ValueError("username and password must be strings.")
        if usr == self.username and hash_pass(psw) == self._password_hash:
            if self._resources:
                return (
                    MapperStatus.SUCCESS,
                    dict((str(k), v) for k, v in enumerate(self.resources)),
                )
            else:
                # Authenticated, but no resources are configured.
                return MapperStatus.NO_MACHINE, {}
        else:
            return MapperStatus.AUTHENTICATION_FAILED, {}
    @property
    def domains(self):
        return self._domains
    @property
    def name(self):
        # Identifier used to refer to this mapper type.
        return "SimpleMapper"
    @classmethod
    def create_from_dict(cls, data: Mapping[str, Any]):
        """Build a SimpleMapper from a plain dict (see SimpleMapperSettings)."""
        # Local import -- presumably avoids a circular import with the
        # settings module (TODO confirm).
        from interstate_love_song.settings import load_dict_into_dataclass
        settings = load_dict_into_dataclass(SimpleMapperSettings, data)
        return cls(settings.username, settings.password_hash, settings.resources, settings.domains,)
if __name__ == "__main__":
    # Tiny CLI helper: print the hash of a password given on the command line.
    cli = argparse.ArgumentParser("hasher")
    cli.add_argument("PASSWORD")
    options = cli.parse_args()
    print(hash_pass(options.PASSWORD))
| [
"dataclasses.field",
"interstate_love_song.settings.load_dict_into_dataclass",
"argparse.ArgumentParser"
] | [((615, 661), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (632, 661), False, 'import dataclasses\n'), ((690, 736), 'dataclasses.field', 'dataclasses.field', ([], {'default_factory': '(lambda : [])'}), '(default_factory=lambda : [])\n', (707, 736), False, 'import dataclasses\n'), ((2907, 2940), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""hasher"""'], {}), "('hasher')\n", (2930, 2940), False, 'import argparse\n'), ((2711, 2763), 'interstate_love_song.settings.load_dict_into_dataclass', 'load_dict_into_dataclass', (['SimpleMapperSettings', 'data'], {}), '(SimpleMapperSettings, data)\n', (2735, 2763), False, 'from interstate_love_song.settings import load_dict_into_dataclass\n')] |
from collections import UserDict
from six import StringIO
from gevent.lock import Semaphore
from ..utils.importing import import_dotted_path
try:
from cPickle import Unpickler, Pickler
except ImportError:
from pickle import Unpickler, Pickler
try:
StrictRedis = import_dotted_path('redis.StrictRedis')
except ImportError:
raise ImportError("redis-py is not installed. Install it using `pip install redis` "
"(see https://github.com/andymccurdy/redis-py for more details)")
class RedisStorage(object):
    """Storage backend that keeps each module's data in a Redis hash.

    Connection parameters are read from ``opts`` entries named
    ``redis_<option>`` (see ``_redis_opt_keys``); a ``redis_url`` entry, if
    present, takes precedence over the individual options.
    """

    # redis-py client options recognized in opts (looked up as 'redis_<name>').
    _redis_opt_keys = 'host', 'port', 'db', 'password', 'socket_timeout', 'socket_connect_timeout', 'socket_keepalive', \
                      'socket_keepalive_options', 'connection_pool', 'unix_socket_path', 'encoding', 'encoding_errors', \
                      'errors', 'decode_responses', 'retry_on_timeout', 'ssl', 'ssl_keyfile', 'ssl_certfile', \
                      'ssl_cert_reqs', 'ssl_ca_certs'
    # BUG FIX: 'socket_timeout' used to be listed here as well, so it was run
    # through int() before the float pass below -- truncating fractional
    # timeouts (int(1.5) -> 1) and raising ValueError for strings like "1.5".
    # It is a float option only (see _redis_float_opts).
    _redis_int_opts = 'port', 'db'
    _redis_float_opts = 'socket_timeout',
    # NOTE(review): these boolean options are never coerced in
    # _get_redis_kwargs, so string values like "false" would be passed to
    # redis-py verbatim -- confirm callers only ever supply real booleans.
    _redis_bool_opts = 'decode_responses', 'ssl', 'retry_on_timeout'

    def __init__(self, client, opts):
        """
        :param client: owning client object (kept for reference only).
        :param opts: mapping of configuration options ('redis_*' keys).
        """
        self._client = client
        self._opts = opts
        self._redis = None
        self._prefix = opts.get('redis_key_prefix', '')

    def _get_redis_kwargs(self):
        """Build the StrictRedis constructor kwargs from 'redis_*' opts."""
        kwargs = {}
        for key in self._redis_opt_keys:
            opt_key = 'redis_%s' % key
            if opt_key in self._opts:
                kwargs[key] = self._opts[opt_key]
        # Coerce numeric options that may arrive as strings (e.g. from a
        # text config file).
        for key in self._redis_int_opts:
            if key in kwargs:
                kwargs[key] = int(kwargs[key])
        for key in self._redis_float_opts:
            if key in kwargs:
                kwargs[key] = float(kwargs[key])
        return kwargs

    def _get_redis(self):
        """Create the StrictRedis client; redis_url wins when present."""
        if 'redis_url' in self._opts:
            return StrictRedis.from_url(self._opts['redis_url'])
        return StrictRedis(**self._get_redis_kwargs())

    def _get_hash_key(self, module_name):
        """Redis key of a module's hash, with the configured prefix."""
        return '%s%s' % (self._prefix, module_name)

    @property
    def redis(self):
        if self._redis is None:
            raise RuntimeError("Attempting to access RedisStorage.redis from a RedisStorage that has not been started.")
        return self._redis

    def start(self):
        """Lazily create the Redis connection (idempotent)."""
        if self._redis is None:
            self._redis = self._get_redis()

    def stop(self):
        """Disconnect the whole connection pool and forget the client."""
        if self._redis:
            self._redis.connection_pool.disconnect()
            self._redis = None

    def get_data_for_module_name(self, module_name):
        """Return a dict-like RedisDict view over the module's hash."""
        return RedisDict(self, self._get_hash_key(module_name))
storage = RedisStorage
class RedisDict(UserDict.DictMixin):
    """Dict-like wrapper over a single Redis hash with a local write cache.

    Values are pickled (protocol 0) before being written as hash fields.

    NOTE(review): on Python 3, ``collections.UserDict`` is a class with no
    ``DictMixin`` attribute (DictMixin lived in the Python 2 ``UserDict``
    *module*), so this base class only resolves under Python 2 -- confirm the
    target interpreter.  ``six.StringIO`` is also text-based while redis-py
    returns bytes by default; verify the intended encoding configuration.
    """
    def __init__(self, storage, hash_key):
        # storage is a started RedisStorage; hash_key names the Redis hash.
        self._storage = storage
        self._hash_key = hash_key
        self._protocol = 0  # pickle protocol 0, the most portable/ASCII one
        self._cache = {}  # local cache of already (un)pickled values
        self._cache_write_lock = Semaphore()  # serializes cache writers
    def keys(self):
        # Keys come straight from Redis; __setitem__ writes through, so the
        # cache never holds keys Redis does not know about.
        return self._storage.redis.hkeys(self._hash_key)
    def __len__(self):
        return self._storage.redis.hlen(self._hash_key)
    def has_key(self, key):
        # Python 2-style spelling of ``key in self``.
        return key in self
    def __contains__(self, key):
        if key in self._cache:
            return True
        return self._storage.redis.hexists(self._hash_key, key)
    def get(self, key, default=None):
        if key in self:
            return self[key]
        return default
    def __getitem__(self, key):
        # Serve from the cache when possible; otherwise fetch from Redis,
        # unpickle, and memoize the value.
        try:
            value = self._cache[key]
        except KeyError:
            if key not in self:
                raise KeyError(key)
            f = StringIO(self._storage.redis.hget(self._hash_key, key))
            value = Unpickler(f).load()
            self._cache[key] = value
        return value
    def __setitem__(self, key, value):
        # Write-through: update both the cache and Redis under the lock.
        with self._cache_write_lock:
            self._cache[key] = value
            f = StringIO()
            p = Pickler(f, self._protocol)
            p.dump(value)
            self._storage.redis.hset(self._hash_key, key, f.getvalue())
    def __delitem__(self, key):
        # Redis delete is unconditional; the cache entry may not exist.
        self._storage.redis.hdel(self._hash_key, key)
        with self._cache_write_lock:
            self._cache.pop(key, None)
    def close(self):
        # Flush pending cache entries, then drop the storage reference; the
        # object must not be used afterwards.
        self.sync()
        self._storage = None
    def __del__(self):
        # Best-effort flush at GC time (safe after an explicit close():
        # sync() returns early because the cache is empty by then).
        self.close()
    def sync(self):
        # Re-pickle every cached value into Redis in one pipeline, then
        # clear the cache.
        if not self._cache:
            return
        with self._cache_write_lock, self._storage.redis.pipeline() as pipeline:
            for key, entry in self._cache.items():
                f = StringIO()
                p = Pickler(f, self._protocol)
                p.dump(entry)
                pipeline.hset(self._hash_key, key, f.getvalue())
            pipeline.execute()
            self._cache.clear()
| [
"gevent.lock.Semaphore",
"pickle.Unpickler",
"pickle.Pickler",
"six.StringIO"
] | [((2887, 2898), 'gevent.lock.Semaphore', 'Semaphore', ([], {}), '()\n', (2896, 2898), False, 'from gevent.lock import Semaphore\n'), ((3860, 3870), 'six.StringIO', 'StringIO', ([], {}), '()\n', (3868, 3870), False, 'from six import StringIO\n'), ((3883, 3909), 'pickle.Pickler', 'Pickler', (['f', 'self._protocol'], {}), '(f, self._protocol)\n', (3890, 3909), False, 'from pickle import Unpickler, Pickler\n'), ((4502, 4512), 'six.StringIO', 'StringIO', ([], {}), '()\n', (4510, 4512), False, 'from six import StringIO\n'), ((4533, 4559), 'pickle.Pickler', 'Pickler', (['f', 'self._protocol'], {}), '(f, self._protocol)\n', (4540, 4559), False, 'from pickle import Unpickler, Pickler\n'), ((3654, 3666), 'pickle.Unpickler', 'Unpickler', (['f'], {}), '(f)\n', (3663, 3666), False, 'from pickle import Unpickler, Pickler\n')] |
import click
from pulse import Config
from douban import DoubanInterest, DoubanStatus
from github import GitHub
from loguru import logger
from pulse import CombinedPulse
@click.command()
@click.option("-c", "--config", type=click.Path(), help="Path to config")
def pulse(config):
    """Fetch Douban and GitHub activity, then write the combined pulse files."""
    out_dir = "dashboard/data"
    cfg = Config(config)
    logger.debug(f"The full config: {cfg}")
    # --- Douban: book/movie/music interests --------------------------------
    logger.debug(f'Douban interest config: {cfg[["social", "douban", "book_movie_music"]]}')
    interests = DoubanInterest(cfg[["social", "douban", "book_movie_music"]], base_folder=out_dir)
    interests.run()
    # --- Douban: statuses ---------------------------------------------------
    logger.debug(f'Douban status config: {cfg[["social", "douban", "status"]]}')
    statuses = DoubanStatus(cfg[["social", "douban", "status"]], base_folder=out_dir)
    statuses.run()
    # --- GitHub events ------------------------------------------------------
    logger.debug(f'GitHub config: {cfg[["social", "github", "events"]]}')
    gh = GitHub(cfg[["social", "github", "events"]], base_folder=out_dir)
    gh.run()
    # --- Combined outputs ---------------------------------------------------
    CombinedPulse(
        [interests.pulses, gh.pulses], cfg[["combined"]], base_folder=out_dir
    ).save()
    CombinedPulse(
        [interests.pulses, statuses.pulses], cfg[["combined-social-media"]], base_folder=out_dir
    ).save()
    CombinedPulse(
        [gh.pulses], cfg[["combined-tech"]], base_folder=out_dir
    ).save()
if __name__ == "__main__":
    # Click supplies the 'config' argument from the command line.
    pulse()
| [
"loguru.logger.debug",
"douban.DoubanStatus",
"github.GitHub",
"click.command",
"pulse.Config",
"click.Path",
"douban.DoubanInterest",
"pulse.CombinedPulse"
] | [((173, 188), 'click.command', 'click.command', ([], {}), '()\n', (186, 188), False, 'import click\n'), ((294, 308), 'pulse.Config', 'Config', (['config'], {}), '(config)\n', (300, 308), False, 'from pulse import Config\n'), ((313, 353), 'loguru.logger.debug', 'logger.debug', (['f"""The full config: {conf}"""'], {}), "(f'The full config: {conf}')\n", (325, 353), False, 'from loguru import logger\n'), ((372, 471), 'loguru.logger.debug', 'logger.debug', (['f"""Douban interest config: {conf[[\'social\', \'douban\', \'book_movie_music\']]}"""'], {}), '(\n f"Douban interest config: {conf[[\'social\', \'douban\', \'book_movie_music\']]}"\n )\n', (384, 471), False, 'from loguru import logger\n'), ((485, 582), 'douban.DoubanInterest', 'DoubanInterest', (["conf[['social', 'douban', 'book_movie_music']]"], {'base_folder': '"""dashboard/data"""'}), "(conf[['social', 'douban', 'book_movie_music']], base_folder=\n 'dashboard/data')\n", (499, 582), False, 'from douban import DoubanInterest, DoubanStatus\n'), ((624, 701), 'loguru.logger.debug', 'logger.debug', (['f"""Douban status config: {conf[[\'social\', \'douban\', \'status\']]}"""'], {}), '(f"Douban status config: {conf[[\'social\', \'douban\', \'status\']]}")\n', (636, 701), False, 'from loguru import logger\n'), ((722, 807), 'douban.DoubanStatus', 'DoubanStatus', (["conf[['social', 'douban', 'status']]"], {'base_folder': '"""dashboard/data"""'}), "(conf[['social', 'douban', 'status']], base_folder='dashboard/data'\n )\n", (734, 807), False, 'from douban import DoubanInterest, DoubanStatus\n'), ((860, 930), 'loguru.logger.debug', 'logger.debug', (['f"""GitHub config: {conf[[\'social\', \'github\', \'events\']]}"""'], {}), '(f"GitHub config: {conf[[\'social\', \'github\', \'events\']]}")\n', (872, 930), False, 'from loguru import logger\n'), ((944, 1018), 'github.GitHub', 'GitHub', (["conf[['social', 'github', 'events']]"], {'base_folder': '"""dashboard/data"""'}), "(conf[['social', 'github', 'events']], 
base_folder='dashboard/data')\n", (950, 1018), False, 'from github import GitHub\n'), ((1073, 1182), 'pulse.CombinedPulse', 'CombinedPulse', (['[douban_interests.pulses, github.pulses]', "conf[['combined']]"], {'base_folder': '"""dashboard/data"""'}), "([douban_interests.pulses, github.pulses], conf[['combined']],\n base_folder='dashboard/data')\n", (1086, 1182), False, 'from pulse import CombinedPulse\n'), ((1246, 1376), 'pulse.CombinedPulse', 'CombinedPulse', (['[douban_interests.pulses, douban_status.pulses]', "conf[['combined-social-media']]"], {'base_folder': '"""dashboard/data"""'}), "([douban_interests.pulses, douban_status.pulses], conf[[\n 'combined-social-media']], base_folder='dashboard/data')\n", (1259, 1376), False, 'from pulse import CombinedPulse\n'), ((1444, 1534), 'pulse.CombinedPulse', 'CombinedPulse', (['[github.pulses]', "conf[['combined-tech']]"], {'base_folder': '"""dashboard/data"""'}), "([github.pulses], conf[['combined-tech']], base_folder=\n 'dashboard/data')\n", (1457, 1534), False, 'from pulse import CombinedPulse\n'), ((226, 238), 'click.Path', 'click.Path', ([], {}), '()\n', (236, 238), False, 'import click\n')] |
from argparse import ArgumentParser
import pkgutil
import random
from random import randrange, choice
# Doge-speak building blocks.
DOGE_PREFIXES = ["such", "much", "so", "many", "wow", "very"]
DOGE_EJACULATES = ["wow"]
WOW_CHANCE = 8       # an ejaculate is inserted while randrange(0, 10) > WOW_CHANCE
MAX_WHITESPACE = 15  # upper bound (exclusive) for random padding width
MIN_WHITESPACE = 2   # lower bound (inclusive) for random padding width
def doge_syntax(clause):
    """Prefix a single-word clause with a random doge word; leave phrases alone."""
    if len(clause.split()) > 1:
        return clause
    return "{} {}".format(choice(DOGE_PREFIXES), clause)
# Indices of doge-face lines that already received text (shared mutable state).
used_indices = []
def random_select_no_repeat(max, ref_pool):
    """Draw a random index in [0, max) not yet in ref_pool; record and return it.

    NOTE(review): loops forever once every index is taken -- callers must
    request at most `max` distinct indices.
    """
    while True:
        candidate = randrange(0, max)
        if candidate not in ref_pool:
            break
    ref_pool.append(candidate)
    return candidate
def random_insert_clause(clause, img_file):
    """Append clause, padded with random whitespace, onto a fresh random line."""
    target = random_select_no_repeat(len(img_file), used_indices)
    img_file[target] = img_file[target] + random_whitespace() + clause
def random_whitespace():
    """Return between MIN_WHITESPACE and MAX_WHITESPACE - 1 spaces."""
    return " " * randrange(MIN_WHITESPACE, MAX_WHITESPACE)
def generate_ejacs(output):
    # Each round: randrange(0, 10) > WOW_CHANCE (== 8) succeeds only on 9,
    # i.e. a 1-in-10 chance per round of inserting an ejaculate ('wow') onto
    # a random fresh line of `output`; rounds repeat until the check fails,
    # so several insertions per call are possible.
    while randrange(0,10) > WOW_CHANCE:
        random_insert_clause(choice(DOGE_EJACULATES), output)
# Module-level CLI: zero or more positional clauses for doge to say.
parser = ArgumentParser(description="Cowsay for a new generation.")
parser.add_argument("clauses", nargs="*",
                    help="things you want doge to say")
def main():
    """Print the packaged doge face with the CLI clauses scattered onto it."""
    args = parser.parse_args()
    # Load the ASCII-art face bundled with the package.
    doge_face_data = pkgutil.get_data(__name__, "static/doge")
    doge_face_lines = doge_face_data.decode('utf8').split("\n")
    clauses_source = args.clauses
    # One distinct target line per clause (random.sample raises ValueError
    # when there are more clauses than face lines).
    indices = random.sample(range(len(doge_face_lines)), len(clauses_source))
    for clause, index in zip(clauses_source, indices):
        clause = random_whitespace()+doge_syntax(clause.strip())
        # Occasionally sprinkle extra 'wow's onto other lines.
        generate_ejacs(doge_face_lines)
        # NOTE(review): random padding is applied twice (here and two lines
        # up) -- possibly intentional for a wider scatter; confirm.
        doge_face_lines[index] += (random_whitespace() + clause)
    for line in doge_face_lines:
        print(line)
if __name__ == "__main__":
    main()
| [
"pkgutil.get_data",
"random.choice",
"random.randrange",
"argparse.ArgumentParser"
] | [((958, 1016), 'argparse.ArgumentParser', 'ArgumentParser', ([], {'description': '"""Cowsay for a new generation."""'}), "(description='Cowsay for a new generation.')\n", (972, 1016), False, 'from argparse import ArgumentParser\n'), ((433, 450), 'random.randrange', 'randrange', (['(0)', 'max'], {}), '(0, max)\n', (442, 450), False, 'from random import randrange, choice\n'), ((1191, 1232), 'pkgutil.get_data', 'pkgutil.get_data', (['__name__', '"""static/doge"""'], {}), "(__name__, 'static/doge')\n", (1207, 1232), False, 'import pkgutil\n'), ((496, 513), 'random.randrange', 'randrange', (['(0)', 'max'], {}), '(0, max)\n', (505, 513), False, 'from random import randrange, choice\n'), ((770, 811), 'random.randrange', 'randrange', (['MIN_WHITESPACE', 'MAX_WHITESPACE'], {}), '(MIN_WHITESPACE, MAX_WHITESPACE)\n', (779, 811), False, 'from random import randrange, choice\n'), ((855, 871), 'random.randrange', 'randrange', (['(0)', '(10)'], {}), '(0, 10)\n', (864, 871), False, 'from random import randrange, choice\n'), ((914, 937), 'random.choice', 'choice', (['DOGE_EJACULATES'], {}), '(DOGE_EJACULATES)\n', (920, 937), False, 'from random import randrange, choice\n'), ((324, 345), 'random.choice', 'choice', (['DOGE_PREFIXES'], {}), '(DOGE_PREFIXES)\n', (330, 345), False, 'from random import randrange, choice\n')] |
# !/usr/bin/env python
# -*- coding:utf-8 -*-
# @Project : stock_quant
# @Date : 2021/12/19 16:27
# @Author : Adolf
# @File : ma5_ma10.py
# @Function:
import pandas as pd
import pandas_ta as ta
from GetBaseData.hanle_data_show import get_show_data
from Utils.ShowKline.base_kline import draw_chart
# --- Indicator computation on one stock's adjusted price series -------------
pd.set_option("expand_frame_repr", False)
pd.set_option("display.max_rows", None)
# Hard-coded single-stock CSV (600570, hfq = back-adjusted prices); assumed
# to contain at least 'date' and 'close' columns -- TODO confirm schema.
df = pd.read_csv("Data/RealData/hfq/600570.csv")
# print(df)
# df.set_index(pd.DatetimeIndex(df["date"]), inplace=True)
# df.ta.log_return(cumulative=True, append=True)
# df.ta.percent_return(cumulative=True, append=True)
# Simple/exponential moving averages used by the crossover rule below.
df["sma5"] = ta.sma(df['close'], length=5)
df["sma10"] = ta.sma(df['close'], length=10)
df["ema10"] = ta.ema(df['close'], length=10)
# print(help(ta.macd))
# MACD(12, 26, 9): line, histogram and signal columns.
macd_df = ta.macd(close=df['close'])
df['macd'], df['histogram'], df['signal'] = [macd_df['MACD_12_26_9'], macd_df['MACDh_12_26_9'],
                                             macd_df['MACDs_12_26_9']]
# pd.concat([df, ta.macd(close=df['close'])])
# Restrict to recent history and renumber rows so .index is positional.
df = df[df["date"] > "2020-01-01"]
df.reset_index(inplace=True, drop=True)
# Mark BUY where the 5-day SMA crosses above the 10-day SMA (golden cross).
df.loc[(df["sma5"] > df["sma10"]) & (df["sma5"].shift(1) < df["sma10"].shift(1)), "trade"] = "BUY"
# df.loc[(df["sma5"] < df["sma10"]) & (df["sma5"].shift(1) > df["sma10"].shift(1)), "trade"] = "SELL"
# df = df.loc[df["trade"].notnull() & (df['macd'] > 0) & (df["histogram"] > 0)]
df_chose = df.loc[df["trade"].notnull()]
# print(df_chose)
# Render a kline chart around the first signal only (note the early break).
for show_index in df_chose.index:
    # print(show_index)
    # Window: up to 60 bars of context before the signal and 10 after.
    show_df = df[max(0, show_index - 60):min(len(df), show_index + 10)]
    show_data = get_show_data(_df=show_df)
    draw_chart(show_data, show_html_path="ShowHtml/Ma5Ma10.html")
    break
# df.dropna(subset=['trade'], inplace=True)
# print(df.tail(10))
| [
"Utils.ShowKline.base_kline.draw_chart",
"pandas.read_csv",
"pandas_ta.macd",
"pandas_ta.sma",
"pandas_ta.ema",
"pandas.set_option",
"GetBaseData.hanle_data_show.get_show_data"
] | [((306, 347), 'pandas.set_option', 'pd.set_option', (['"""expand_frame_repr"""', '(False)'], {}), "('expand_frame_repr', False)\n", (319, 347), True, 'import pandas as pd\n'), ((348, 387), 'pandas.set_option', 'pd.set_option', (['"""display.max_rows"""', 'None'], {}), "('display.max_rows', None)\n", (361, 387), True, 'import pandas as pd\n'), ((394, 437), 'pandas.read_csv', 'pd.read_csv', (['"""Data/RealData/hfq/600570.csv"""'], {}), "('Data/RealData/hfq/600570.csv')\n", (405, 437), True, 'import pandas as pd\n'), ((626, 655), 'pandas_ta.sma', 'ta.sma', (["df['close']"], {'length': '(5)'}), "(df['close'], length=5)\n", (632, 655), True, 'import pandas_ta as ta\n'), ((670, 700), 'pandas_ta.sma', 'ta.sma', (["df['close']"], {'length': '(10)'}), "(df['close'], length=10)\n", (676, 700), True, 'import pandas_ta as ta\n'), ((716, 746), 'pandas_ta.ema', 'ta.ema', (["df['close']"], {'length': '(10)'}), "(df['close'], length=10)\n", (722, 746), True, 'import pandas_ta as ta\n'), ((780, 806), 'pandas_ta.macd', 'ta.macd', ([], {'close': "df['close']"}), "(close=df['close'])\n", (787, 806), True, 'import pandas_ta as ta\n'), ((1586, 1612), 'GetBaseData.hanle_data_show.get_show_data', 'get_show_data', ([], {'_df': 'show_df'}), '(_df=show_df)\n', (1599, 1612), False, 'from GetBaseData.hanle_data_show import get_show_data\n'), ((1617, 1678), 'Utils.ShowKline.base_kline.draw_chart', 'draw_chart', (['show_data'], {'show_html_path': '"""ShowHtml/Ma5Ma10.html"""'}), "(show_data, show_html_path='ShowHtml/Ma5Ma10.html')\n", (1627, 1678), False, 'from Utils.ShowKline.base_kline import draw_chart\n')] |
#data we need:
#1) total votes cast
#2) list of all aandidates who received votes
#3) percentage of votes each candidate won
#4) number of votes each candidate won
#5) winner of the election based on popular vote
import os
import csv
#assign variable to csv
file_to_load = "../Resources/election_results.csv"
#assign variable to written filename
file_to_save = os.path.join("..","analysis", "election_analysis.txt")
# Winning Candidate and Winning Count Tracker
winning_candidate = ""
winning_count = 0
winning_percentage = 0
#initialize counters
total_votes=0
candidate_options=[]  # candidate names in first-seen order
candidate_votes={}  # candidate name -> vote tally
#open results and read
with open(file_to_load) as election_data:
    file_reader = csv.reader(election_data)
    headers=next(file_reader)  # skip the header row
    #count votes
    for row in file_reader:
        total_votes+=1
        candidate_name=row[2]  # third column holds the candidate name
        if candidate_name not in candidate_options:
            candidate_options.append(candidate_name)
            candidate_votes[candidate_name]=0
        candidate_votes[candidate_name]+=1
#write results to txt
with open(file_to_save,'w') as txt_file:
    election_results = (
        f"\nElection Results\n"
        f"-------------------------\n"
        f"Total Votes: {total_votes:,}\n"
        f"-------------------------\n")
    #print(election_results, end="")
    txt_file.write(election_results)
    #find the winner, print the results
    for candidate in candidate_votes:
        votes=candidate_votes[candidate]
        # Percentage rounded to one decimal for display.
        vote_percentage=round((float(votes)/total_votes)*100,1)
        candidate_results=(f"{candidate}: {vote_percentage}% ({votes:,})\n")
        txt_file.write(candidate_results)
        # NOTE(review): requiring BOTH strict inequalities means a candidate
        # with more votes but an equal *rounded* percentage would not replace
        # the current leader -- the votes comparison alone would suffice.
        if (votes>winning_count) and (vote_percentage>winning_percentage):
            winning_count=votes
            winning_percentage=vote_percentage
            winning_candidate=candidate
    #print the winner
    winning_candidate_summary = (
        f"-------------------------\n"
        f"Winner: {winning_candidate}\n"
        f"Winning Vote Count: {winning_count:,}\n"
        f"Winning Percentage: {winning_percentage:.1f}%\n"
        f"-------------------------\n")
    txt_file.write(winning_candidate_summary)
| [
"csv.reader",
"os.path.join"
] | [((361, 416), 'os.path.join', 'os.path.join', (['""".."""', '"""analysis"""', '"""election_analysis.txt"""'], {}), "('..', 'analysis', 'election_analysis.txt')\n", (373, 416), False, 'import os\n'), ((687, 712), 'csv.reader', 'csv.reader', (['election_data'], {}), '(election_data)\n', (697, 712), False, 'import csv\n')] |
import argparse
import copy
import optuna
import os
from datetime import datetime
import torch
import torch.nn as nn
import torch.optim as optim
from src.dataloader import create_dataloader
from src.model import Model
from src.utils.torch_utils import model_info
from src.utils.common import read_yaml
from src.utils.macs import calc_macs
from src.trainer import TorchTrainer
from typing import Any, Dict, List, Tuple, Union
from train import train
# Baseline model-architecture / dataset configs loaded at import time
# (paths are relative to the repository root).
MODEL_CONFIG = read_yaml(cfg="configs/model/effinetb1.yaml")
DATA_CONFIG = read_yaml(cfg="configs/data/taco.yaml")
def search_hyperparam(trial: optuna.trial.Trial) -> Dict[str, Any]:
    """Sample the training hyperparameters for one Optuna trial.

    Currently searches image size (42..98, step 14) and batch size
    (32..128, step 32); the epoch-count and n_select dimensions are
    disabled.
    """
    return {
        "IMG_SIZE": trial.suggest_int("img_size", low=42, high=98, step=14),
        "BATCH_SIZE": trial.suggest_int("batch_size", low=32, high=128, step=32),
    }
def search_model(trial: optuna.trial.Trial) -> List[Any]:
"""Search model structure from user-specified search space."""
model = []
n_stride = 0
MAX_NUM_STRIDE = 5
UPPER_STRIDE = 2
# Module 1
m1 = trial.suggest_categorical("m1", ["Conv", "DWConv"])
m1_args = []
m1_repeat = trial.suggest_int("m1/repeat", 1, 3)
m1_out_channel = trial.suggest_int("m1/out_channels", low=16, high=64, step=16)
m1_stride = trial.suggest_int("m1/stride", low=1, high=UPPER_STRIDE)
if m1_stride == 2:
n_stride += 1
m1_activation = trial.suggest_categorical(
"m1/activation", ["ReLU", "Hardswish"]
)
if m1 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m1_args = [m1_out_channel, 3, m1_stride, None, 1, m1_activation]
elif m1 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m1_args = [m1_out_channel, 3, m1_stride, None, m1_activation]
model.append([m1_repeat, m1, m1_args])
# Module 2
m2 = trial.suggest_categorical(
"m2",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m2_args = []
m2_repeat = trial.suggest_int("m2/repeat", 1, 5)
m2_out_channel = trial.suggest_int("m2/out_channels", low=16, high=128, step=16)
m2_stride = trial.suggest_int("m2/stride", low=1, high=UPPER_STRIDE)
# force stride m2
if n_stride == 0:
m2_stride = 2
if m2 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m2_kernel = trial.suggest_int("m2/kernel_size", low=1, high=5, step=2)
m2_activation = trial.suggest_categorical("m2/activation", ["ReLU", "Hardswish"])
m2_args = [m2_out_channel, m2_kernel, m2_stride, None, 1, m2_activation]
elif m2 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m2_kernel = trial.suggest_int("m2/kernel_size", low=1, high=5, step=2)
m2_activation = trial.suggest_categorical("m2/activation", ["ReLU", "Hardswish"])
m2_args = [m2_out_channel, m2_kernel, m2_stride, None, m2_activation]
elif m2 == "MBConv":
m2_c = trial.suggest_int("m2/c", low=16, high=320, step=16)
m2_k = trial.suggest_int("m2/k", low=3, high=5, step=2)
m2_args = [1, m2_c, m2_stride, m2_k]
elif m2 == "InvertedResidualv2":
m2_c = trial.suggest_int("m2/v2_c", low=16, high=32, step=16)
m2_t = trial.suggest_int("m2/v2_t", low=1, high=4)
m2_args = [m2_c, m2_t, m2_stride]
elif m2 == "InvertedResidualv3":
m2_kernel = trial.suggest_int("m2/kernel_size", low=3, high=5, step=2)
m2_t = round(trial.suggest_float("m2/v3_t", low=1.0, high=6.0, step=0.1), 1)
m2_c = trial.suggest_int("m2/v3_c", low=16, high=40, step=8)
m2_se = trial.suggest_categorical("m2/v3_se", [0, 1])
m2_hs = trial.suggest_categorical("m2/v3_hs", [0, 1])
# k t c SE HS s
m2_args = [m2_kernel, m2_t, m2_c, m2_se, m2_hs, m2_stride]
if not m2 == "Pass":
if m2_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m2_repeat, m2, m2_args])
# Module 3
m3 = trial.suggest_categorical(
"m3",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m3_args = []
m3_repeat = trial.suggest_int("m3/repeat", 1, 5)
m3_out_channel = trial.suggest_int("m3/out_channels", low=16, high=128, step=16)
m3_stride = trial.suggest_int("m3/stride", low=1, high=UPPER_STRIDE)
if m3 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m3_out_channel = trial.suggest_int("m3/out_channels", low=16, high=128, step=16)
m3_kernel = trial.suggest_int("m3/kernel_size", low=1, high=5, step=2)
m3_activation = trial.suggest_categorical("m3/activation", ["ReLU", "Hardswish"])
m3_args = [m3_out_channel, m3_kernel, m3_stride, None, 1, m3_activation]
elif m3 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m3_out_channel = trial.suggest_int("m3/out_channels", low=16, high=128, step=16)
m3_kernel = trial.suggest_int("m3/kernel_size", low=1, high=5, step=2)
m3_activation = trial.suggest_categorical("m3/activation", ["ReLU", "Hardswish"])
m3_args = [m3_out_channel, m3_kernel, m3_stride, None, m3_activation]
elif m3 == "MBConv":
m3_c = trial.suggest_int("m3/c", low=16, high=320, step=16)
m3_k = trial.suggest_int("m3/k", low=3, high=5, step=2)
m3_args = [6, m3_c, m3_stride, m3_k]
elif m3 == "InvertedResidualv2":
m3_c = trial.suggest_int("m3/v2_c", low=8, high=32, step=8)
m3_t = trial.suggest_int("m3/v2_t", low=1, high=8)
m3_args = [m3_c, m3_t, m3_stride]
elif m3 == "InvertedResidualv3":
m3_kernel = trial.suggest_int("m3/kernel_size", low=3, high=5, step=2)
m3_t = round(trial.suggest_float("m3/v3_t", low=1.0, high=6.0, step=0.1), 1)
m3_c = trial.suggest_int("m3/v3_c", low=8, high=40, step=8)
m3_se = trial.suggest_categorical("m3/v3_se", [0, 1])
m3_hs = trial.suggest_categorical("m3/v3_hs", [0, 1])
m3_args = [m3_kernel, m3_t, m3_c, m3_se, m3_hs, m3_stride]
if not m3 == "Pass":
if m3_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m3_repeat, m3, m3_args])
# Module 4
m4 = trial.suggest_categorical(
"m4",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m4_args = []
m4_repeat = trial.suggest_int("m4/repeat", 1, 5)
m4_out_channel = trial.suggest_int("m4/out_channels", low=16, high=128, step=16)
m4_stride = trial.suggest_int("m4/stride", low=1, high=UPPER_STRIDE)
# force stride m4
if n_stride == 1:
m4_stride = 2
if m4 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m4_out_channel = trial.suggest_int("m4/out_channels", low=16, high=256, step=16)
m4_kernel = trial.suggest_int("m4/kernel_size", low=1, high=5, step=2)
m4_activation = trial.suggest_categorical("m4/activation", ["ReLU", "Hardswish"])
m4_args = [m4_out_channel, m4_kernel, m4_stride, None, 1, m4_activation]
elif m4 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m4_out_channel = trial.suggest_int("m4/out_channels", low=16, high=256, step=16)
m4_kernel = trial.suggest_int("m4/kernel_size", low=1, high=5, step=2)
m4_activation = trial.suggest_categorical("m4/activation", ["ReLU", "Hardswish"])
m4_args = [m4_out_channel, m4_kernel, m4_stride, None, m4_activation]
elif m4 == "MBConv":
m4_c = trial.suggest_int("m4/c", low=16, high=320, step=16)
m4_k = trial.suggest_int("m4/k", low=3, high=5, step=2)
m4_args = [6, m4_c, m4_stride, m4_k]
elif m4 == "InvertedResidualv2":
m4_c = trial.suggest_int("m4/v2_c", low=8, high=64, step=8)
m4_t = trial.suggest_int("m4/v2_t", low=1, high=8)
m4_args = [m4_c, m4_t, m4_stride]
elif m4 == "InvertedResidualv3":
m4_kernel = trial.suggest_int("m4/kernel_size", low=3, high=5, step=2)
m4_t = round(trial.suggest_float("m4/v3_t", low=1.0, high=6.0, step=0.1), 1)
m4_c = trial.suggest_int("m4/v3_c", low=8, high=80, step=8)
m4_se = trial.suggest_categorical("m4/v3_se", [0, 1])
m4_hs = trial.suggest_categorical("m4/v3_hs", [0, 1])
m4_args = [m4_kernel, m4_t, m4_c, m4_se, m4_hs, m4_stride]
if not m4 == "Pass":
if m4_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m4_repeat, m4, m4_args])
# Module 5
m5 = trial.suggest_categorical(
"m5",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m5_args = []
m5_repeat = trial.suggest_int("m5/repeat", 1, 5)
m5_out_channel = trial.suggest_int("m5/out_channels", low=16, high=128, step=16)
m5_stride = 1
if m5 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m5_out_channel = trial.suggest_int("m5/out_channels", low=16, high=256, step=16)
m5_kernel = trial.suggest_int("m5/kernel_size", low=1, high=5, step=2)
m5_activation = trial.suggest_categorical("m5/activation", ["ReLU", "Hardswish"])
m5_stride = trial.suggest_int("m5/stride", low=1, high=UPPER_STRIDE)
m5_args = [m5_out_channel, m5_kernel, m5_stride, None, 1, m5_activation]
elif m5 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m5_out_channel = trial.suggest_int("m5/out_channels", low=16, high=256, step=16)
m5_kernel = trial.suggest_int("m5/kernel_size", low=1, high=5, step=2)
m5_activation = trial.suggest_categorical("m5/activation", ["ReLU", "Hardswish"])
m5_stride = trial.suggest_int("m5/stride", low=1, high=UPPER_STRIDE)
m5_args = [m5_out_channel, m5_kernel, m5_stride, None, m5_activation]
elif m5 == "MBConv":
m5_c = trial.suggest_int("m5/c", low=16, high=320, step=16)
m5_k = trial.suggest_int("m5/k", low=3, high=5, step=2)
m5_args = [6, m5_c, m5_stride, m5_k]
elif m5 == "InvertedResidualv2":
m5_c = trial.suggest_int("m5/v2_c", low=16, high=128, step=16)
m5_t = trial.suggest_int("m5/v2_t", low=1, high=8)
m5_stride = trial.suggest_int("m5/stride", low=1, high=UPPER_STRIDE)
m5_args = [m5_c, m5_t, m5_stride]
elif m5 == "InvertedResidualv3":
m5_kernel = trial.suggest_int("m5/kernel_size", low=3, high=5, step=2)
m5_t = round(trial.suggest_float("m5/v3_t", low=1.0, high=6.0, step=0.1), 1)
m5_c = trial.suggest_int("m5/v3_c", low=16, high=80, step=16)
m5_se = trial.suggest_categorical("m5/v3_se", [0, 1])
m5_hs = trial.suggest_categorical("m5/v3_hs", [0, 1])
m5_stride = trial.suggest_int("m5/stride", low=1, high=UPPER_STRIDE)
m5_args = [m5_kernel, m5_t, m5_c, m5_se, m5_hs, m5_stride]
if not m5 == "Pass":
if m5_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m5_repeat, m5, m5_args])
# Module 6
m6 = trial.suggest_categorical(
"m6",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m6_args = []
m6_repeat = trial.suggest_int("m6/repeat", 1, 5)
m6_out_channel = trial.suggest_int("m6/out_channels", low=16, high=128, step=16)
m6_stride = trial.suggest_int("m6/stride", low=1, high=UPPER_STRIDE)
# force stride m6
if n_stride == 2:
m4_stride = 2
if m6 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m6_out_channel = trial.suggest_int("m6/out_channels", low=16, high=512, step=16)
m6_kernel = trial.suggest_int("m6/kernel_size", low=1, high=5, step=2)
m6_activation = trial.suggest_categorical("m6/activation", ["ReLU", "Hardswish"])
m6_args = [m6_out_channel, m6_kernel, m6_stride, None, 1, m6_activation]
elif m6 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m6_out_channel = trial.suggest_int("m6/out_channels", low=16, high=512, step=16)
m6_kernel = trial.suggest_int("m6/kernel_size", low=1, high=5, step=2)
m6_activation = trial.suggest_categorical("m6/activation", ["ReLU", "Hardswish"])
m6_args = [m6_out_channel, m6_kernel, m6_stride, None, m6_activation]
elif m6 == "MBConv":
m6_c = trial.suggest_int("m6/c", low=16, high=320, step=16)
m6_k = trial.suggest_int("m6/k", low=3, high=5, step=2)
m6_args = [6, m6_c, m6_stride, m6_k]
elif m6 == "InvertedResidualv2":
m6_c = trial.suggest_int("m6/v2_c", low=16, high=128, step=16)
m6_t = trial.suggest_int("m6/v2_t", low=1, high=8)
m6_args = [m6_c, m6_t, m6_stride]
elif m6 == "InvertedResidualv3":
m6_kernel = trial.suggest_int("m6/kernel_size", low=3, high=5, step=2)
m6_t = round(trial.suggest_float("m6/v3_t", low=1.0, high=6.0, step=0.1), 1)
m6_c = trial.suggest_int("m6/v3_c", low=16, high=160, step=16)
m6_se = trial.suggest_categorical("m6/v3_se", [0, 1])
m6_hs = trial.suggest_categorical("m6/v3_hs", [0, 1])
m6_args = [m6_kernel, m6_t, m6_c, m6_se, m6_hs, m6_stride]
if not m6 == "Pass":
if m6_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m6_repeat, m6, m6_args])
# Module 7
m7 = trial.suggest_categorical(
"m7",
["Conv",
"DWConv",
"MBConv",
"InvertedResidualv2",
"InvertedResidualv3",
"Pass"]
)
m7_args = []
m7_repeat = trial.suggest_int("m7/repeat", 1, 5)
m7_out_channel = trial.suggest_int("m7/out_channels", low=16, high=128, step=16)
m7_stride = trial.suggest_int("m7/stride", low=1, high=UPPER_STRIDE)
if m7 == "Conv":
# Conv args: [out_channel, kernel_size, stride, padding, groups, activation]
m7_out_channel = trial.suggest_int("m7/out_channels", low=128, high=1024, step=128)
m7_kernel = trial.suggest_int("m7/kernel_size", low=1, high=5, step=2)
m7_activation = trial.suggest_categorical("m7/activation", ["ReLU", "Hardswish"])
m7_args = [m7_out_channel, m7_kernel, m7_stride, None, 1, m7_activation]
elif m7 == "DWConv":
# DWConv args: [out_channel, kernel_size, stride, padding_size, activation]
m7_out_channel = trial.suggest_int("m7/out_channels", low=128, high=1024, step=128)
m7_kernel = trial.suggest_int("m7/kernel_size", low=1, high=5, step=2)
m7_activation = trial.suggest_categorical("m7/activation", ["ReLU", "Hardswish"])
m7_args = [m7_out_channel, m7_kernel, m7_stride, None, m7_activation]
elif m7 == "MBConv":
m7_c = trial.suggest_int("m7/c", low=16, high=320, step=16)
m7_k = trial.suggest_int("m7/k", low=3, high=5, step=2)
m7_args = [6, m7_c, m7_stride, m7_k]
elif m7 == "InvertedResidualv2":
m7_c = trial.suggest_int("m7/v2_c", low=16, high=160, step=16)
m7_t = trial.suggest_int("m7/v2_t", low=1, high=8)
m7_args = [m7_c, m7_t, m7_stride]
elif m7 == "InvertedResidualv3":
m7_kernel = trial.suggest_int("m7/kernel_size", low=3, high=5, step=2)
m7_t = round(trial.suggest_float("m7/v3_t", low=1.0, high=6.0, step=0.1), 1)
m7_c = trial.suggest_int("m7/v3_c", low=8, high=160, step=8)
m7_se = trial.suggest_categorical("m7/v3_se", [0, 1])
m7_hs = trial.suggest_categorical("m7/v3_hs", [0, 1])
m7_args = [m7_kernel, m7_t, m7_c, m7_se, m7_hs, m7_stride]
if not m7 == "Pass":
if m7_stride == 2:
n_stride += 1
if n_stride>=MAX_NUM_STRIDE:
UPPER_STRIDE = 1
model.append([m7_repeat, m7, m7_args])
# last layer
last_dim = trial.suggest_int("last_dim", low=128, high=512, step=128)
# We can setup fixed structure as well
model.append([1, "Conv", [last_dim, 1, 1]])
model.append([1, "GlobalAvgPool", []])
model.append([1, "FixedConv", [last_dim, 1, 1, None, 1, None]])
return model
def tuning_score(test_f1: float, macs: float) -> float:
    """Combine F1 quality and model cost into one scalar (lower is better).

    The F1 contribution is a piecewise-linear penalty anchored at a pivot
    of 0.85: scores below 0.5 get the maximum penalty of 1, scores between
    0.5 and the pivot are penalised linearly, and scores at or above the
    pivot earn a (negative) bonus at half slope.  The MACs contribution is
    simply the MAC count normalised by 100k.

    Args:
        test_f1: F1 score on the test split, expected in [0, 1].
        macs: Multiply-accumulate count of the model.

    Returns:
        float: f1 penalty plus normalised MACs; smaller is better.
    """
    F1_PIVOT = 0.85
    F1_LIMIT = 0.5
    MACS_PIVOT = 100_000
    if test_f1 < F1_LIMIT:
        f1_penalty = 1
    elif test_f1 < F1_PIVOT:
        f1_penalty = 1 - (test_f1 / F1_PIVOT)
    else:
        # At or above the pivot: half-slope term, goes negative (a bonus).
        f1_penalty = 0.5 * (1 - (test_f1 / F1_PIVOT))
    return f1_penalty + macs / MACS_PIVOT
def objective(trial: optuna.trial.Trial, device) -> float:
    """Optuna objective: build configs from the trial, train, and score.

    Args:
        trial: Optuna trial that supplies the hyperparameters.
        device: torch device to train on.

    Returns:
        float: tuning_score(test F1, MACs); lower is better.
    """
    model_config = copy.deepcopy(MODEL_CONFIG)
    data_config = copy.deepcopy(DATA_CONFIG)
    # hyperparams carries EPOCHS, IMG_SIZE, n_select and BATCH_SIZE.
    hyperparams = search_hyperparam(trial)
    # NOTE(review): input_size is taken from the *original* IMG_SIZE, i.e.
    # before the hyperparameter override two lines below -- confirm this
    # ordering is intended.
    model_config["input_size"] = [data_config["IMG_SIZE"]] * 2
    data_config["BATCH_SIZE"] = hyperparams["BATCH_SIZE"]
    data_config["IMG_SIZE"] = hyperparams["IMG_SIZE"]
    timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    log_dir = os.path.join("exp", timestamp)
    os.makedirs(log_dir, exist_ok=True)
    # Instantiate once only to measure MACs at the tuned image size.
    model_instance = Model(model_config, verbose=False)
    img_size = data_config["IMG_SIZE"]
    macs = calc_macs(model_instance.model, (3, img_size, img_size))
    _, test_f1, _ = train(
        model_config=model_config,
        data_config=data_config,
        log_dir=log_dir,
        fp16=data_config["FP16"],
        device=device,
    )
    return tuning_score(test_f1, macs)
def tune(gpu_id: int, storage: Union[str, None] = None, study_name: str = "pstage_automl"):
    """Create (or resume) an Optuna study and run the search.

    Args:
        gpu_id: CUDA device index to train on; falls back to CPU when CUDA
            is unavailable or the index is out of range.
        storage: Optional RDB storage URL for distributed tuning.
        study_name: Name of the Optuna study to create or resume.
    """
    if not torch.cuda.is_available():
        device = torch.device("cpu")
    elif 0 <= gpu_id < torch.cuda.device_count():
        device = torch.device(f"cuda:{gpu_id}")
    else:
        # Bug fix: `device` used to be left unbound when CUDA was available
        # but gpu_id was out of range, raising NameError in objective().
        device = torch.device("cpu")
    sampler = optuna.samplers.TPESampler(n_startup_trials=20)
    if storage is not None:
        rdb_storage = optuna.storages.RDBStorage(url=storage)
    else:
        rdb_storage = None
    study = optuna.create_study(
        directions=["minimize"],
        storage=rdb_storage,
        study_name=study_name,
        sampler=sampler,
        load_if_exists=True,
    )
    study.optimize(lambda trial: objective(trial, device), n_trials=20)
    pruned_trials = [
        t for t in study.trials if t.state == optuna.trial.TrialState.PRUNED
    ]
    complete_trials = [
        t for t in study.trials if t.state == optuna.trial.TrialState.COMPLETE
    ]
    print("Study statistics: ")
    print("  Number of finished trials: ", len(study.trials))
    print("  Number of pruned trials: ", len(pruned_trials))
    print("  Number of complete trials: ", len(complete_trials))
    print("Best trials:")
    # Trials on the Pareto front (a single best trial for one objective).
    best_trials = study.best_trials
    for tr in best_trials:
        # Bug fix: the old code indexed tr.values[1] unconditionally, which
        # raises IndexError for this single-objective study; print all
        # objective values generically instead.
        values_str = ", ".join(f"value{i + 1}:{v}" for i, v in enumerate(tr.values))
        print(f"  {values_str}")
        for key, value in tr.params.items():
            print(f"    {key}:{value}")
if __name__ == "__main__":
    # Command-line entry point: parse tuner options and launch the search.
    parser = argparse.ArgumentParser(description="Optuna tuner.")
    parser.add_argument(
        "--gpu", default=0, type=int, help="GPU id to use"
    )
    parser.add_argument(
        "--storage", default="", type=str, help="RDB Storage URL for optuna."
    )
    parser.add_argument(
        "--study-name", default="pstage_automl", type=str, help="Optuna study name."
    )
    args = parser.parse_args()
    # An empty --storage string means "no RDB storage".
    storage_url = args.storage or None
    tune(args.gpu, storage=storage_url, study_name=args.study_name)
| [
"copy.deepcopy",
"os.makedirs",
"src.utils.common.read_yaml",
"argparse.ArgumentParser",
"src.utils.macs.calc_macs",
"torch.cuda.device_count",
"src.model.Model",
"torch.cuda.is_available",
"train.train",
"torch.device",
"optuna.storages.RDBStorage",
"datetime.datetime.now",
"optuna.create_s... | [((466, 511), 'src.utils.common.read_yaml', 'read_yaml', ([], {'cfg': '"""configs/model/effinetb1.yaml"""'}), "(cfg='configs/model/effinetb1.yaml')\n", (475, 511), False, 'from src.utils.common import read_yaml\n'), ((526, 565), 'src.utils.common.read_yaml', 'read_yaml', ([], {'cfg': '"""configs/data/taco.yaml"""'}), "(cfg='configs/data/taco.yaml')\n", (535, 565), False, 'from src.utils.common import read_yaml\n'), ((17760, 17787), 'copy.deepcopy', 'copy.deepcopy', (['MODEL_CONFIG'], {}), '(MODEL_CONFIG)\n', (17773, 17787), False, 'import copy\n'), ((17806, 17832), 'copy.deepcopy', 'copy.deepcopy', (['DATA_CONFIG'], {}), '(DATA_CONFIG)\n', (17819, 17832), False, 'import copy\n'), ((18398, 18433), 'os.makedirs', 'os.makedirs', (['log_dir'], {'exist_ok': '(True)'}), '(log_dir, exist_ok=True)\n', (18409, 18433), False, 'import os\n'), ((18455, 18489), 'src.model.Model', 'Model', (['model_config'], {'verbose': '(False)'}), '(model_config, verbose=False)\n', (18460, 18489), False, 'from src.model import Model\n'), ((18501, 18592), 'src.utils.macs.calc_macs', 'calc_macs', (['model_instance.model', "(3, data_config['IMG_SIZE'], data_config['IMG_SIZE'])"], {}), "(model_instance.model, (3, data_config['IMG_SIZE'], data_config[\n 'IMG_SIZE']))\n", (18510, 18592), False, 'from src.utils.macs import calc_macs\n'), ((18641, 18760), 'train.train', 'train', ([], {'model_config': 'model_config', 'data_config': 'data_config', 'log_dir': 'log_dir', 'fp16': "data_config['FP16']", 'device': 'device'}), "(model_config=model_config, data_config=data_config, log_dir=log_dir,\n fp16=data_config['FP16'], device=device)\n", (18646, 18760), False, 'from train import train\n'), ((19124, 19171), 'optuna.samplers.TPESampler', 'optuna.samplers.TPESampler', ([], {'n_startup_trials': '(20)'}), '(n_startup_trials=20)\n', (19150, 19171), False, 'import optuna\n'), ((19311, 19441), 'optuna.create_study', 'optuna.create_study', ([], {'directions': "['minimize']", 'storage': 
'rdb_storage', 'study_name': 'study_name', 'sampler': 'sampler', 'load_if_exists': '(True)'}), "(directions=['minimize'], storage=rdb_storage,\n study_name=study_name, sampler=sampler, load_if_exists=True)\n", (19330, 19441), False, 'import optuna\n'), ((20315, 20367), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Optuna tuner."""'}), "(description='Optuna tuner.')\n", (20338, 20367), False, 'import argparse\n'), ((18948, 18973), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (18971, 18973), False, 'import torch\n'), ((18992, 19011), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (19004, 19011), False, 'import torch\n'), ((19222, 19261), 'optuna.storages.RDBStorage', 'optuna.storages.RDBStorage', ([], {'url': 'storage'}), '(url=storage)\n', (19248, 19261), False, 'import optuna\n'), ((19035, 19060), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (19058, 19060), False, 'import torch\n'), ((19079, 19109), 'torch.device', 'torch.device', (['f"""cuda:{gpu_id}"""'], {}), "(f'cuda:{gpu_id}')\n", (19091, 19109), False, 'import torch\n'), ((18348, 18362), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18360, 18362), False, 'from datetime import datetime\n')] |
"""
1. 选择题型
普通算式 x +/-/*// y
类型算式1 x +/- y +/- z
类型算式2 x +/- y * z
类型算式3 x +/- y / z
2. 选择题量
3. 生成题目 / 打印题目
"""
import random
import wx
import os
import time
from student import math1, math2, math3, math4
'''
frame(窗口):带标题和边框的最顶层窗体
panel(面板):容器类,提供空间放其他组件,包括其他panel
'''
math_list = [math1, math2, math3, math4]
class MyApp(wx.App):
    """Main application for the arithmetic drill-sheet generator.

    Shows a check list of problem types plus two buttons: one to grow the
    problem count and one to print generated problems to stdout.
    """

    def __init__(self):
        wx.App.__init__(self)
        # Indices of the items toggled in the check list (see printselect1).
        self.selected_list = []
        # Generator callables picked from the module-level math_list.
        self.math_list = []
        # Total number of problems to generate; grows by 40 per button click.
        self.total_num = 40

    def OnInit(self):
        """Build the frame, panel and controls; called automatically by wx."""
        self.Frame = wx.Frame(parent=None, title="口算题卡生成器", pos=(100, 100), size=(300, 200))
        self.Frame.SetMaxSize((300, 300))
        self.Frame.SetMinSize((300, 300))
        self.SetTopWindow(self.Frame)
        self.panel = wx.Panel(self.Frame, -1)
        self.Set_Math_Type()
        self.Set_Add_Data_Button()
        self.Set_Generate_Button()
        self.Frame.Show()
        return True

    def Set_Generate_Button(self):
        """Create the generate button and bind it to generate_data."""
        print('set generate button')
        self.generate_button = wx.Button(self.panel, -1, "生成题目", pos=(150, 100), size=(150, 50))
        font = wx.Font(18, wx.ROMAN, wx.NORMAL, wx.NORMAL)
        self.generate_button.SetFont(font)
        self.generate_button.SetBackgroundColour("black")
        self.generate_button.SetForegroundColour("white")
        self.Bind(wx.EVT_BUTTON, self.generate_data, self.generate_button)

    def Set_Add_Data_Button(self):
        """Create the add-problems button and bind it to add_data."""
        print('set add data button')
        self.add_data_button = wx.Button(self.panel, -1, "增加题目", pos=(0, 100), size=(150, 50))
        font = wx.Font(18, wx.ROMAN, wx.NORMAL, wx.NORMAL)
        self.add_data_button.SetFont(font)
        self.add_data_button.SetBackgroundColour("black")
        self.add_data_button.SetForegroundColour("white")
        self.Bind(wx.EVT_BUTTON, self.add_data, self.add_data_button)

    def Set_Math_Type(self):
        """Create the check list of available problem types."""
        list1 = ["普通算式 x +/-/*// y", "类型算式1 x +/- y +/- z", "类型算式2 x +/- y * z", "类型算式3 x +/- y / z"]
        # self.mathlistbox1 = wx.ListBox(self.panel, -1, (-1, -1), (200, 60), list1, wx.LB_MULTIPLE)
        self.mathlistbox1 = wx.CheckListBox(self.panel, -1, (-1, -1), (300, 150), list1)
        # self.mathlistbox1.Bind(wx.EVT_CHECKLISTBOX, self.printselect1)
        self.mathlistbox1.Bind(wx.EVT_CHECKLISTBOX, self.printselect1)

    def generate_data(self, event):
        """Print total_num problems drawn randomly from the selected types.

        NOTE(review): if no type has been selected yet, random.randint(0, -1)
        raises ValueError -- confirm whether a guard is wanted.
        """
        print("generate data")
        print(self.total_num)
        for i in range(int(self.total_num / 4)):
            for j in range(4):
                k = random.randint(0, len(self.math_list) - 1)
                self.math_list[k](1, 10)
            print('\n')

    def add_data(self, event):
        """Increase the number of problems to generate by 40."""
        self.total_num += 40
        print(self.total_num)

    def printselect1(self, data):
        """Record the toggled item's index and its generator function.

        NOTE(review): this fires on every toggle, so unchecking an item
        appends it again instead of removing it -- confirm intended.
        """
        self.selected_list.append(data.GetInt())
        print(self.selected_list)
        self.math_list.append(math_list[data.GetInt()])
        print(self.math_list)
def loop_new():
    """Create the application object and enter the wx event loop."""
    app = MyApp()
    app.MainLoop()


if __name__ == "__main__":
    loop_new()
| [
"wx.App.__init__",
"wx.CheckListBox",
"wx.Panel",
"wx.Button",
"wx.Font",
"wx.Frame"
] | [((386, 407), 'wx.App.__init__', 'wx.App.__init__', (['self'], {}), '(self)\n', (401, 407), False, 'import wx\n'), ((540, 611), 'wx.Frame', 'wx.Frame', ([], {'parent': 'None', 'title': '"""口算题卡生成器"""', 'pos': '(100, 100)', 'size': '(300, 200)'}), "(parent=None, title='口算题卡生成器', pos=(100, 100), size=(300, 200))\n", (548, 611), False, 'import wx\n'), ((755, 779), 'wx.Panel', 'wx.Panel', (['self.Frame', '(-1)'], {}), '(self.Frame, -1)\n', (763, 779), False, 'import wx\n'), ((1029, 1094), 'wx.Button', 'wx.Button', (['self.panel', '(-1)', '"""生成题目"""'], {'pos': '(150, 100)', 'size': '(150, 50)'}), "(self.panel, -1, '生成题目', pos=(150, 100), size=(150, 50))\n", (1038, 1094), False, 'import wx\n'), ((1110, 1153), 'wx.Font', 'wx.Font', (['(18)', 'wx.ROMAN', 'wx.NORMAL', 'wx.NORMAL'], {}), '(18, wx.ROMAN, wx.NORMAL, wx.NORMAL)\n', (1117, 1153), False, 'import wx\n'), ((1492, 1555), 'wx.Button', 'wx.Button', (['self.panel', '(-1)', '"""增加题目"""'], {'pos': '(0, 100)', 'size': '(150, 50)'}), "(self.panel, -1, '增加题目', pos=(0, 100), size=(150, 50))\n", (1501, 1555), False, 'import wx\n'), ((1571, 1614), 'wx.Font', 'wx.Font', (['(18)', 'wx.ROMAN', 'wx.NORMAL', 'wx.NORMAL'], {}), '(18, wx.ROMAN, wx.NORMAL, wx.NORMAL)\n', (1578, 1614), False, 'import wx\n'), ((2105, 2165), 'wx.CheckListBox', 'wx.CheckListBox', (['self.panel', '(-1)', '(-1, -1)', '(300, 150)', 'list1'], {}), '(self.panel, -1, (-1, -1), (300, 150), list1)\n', (2120, 2165), False, 'import wx\n')] |
# encoding: utf-8
__author__ = "<NAME>"
# Taken and adapted from: https://github.com/NVIDIA/NeMo/blob/main/nemo/collections/common/callbacks/callbacks.py
import time
from pytorch_lightning.callbacks.base import Callback
from pytorch_lightning.utilities import rank_zero_only
class LogEpochTimeCallback(Callback):
    """Log each training epoch's wall-clock duration (seconds) to the
    PyTorch Lightning logger under the key ``epoch_time``."""

    def __init__(self):
        """Record a provisional start time so the attribute always exists."""
        super().__init__()
        self.epoch_start = time.time()

    @rank_zero_only
    def on_train_epoch_start(self, trainer, pl_module):
        """Reset the timer when a new training epoch begins."""
        self.epoch_start = time.time()

    @rank_zero_only
    def on_train_epoch_end(self, trainer, pl_module):
        """Compute the elapsed time and push it to the trainer's logger."""
        elapsed = time.time() - self.epoch_start
        trainer.logger.log_metrics({"epoch_time": elapsed}, step=trainer.global_step)
| [
"time.time"
] | [((538, 549), 'time.time', 'time.time', ([], {}), '()\n', (547, 549), False, 'import time\n'), ((703, 714), 'time.time', 'time.time', ([], {}), '()\n', (712, 714), False, 'import time\n'), ((857, 868), 'time.time', 'time.time', ([], {}), '()\n', (866, 868), False, 'import time\n')] |
from Board.Board import Board
from Location.Location import Location
from Board.Disc import Disc
# Bitboard implementation of the Board interface.
class BitBoard(Board):
    """Othello/Reversi board stored as two 64-bit masks.

    ``playerBoard`` holds the discs of the side to move and
    ``opponentBoard`` the other side's discs; bit 63 is square (1, 1) and
    bit 0 is square (8, 8).  The two masks are swapped after every move.
    """

    def __init__(self) -> None:
        # Color of the player whose turn it currently is.
        self.player = Disc.black
        self.turn = 1
        # Standard Othello starting position: two discs per side.
        self.playerBoard = 0x0000000810000000
        self.opponentBoard = 0x0000001008000000

    # Convert a Location to a board with only that square's bit set.
    def locationToBits(self, location:Location) -> int:
        bits = 1
        # Row-major layout: (1, 1) maps to bit 63, (8, 8) to bit 0.
        shift = 63 - (8*(location.row - 1) + (location.column - 1))
        return bits << shift

    # Place a disc at the given location (no-op when the move is illegal).
    def put(self, location:Location) -> None:
        if self.canPut(location):
            put = self.locationToBits(location)
            self.reverse(put)

    # Pass the turn; the board itself does not change.
    def passPut(self) -> None:
        pass

    # Whether a disc can be placed at the given location.
    def canPut(self, location:Location) -> bool:
        if location.checkRange():
            putBoard = self.locationToBits(location)
            legalBoard = self.makeLegalBoard()
            # Is the requested square contained in the legal-move set?
            return (putBoard & legalBoard) == putBoard
        else:
            return False

    # Build a board whose set bits are exactly the legal moves.
    def makeLegalBoard(self) -> int:
        legalBoard = 0
        # Board with bits set only on empty squares.
        blankBoard = ~(self.playerBoard | self.opponentBoard)
        # Opponent board with the left/right edge columns masked off.
        horizontalMaskedOpponentBoard = self.opponentBoard & 0x7e7e7e7e7e7e7e7e
        # Opponent board with the top/bottom edge rows masked off.
        verticalMaskedOpponentBoard = self.opponentBoard & 0x00ffffffffffff00
        # Opponent board with all four edges masked off.
        allSideMaskedOpponentBoard = self.opponentBoard & 0x007e7e7e7e7e7e00
        # Accumulates opponent discs found along the current direction.
        opponentDiscs = 0
        # Check all 8 directions.
        # At most six discs can lie on one line, hence six shift steps.
        # Left
        opponentDiscs = horizontalMaskedOpponentBoard & (self.playerBoard << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        legalBoard |= blankBoard & (opponentDiscs << 1)
        # Right
        opponentDiscs = horizontalMaskedOpponentBoard & (self.playerBoard >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        legalBoard |= blankBoard & (opponentDiscs >> 1)
        # Up
        opponentDiscs = verticalMaskedOpponentBoard & (self.playerBoard << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        legalBoard |= blankBoard & (opponentDiscs << 8)
        # Down
        opponentDiscs = verticalMaskedOpponentBoard & (self.playerBoard >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        legalBoard |= blankBoard & (opponentDiscs >> 8)
        # Upper left
        opponentDiscs = allSideMaskedOpponentBoard & (self.playerBoard << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        legalBoard |= blankBoard & (opponentDiscs << 9)
        # Upper right
        opponentDiscs = allSideMaskedOpponentBoard & (self.playerBoard << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        legalBoard |= blankBoard & (opponentDiscs << 7)
        # Lower right
        opponentDiscs = allSideMaskedOpponentBoard & (self.playerBoard >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        legalBoard |= blankBoard & (opponentDiscs >> 9)
        # Lower left
        opponentDiscs = allSideMaskedOpponentBoard & (self.playerBoard >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        legalBoard |= blankBoard & (opponentDiscs >> 7)
        return legalBoard

    # Return the playable squares as a list of Location objects.
    def getPlaceableLocation(self) -> list:
        placeableLocation = []
        legalBoard = self.makeLegalBoard()
        mask = 1
        for i in range(1, 9):
            for j in range(1, 9):
                if (legalBoard & (mask << (63 - (8*(i - 1) + (j - 1))))) != 0:
                    placeableLocation.append(Location(i, j))
        return placeableLocation

    # Flip the discs captured by playing on the square in `put`.
    def reverse(self, put:int) -> None:
        rev = self.getReverseBoard(put)
        # The mover gains the played square plus the flipped discs; the
        # opponent loses exactly the flipped discs.
        self.playerBoard ^= (put | rev)
        self.opponentBoard ^= rev

    # Return a board whose set bits are the discs flipped by `put`.
    def getReverseBoard(self, put:int) -> int:
        # Board accumulating the squares to be flipped.
        rev = 0
        # Opponent board with the left/right edge columns masked off.
        horizontalMaskedOpponentBoard = self.opponentBoard & 0x7e7e7e7e7e7e7e7e
        # Opponent board with the top/bottom edge rows masked off.
        verticalMaskedOpponentBoard = self.opponentBoard & 0x00ffffffffffff00
        # Opponent board with all four edges masked off.
        allSideMaskedOpponentBoard = self.opponentBoard & 0x007e7e7e7e7e7e00
        # Check all 8 directions; a run only flips when it is terminated
        # by one of the mover's own discs.
        # At most six discs can lie on one line, hence six shift steps.
        # Left
        opponentDiscs = horizontalMaskedOpponentBoard & (put << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs << 1)
        if (self.playerBoard & (opponentDiscs << 1)) != 0:
            rev |= opponentDiscs
        # Right
        opponentDiscs = horizontalMaskedOpponentBoard & (put >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        opponentDiscs |= horizontalMaskedOpponentBoard & (opponentDiscs >> 1)
        if (self.playerBoard & (opponentDiscs >> 1)) != 0:
            rev |= opponentDiscs
        # Up
        opponentDiscs = verticalMaskedOpponentBoard & (put << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs << 8)
        if (self.playerBoard & (opponentDiscs << 8)) != 0:
            rev |= opponentDiscs
        # Down
        opponentDiscs = verticalMaskedOpponentBoard & (put >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        opponentDiscs |= verticalMaskedOpponentBoard & (opponentDiscs >> 8)
        if (self.playerBoard & (opponentDiscs >> 8)) != 0:
            rev |= opponentDiscs
        # Upper left
        opponentDiscs = allSideMaskedOpponentBoard & (put << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 9)
        if (self.playerBoard & (opponentDiscs << 9)) != 0:
            rev |= opponentDiscs
        # Upper right
        opponentDiscs = allSideMaskedOpponentBoard & (put << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs << 7)
        if (self.playerBoard & (opponentDiscs << 7)) != 0:
            rev |= opponentDiscs
        # Lower right
        opponentDiscs = allSideMaskedOpponentBoard & (put >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 9)
        if (self.playerBoard & (opponentDiscs >> 9)) != 0:
            rev |= opponentDiscs
        # Lower left
        opponentDiscs = allSideMaskedOpponentBoard & (put >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        opponentDiscs |= allSideMaskedOpponentBoard & (opponentDiscs >> 7)
        if (self.playerBoard & (opponentDiscs >> 7)) != 0:
            rev |= opponentDiscs
        return rev

    # Whether the game is over.
    def gameIsFinished(self) -> bool:
        # The game ends when neither side has a legal move.
        playerLegalBoard = self.makeLegalBoard()
        self.swapBoard()
        opponentLegalBoard = self.makeLegalBoard()
        self.swapBoard()
        return (playerLegalBoard == 0) and (opponentLegalBoard == 0)

    # Advance to the next turn: swap sides and update bookkeeping.
    def updateBoardStatus(self):
        self.swapBoard()
        self.changePlayerColor()
        self.turn += 1

    # Exchange the mover's and opponent's bitboards.
    def swapBoard(self) -> None:
        temp = self.playerBoard
        self.playerBoard = self.opponentBoard
        self.opponentBoard = temp

    # Toggle the color of the player to move.
    def changePlayerColor(self) -> None:
        if self.player == Disc.black:
            self.player = Disc.white
        else:
            self.player = Disc.black

    # Return the color with more discs (Disc.empty on a tie).
    def getWinner(self) -> Disc:
        blackDiscNum, whiteDiscNum = self.getDiscNum()
        if blackDiscNum > whiteDiscNum:
            return Disc.black
        elif blackDiscNum < whiteDiscNum:
            return Disc.white
        else:
            return Disc.empty

    # Return the (black, white) disc counts.
    def getDiscNum(self) -> tuple:
        # Which bitboard is "black" depends on whose turn it is.
        if self.player == Disc.black:
            blackDiscNum = self.numOfDisc(self.playerBoard)
            whiteDiscNum = self.numOfDisc(self.opponentBoard)
        else:
            whiteDiscNum = self.numOfDisc(self.playerBoard)
            blackDiscNum = self.numOfDisc(self.opponentBoard)
        return blackDiscNum, whiteDiscNum

    # Count the set bits of a 64-bit board (population count).
    def numOfDisc(self, board:int) -> int:
        # A plain loop would work, but the divide-and-conquer popcount
        # runs in O(log N) bit operations instead of O(N).
        mask1bit = 0x5555555555555555
        mask2bit = 0x3333333333333333
        mask4bit = 0x0f0f0f0f0f0f0f0f
        mask8bit = 0x00ff00ff00ff00ff
        mask16bit = 0x0000ffff0000ffff
        mask32bit = 0x00000000ffffffff
        board = (board & mask1bit) + ((board >> 1) & mask1bit)
        board = (board & mask2bit) + ((board >> 2) & mask2bit)
        board = (board & mask4bit) + ((board >> 4) & mask4bit)
        board = (board & mask8bit) + ((board >> 8) & mask8bit)
        board = (board & mask16bit) + ((board >> 16) & mask16bit)
        return (board & mask32bit) + ((board >> 32) & mask32bit)

    # Return the disc color occupying the given square.
    def getLocationDisc(self, location:Location) -> Disc:
        mask = self.locationToBits(location)
        # playerBoard belongs to the side to move, so its color depends
        # on self.player.
        if self.player == Disc.black:
            if (self.playerBoard & mask) != 0:
                return Disc.black
            elif (self.opponentBoard & mask) != 0:
                return Disc.white
            else:
                return Disc.empty
        elif self.player == Disc.white:
            if (self.playerBoard & mask) != 0:
                return Disc.white
            elif (self.opponentBoard & mask) != 0:
                return Disc.black
            else:
                return Disc.empty
| [
"Location.Location.Location"
] | [((6324, 6338), 'Location.Location.Location', 'Location', (['i', 'j'], {}), '(i, j)\n', (6332, 6338), False, 'from Location.Location import Location\n')] |
from django.urls import path
from rest_framework.routers import DefaultRouter
from investor_management import views
# DRF router; no viewsets are registered on it, so only the explicit paths
# below are served.  NOTE(review): the router appears unused -- confirm
# before removing.
router = DefaultRouter()
# Namespace for reverse()/{% url %} lookups, e.g. 'investor_management:...'.
app_name = 'investor_management'
# Endpoints for creating and managing investor user accounts.
urlpatterns = [
    path('user/create/', views.CreateInvestorUserView.as_view(), name='investor_user_create'),
    path('user/manage/<int:id>', views.ManageInvestorUserView.as_view(), name='investor_user_manage')
]
| [
"investor_management.views.CreateInvestorUserView.as_view",
"investor_management.views.ManageInvestorUserView.as_view",
"rest_framework.routers.DefaultRouter"
] | [((128, 143), 'rest_framework.routers.DefaultRouter', 'DefaultRouter', ([], {}), '()\n', (141, 143), False, 'from rest_framework.routers import DefaultRouter\n'), ((220, 258), 'investor_management.views.CreateInvestorUserView.as_view', 'views.CreateInvestorUserView.as_view', ([], {}), '()\n', (256, 258), False, 'from investor_management import views\n'), ((323, 361), 'investor_management.views.ManageInvestorUserView.as_view', 'views.ManageInvestorUserView.as_view', ([], {}), '()\n', (359, 361), False, 'from investor_management import views\n')] |
import os
import sqlite3
from flask import Flask, render_template
from contextlib import closing
# Flask application backed by an SQLite file stored next to the app package.
app = Flask(__name__)
app.config.update(dict(
    # Absolute path of the SQLite database file inside the app directory.
    DATABASE=os.path.join(app.root_path, 'flaskApp.db'),
))
def connect_db():
    """Open and return a new connection to the configured SQLite database."""
    return sqlite3.connect(app.config['DATABASE'])
def init_db():
    """Create the database schema by executing the bundled schema.sql.

    Opens a short-lived connection (closed by ``closing``), runs the whole
    script, and commits.
    """
    with closing(connect_db()) as db:
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        db.commit()
@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')
if __name__ == '__main__':
    # Re-executes schema.sql on every start -- the script should therefore be
    # idempotent (e.g. CREATE TABLE IF NOT EXISTS).
    init_db()
    app.run()
"flask.Flask",
"sqlite3.connect",
"os.path.join",
"flask.render_template"
] | [((104, 119), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (109, 119), False, 'from flask import Flask, render_template\n'), ((234, 273), 'sqlite3.connect', 'sqlite3.connect', (["app.config['DATABASE']"], {}), "(app.config['DATABASE'])\n", (249, 273), False, 'import sqlite3\n'), ((498, 527), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (513, 527), False, 'from flask import Flask, render_template\n'), ((157, 199), 'os.path.join', 'os.path.join', (['app.root_path', '"""flaskApp.db"""'], {}), "(app.root_path, 'flaskApp.db')\n", (169, 199), False, 'import os\n')] |
#!/usr/bin/env python
# Four spaces as indentation [no tabs]
#
# propositional_planner.py
# ma-goal-recognition
#
# Created by <NAME> on 2020-03-12.
# Copyright 2020 <NAME>. All rights reserved.
#
from recognizer.pddl.pddl_parser import PDDL_Parser
from recognizer.pddl.pddl_planner import PDDL_Planner
# from recognizer.pddl.domain import State
from recognizer.pddl.state import applicable, apply
import time
class Propositional_Planner(PDDL_Planner):
    """Breadth-first forward search over propositional states."""

    def __init__(self, max_length=0, time_limit=0, verbose=False):
        """max_length: give up on plans longer than this (0 = unlimited).
        time_limit: abort the search after this many seconds (0 = unlimited).
        """
        super().__init__(verbose)
        self.max_length = max_length
        self.time_limit = time_limit

    def tree_length(self, plan):
        """Count the actions in a nested ``(action, tail)`` plan chain."""
        count = 0
        node = plan
        while node:
            _, node = node
            count += 1
        return count

    def solve(self, domain, initial_state, goal_state):
        """Search for a plan; return a list of actions, [] if the initial
        state already satisfies the goal, or None on failure/limits."""
        start = time.time() if self.time_limit else None
        actions = domain
        root = frozenset(initial_state)
        positive_goals = frozenset(goal_state[0])
        negative_goals = frozenset(goal_state[1])
        # Goal already holds: the empty plan suffices.
        if applicable(root, positive_goals, negative_goals):
            return []
        # Plans are kept as nested (action, parent_plan) pairs to share tails.
        seen = {root}
        frontier = [(root, None)]
        while frontier:
            current, plan = frontier.pop(0)
            # Depth bound: drop out as soon as the shallowest plan is too long.
            if self.max_length and plan is not None and self.tree_length(plan) > self.max_length:
                return None
            if self.time_limit and time.time() - start > self.time_limit:
                return None
            for act in actions:
                if not applicable(current, act.positive_preconditions, act.negative_preconditions):
                    continue
                successor = apply(current, act.add_effects, act.del_effects)
                if successor in seen:
                    continue
                if applicable(successor, positive_goals, negative_goals):
                    # Unwind the nested chain into a flat, ordered plan.
                    steps = [act]
                    tail = plan
                    while tail:
                        prev, tail = tail
                        steps.insert(0, prev)
                    return steps
                seen.add(successor)
                frontier.append((successor, (act, plan)))
        return None
def main(domain, problem):
    """Plan for the given PDDL domain/problem files and print the result."""
    planner = Propositional_Planner()
    # solve_file is presumably inherited from PDDL_Planner (it is not defined
    # in this class) and parses the files before searching -- TODO confirm.
    plan = planner.solve_file(domain, problem)
    if plan:
        print('plan:')
        for act in plan:
            print(act)
    else:
        print('No plan was found')
# ==========================================
# Main
# ==========================================
if __name__ == '__main__':
    import sys
    # Usage: propositional_planner.py <domain.pddl> <problem.pddl>
    domain = sys.argv[1]
    problem = sys.argv[2]
    main(domain,problem)
"recognizer.pddl.state.applicable",
"recognizer.pddl.state.apply",
"time.time"
] | [((1218, 1255), 'recognizer.pddl.state.applicable', 'applicable', (['state', 'goal_pos', 'goal_not'], {}), '(state, goal_pos, goal_not)\n', (1228, 1255), False, 'from recognizer.pddl.state import applicable, apply\n'), ((998, 1009), 'time.time', 'time.time', ([], {}), '()\n', (1007, 1009), False, 'import time\n'), ((1740, 1813), 'recognizer.pddl.state.applicable', 'applicable', (['state', 'act.positive_preconditions', 'act.negative_preconditions'], {}), '(state, act.positive_preconditions, act.negative_preconditions)\n', (1750, 1813), False, 'from recognizer.pddl.state import applicable, apply\n'), ((1847, 1893), 'recognizer.pddl.state.apply', 'apply', (['state', 'act.add_effects', 'act.del_effects'], {}), '(state, act.add_effects, act.del_effects)\n', (1852, 1893), False, 'from recognizer.pddl.state import applicable, apply\n'), ((1638, 1649), 'time.time', 'time.time', ([], {}), '()\n', (1647, 1649), False, 'import time\n'), ((1970, 2011), 'recognizer.pddl.state.applicable', 'applicable', (['new_state', 'goal_pos', 'goal_not'], {}), '(new_state, goal_pos, goal_not)\n', (1980, 2011), False, 'from recognizer.pddl.state import applicable, apply\n')] |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from typing import Callable, Optional
def is_param_in_hook_signature(
    hook_fx: Callable, param: str, explicit: bool = False, min_args: Optional[int] = None
) -> bool:
    """Check whether ``hook_fx`` can receive ``param``.

    Args:
        hook_fx: the hook callable
        param: name of the parameter to look for
        explicit: require the parameter to be declared by name
            (otherwise a bare ``*args`` also counts as accepting it)
        min_args: alternatively accept any signature declaring at least
            this many positional parameters
    """
    spec = inspect.getfullargspec(hook_fx)
    named = spec.args[1:]  # drop `self`
    if param in named:
        return True
    if not explicit and spec.varargs is not None:
        return True
    return isinstance(min_args, int) and len(named) >= min_args
| [
"inspect.getfullargspec"
] | [((1048, 1079), 'inspect.getfullargspec', 'inspect.getfullargspec', (['hook_fx'], {}), '(hook_fx)\n', (1070, 1079), False, 'import inspect\n')] |
from django.contrib import admin
from .models import ActiveCompound, CompoundSet, ComercialDrug, Prescription
@admin.register(ActiveCompound)
class ActiveCompoundAdmin(admin.ModelAdmin):
    # Enable the admin search box over the denormalised names field.
    search_fields = ['_search_names']
@admin.register(CompoundSet)
class CompoundSetAdmin(admin.ModelAdmin):
    # Searches via the related compound.
    # NOTE(review): ActiveCompoundAdmin searches '_search_names' (leading
    # underscore); confirm that 'active_compound__search_names' resolves to a
    # real field and is not missing an underscore.
    search_fields = ['active_compound__search_names']
@admin.register(ComercialDrug)
class ComercialDrugAdmin(admin.ModelAdmin):
    search_fields = ['_search_names', '_name']
    # Use async autocomplete widgets instead of a huge multi-select; requires
    # search_fields on CompoundSetAdmin (defined above).
    autocomplete_fields = ['compound_sets']
@admin.register(Prescription)
class PrescriptionAdmin(admin.ModelAdmin):
    autocomplete_fields = ['comercial_drug']
| [
"django.contrib.admin.register"
] | [((114, 144), 'django.contrib.admin.register', 'admin.register', (['ActiveCompound'], {}), '(ActiveCompound)\n', (128, 144), False, 'from django.contrib import admin\n'), ((228, 255), 'django.contrib.admin.register', 'admin.register', (['CompoundSet'], {}), '(CompoundSet)\n', (242, 255), False, 'from django.contrib import admin\n'), ((352, 381), 'django.contrib.admin.register', 'admin.register', (['ComercialDrug'], {}), '(ComercialDrug)\n', (366, 381), False, 'from django.contrib import admin\n'), ((516, 544), 'django.contrib.admin.register', 'admin.register', (['Prescription'], {}), '(Prescription)\n', (530, 544), False, 'from django.contrib import admin\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 30 09:52:31 2021
@author: HaoLI
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 8 11:48:41 2021
@author: HaoLI
"""
import torch, torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from torch.utils.data.sampler import WeightedRandomSampler
import torch.utils.data as data_utils
import pandas as pd
import numpy as np
import os #for working directory
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc, roc_auc_score # 计算roc和auc
import time
import datetime
from imblearn.over_sampling import RandomOverSampler
from sklearn.preprocessing import MinMaxScaler, LabelEncoder
import random
use_gpu = torch.cuda.is_available()
print("GPU",use_gpu)
list_rec = [] # collects one row of hyper-parameters + AUCs per run
randomseed = 22
random.seed(randomseed)
layer1=196
layer2=196
oversample_ratio=0.5
training_epochs = 80
minibatch_size = 5000
learning_rate=2e-4
penalty=2 #p=1 for L1; p=0 for L2, weight_decay only for L2 ; p=2 for default. Exponent of the norm used for regularisation: 0 -> L2, 1 -> L1.
weight_decay=0.0125 # weight_decay is the L2-regularisation coefficient
dropout=0.0
#os.getcwd()
os.chdir('/Users/HaoLI/Stata/credit/data')
df = pd.read_csv('data1210rename_use.csv')
col_names = list(df.columns.values[3:30])
col_names.remove('default_geq_1') # the target columns must not be part of X
col_names.remove('default_geq_2')
col_names.remove('default_geq_3')
base_col_names = col_names[0:13] # for baseline model: bank data + morning/noon/evening consumption features
df_fillna = df.fillna(0) # fill NA with 0: no consumption is recorded as 0
X = df_fillna[col_names]
y = df_fillna.default_geq_1 # Target variable
X_base = df_fillna[base_col_names]
y_base = df_fillna.default_geq_1 # Target variable
layer0=len(X.columns) # number of neurons in the input layer
#min_max_scaler = MinMaxScaler()
#X = min_max_scaler.fit_transform(X)
sc = StandardScaler()# standardise each column; note X turns from a DataFrame into a numpy array here
X = sc.fit_transform(X)
ros = RandomOverSampler(random_state=0)
# Grid-search scaffold: each list currently holds a single value, so exactly
# one configuration is trained; widen the lists to sweep hyper-parameters.
for layer1 in [196]:
    for layer2 in [196]:
        for weight_decay in [0.0125]:
            for training_epochs in [80]:
                for minibatch_size in [5000]:
                    for random_state in [18]:
                        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=random_state) # data types are dataframe
                        X_train, y_train = ros.fit_resample(X_train, y_train)
                        y_train = y_train.values
                        y_test = np.array(y_test)
                        # construct NN
                        class CreditNet(nn.Module):
                            def __init__(self): #p=1 for L1; p=0 for L2, weight_decay only for L2 ; p=2 for default (norm exponent: 0 -> L2, 1 -> L1 regularisation)
                                super().__init__()
                                self.fc1 = nn.Linear(layer0, layer1) # fc: fully connected
                                #self.bn1 = nn.BatchNorm1d(num_features=64, momentum=0.1) #default momentum = 0.1
                                self.fc2 = nn.Linear(layer1, layer2)
                                #self.fc3 = nn.Linear(layer2, layer3)
                                #self.bn3 = nn.BatchNorm1d(num_features=32)
                                #self.fc4 = nn.Linear(28, 24)
                                self.fc5 = nn.Linear(layer2, 1)
                            # x represents our data
                            def forward(self, x): # x is the data
                                x = F.relu(self.fc1(x)) # first x pass through
                                #x = self.bn1(x)
                                x = F.dropout(x, p=dropout)
                                x = F.relu(self.fc2(x))
                                x = F.dropout(x, p=dropout)
                                #x = F.relu(self.fc3(x))
                                #x = self.bn3(x)
                                #x = F.dropout(x, p=0.25)
                                #x = F.relu(self.fc4(x))
                                #x = F.softmax(self.fc5(x),dim=0)
                                x = torch.sigmoid(self.fc5(x))
                                return x
                        net = CreditNet().double() # .double() casts parameters to float64; in pytorch only floating-point tensors carry gradients
                        # NOTE(review): `device` below is computed but never used; the
                        # model is moved to `device1` (cuda:1) instead -- confirm intent.
                        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
                        # alternatively: device = torch.device("cuda:0")
                        device1 = torch.device("cuda:1")
                        if torch.cuda.is_available():
                            #net = net.cuda()
                            net = net.to(device1) # move the model to GPU #1
                            # or: model.to(device1)
                        ########### Train #################
                        #loss_fn = nn.CrossEntropyLoss()
                        #loss_fn = nn.BCELoss() # binary cross entropy loss
                        #optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate) # auto adjust lr, better than sgd
                        #optimizer = torch.optim.SGD(net.parameters(), lr=learning_rate, momentum = 0.9) # auto adjust lr, better than sgd; sgd stable
                        # Adam with weight_decay=0.0 means no regularisation;
                        # weight_decay>0 adds L2 regularisation of that strength.
                        optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate, weight_decay=weight_decay) # auto adjust lr, better than sgd
                        # if we use L2 regularization, apply the following line
                        #optimizer = torch.optim.SGD(net.parameters(), lr=learning_rate, weight_decay=weight_decay)
                        X_train = torch.from_numpy(X_train) # transfer to Tensor, no need to add .double(), because it is already float data type
                        y_train = torch.from_numpy(y_train).double() # .double() makes the data floating point; only floating tensors carry gradients
                        #weights_tensor = torch.from_numpy(overwt_arr_y_lossfn)
                        if torch.cuda.is_available():
                            X_train = X_train.to(device1)
                            y_train = y_train.to(device1)
                            #weights_tensor = weights_tensor.to(device1)
                        train = data_utils.TensorDataset(X_train, y_train) # bundle X and y for training
                        train_loader = data_utils.DataLoader(train, batch_size=minibatch_size, shuffle=True) # batch_size = rows fed to the net per step; shuffle randomises the order every epoch
                        # !tensorboard --logdir './runs'
                        from tensorboardX import SummaryWriter
                        writer = SummaryWriter()
                        #%reload_ext tensorboard
                        # Load the TensorBoard notebook extension
                        for epoch in range(training_epochs):
                            y_train_labels = [] # create an empty array
                            y_train_pred = []
                            for b, data in enumerate(train_loader, 0): # fetch one mini-batch per iteration
                                inputs, labels = data#.cuda() # inputs and labels follows that when loaded
                                if torch.cuda.is_available():
                                    inputs = inputs.to(device1)
                                    labels = labels.to(device1)
                                    #weights = weights.to(device1)
                                #print("inputs shape", inputs.shape, labels.shape)
                                #print("inputs", inputs)
                                #print("labels", labels)
                                optimizer.zero_grad() #reset gradients, i.e. zero the gradient buffers
                                y_pred = net(inputs) # obtain the predicted values, a Tensor
                                y_pred = y_pred.view(y_pred.size()[0])
                                #print("y_pred", y_pred)
                                y_train_labels = np.append(y_train_labels, labels.cpu().numpy())
                                y_train_pred = np.append(y_train_pred,y_pred.detach().cpu().numpy())
                                loss_fn = nn.BCELoss() # binary cross entropy loss, with weights
                                if torch.cuda.is_available():
                                    loss_fn = loss_fn.to(device1)
                                loss = loss_fn(y_pred, labels) # 2 tensors in, 1 value out
                                loss.backward() # backward pass
                                optimizer.step() # update weights
                                if b % 100 == 0: # log the loss every 100 batches
                                    #print('Epochs: {}, batch: {} loss: {}'.format(epoch, b, loss))
                                    writer.add_scalar('NN_oversample',loss, epoch)
                        writer.close()
                        #%tensorboard --logdir  # point tensorboard at the log directory
                        X_test = torch.from_numpy(X_test) # check the tested results
                        y_test = torch.from_numpy(y_test).double()
                        if torch.cuda.is_available():
                            X_test = X_test.to(device1)
                            y_test = y_test.to(device1)
                        test = data_utils.TensorDataset(X_test, y_test)
                        test_loader = data_utils.DataLoader(test, batch_size=minibatch_size, shuffle=True)
                        y_test_labels = []
                        y_test_pred = []
                        with torch.no_grad(): # context manager: no gradients are tracked inside this block
                            for data in test_loader:
                                inputs, labels = data
                                # NOTE(review): unlike the training loop, inputs/labels
                                # are NOT moved to device1 here, while `net` is -- on a
                                # CUDA machine net(inputs) would fail with a device
                                # mismatch. Works on CPU only; confirm intent.
                                #inputs = inputs.to(device1)
                                #labels = labels.to(device1)
                                #print("inputs", inputs)
                                #print("labels", labels)
                                outputs = net(inputs)
                                outputs = outputs.view(outputs.size()[0])
                                #print("outputs", outputs)
                                #print("predicted", predicted.numpy())
                                y_test_labels = np.append(y_test_labels,labels.cpu().numpy())
                                y_test_pred = np.append(y_test_pred,outputs.cpu().numpy())
                        #print("Y_test_labels", Y_test_labels)
                        #print("Y_test_pred", Y_test_pred)
                        #### plot ROC, compute AUC ###
                        # y_true is ground truth labels, y_score is predicted probabilities generated by sklearn classifier
                        test_fpr, test_tpr, te_thresholds = roc_curve(y_true = y_test_labels, y_score = y_test_pred)
                        #print("AUC TEST = ", auc(test_fpr, test_tpr))
                        train_fpr, train_tpr, tr_thresholds = roc_curve(y_true = y_train_labels, y_score = y_train_pred) # /w_ytrain, such that return the array to 0,1 array
                        #print("AUC TRAIN = ", auc(train_fpr, train_tpr))
                        #print('resample: {}, Epochs: {}, batch size: {}, '.format(oversample_ratio, training_epochs, minibatch_size))
                        #print(net)
                        plt.grid()
                        plt.plot(train_fpr, train_tpr, label=" AUC TRAIN ="+str(auc(train_fpr, train_tpr)))
                        plt.plot(test_fpr, test_tpr, label=" AUC TEST ="+str(auc(test_fpr, test_tpr)))
                        plt.plot([0,1],[0,1],'g--')
                        plt.legend()
                        plt.xlabel("True Positive Rate")
                        plt.ylabel("False Positive Rate")
                        t='''
                        training_epochs=%s, minibatch_size=%s, 
                        learning_rate=%s, penalty=L%s, weight_decay=%s, 
                        dropout=%s, 24=>%s=>%s=>1, myoversampling, random_state=%s,
                        randomseed=%s
                        '''%(training_epochs,minibatch_size,learning_rate,
                             penalty, weight_decay, dropout, layer1, layer2, random_state,randomseed)
                        plt.title("AUC(Neural Network ROC curve)"+t)
                        plt.grid(color='black', linestyle='-', linewidth=0.5)
                        time1 = datetime.datetime.now()
                        # Format the current time to use as the output file name.
                        time2 = time1.strftime('%Y-%m-%d-%H%M%S')
                        plt.savefig("/Users/HaoLI/Stata/credit/out/ROC figure/Figure_"+time2+".png", bbox_inches = 'tight')
                        plt.show()
                        list_rec.append([auc(train_fpr, train_tpr), auc(test_fpr, test_tpr),
                                         training_epochs,minibatch_size,learning_rate,
                                         penalty, weight_decay, dropout, layer1, layer2,
                                         random_state, randomseed
                                         ])
list_rec_1 = list_rec
df = pd.DataFrame(list_rec, columns = ['IS_AUC','OOS_AUC','training_epochs',
                                 'minibatch_size','learning_rate',
                                 'penalty', 'weight_decay', 'dropout',
                                 'layer1', 'layer2', 'random_state','randomseed'])
df.to_csv('NN_adj.csv') | [
"matplotlib.pyplot.title",
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"torch.nn.functional.dropout",
"torch.utils.data.TensorDataset",
"torch.device",
"torch.no_grad",
"os.chdir",
"pandas.DataFrame",
"torch.nn.BCELoss",
"torch.utils.d... | [((851, 876), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (874, 876), False, 'import torch, torch.nn as nn\n'), ((934, 957), 'random.seed', 'random.seed', (['randomseed'], {}), '(randomseed)\n', (945, 957), False, 'import random\n'), ((1253, 1295), 'os.chdir', 'os.chdir', (['"""/Users/HaoLI/Stata/credit/data"""'], {}), "('/Users/HaoLI/Stata/credit/data')\n", (1261, 1295), False, 'import os\n'), ((1301, 1338), 'pandas.read_csv', 'pd.read_csv', (['"""data1210rename_use.csv"""'], {}), "('data1210rename_use.csv')\n", (1312, 1338), True, 'import pandas as pd\n'), ((1885, 1901), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (1899, 1901), False, 'from sklearn.preprocessing import StandardScaler\n'), ((2024, 2057), 'imblearn.over_sampling.RandomOverSampler', 'RandomOverSampler', ([], {'random_state': '(0)'}), '(random_state=0)\n', (2041, 2057), False, 'from imblearn.over_sampling import RandomOverSampler\n'), ((13406, 13609), 'pandas.DataFrame', 'pd.DataFrame', (['list_rec'], {'columns': "['IS_AUC', 'OOS_AUC', 'training_epochs', 'minibatch_size', 'learning_rate',\n 'penalty', 'weight_decay', 'dropout', 'layer1', 'layer2',\n 'random_state', 'randomseed']"}), "(list_rec, columns=['IS_AUC', 'OOS_AUC', 'training_epochs',\n 'minibatch_size', 'learning_rate', 'penalty', 'weight_decay', 'dropout',\n 'layer1', 'layer2', 'random_state', 'randomseed'])\n", (13418, 13609), True, 'import pandas as pd\n'), ((2334, 2398), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.3)', 'random_state': 'random_state'}), '(X, y, test_size=0.3, random_state=random_state)\n', (2350, 2398), False, 'from sklearn.model_selection import train_test_split\n'), ((2586, 2602), 'numpy.array', 'np.array', (['y_test'], {}), '(y_test)\n', (2594, 2602), True, 'import numpy as np\n'), ((4590, 4612), 'torch.device', 'torch.device', (['"""cuda:1"""'], {}), "('cuda:1')\n", (4602, 4612), False, 
'import torch, torch.nn as nn\n'), ((4641, 4666), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4664, 4666), False, 'import torch, torch.nn as nn\n'), ((5838, 5863), 'torch.from_numpy', 'torch.from_numpy', (['X_train'], {}), '(X_train)\n', (5854, 5863), False, 'import torch, torch.nn as nn\n'), ((6189, 6214), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6212, 6214), False, 'import torch, torch.nn as nn\n'), ((6438, 6480), 'torch.utils.data.TensorDataset', 'data_utils.TensorDataset', (['X_train', 'y_train'], {}), '(X_train, y_train)\n', (6462, 6480), True, 'import torch.utils.data as data_utils\n'), ((6549, 6618), 'torch.utils.data.DataLoader', 'data_utils.DataLoader', (['train'], {'batch_size': 'minibatch_size', 'shuffle': '(True)'}), '(train, batch_size=minibatch_size, shuffle=True)\n', (6570, 6618), True, 'import torch.utils.data as data_utils\n'), ((6934, 6949), 'tensorboardX.SummaryWriter', 'SummaryWriter', ([], {}), '()\n', (6947, 6949), False, 'from tensorboardX import SummaryWriter\n'), ((9267, 9291), 'torch.from_numpy', 'torch.from_numpy', (['X_test'], {}), '(X_test)\n', (9283, 9291), False, 'import torch, torch.nn as nn\n'), ((9414, 9439), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (9437, 9439), False, 'import torch, torch.nn as nn\n'), ((9585, 9625), 'torch.utils.data.TensorDataset', 'data_utils.TensorDataset', (['X_test', 'y_test'], {}), '(X_test, y_test)\n', (9609, 9625), True, 'import torch.utils.data as data_utils\n'), ((9664, 9732), 'torch.utils.data.DataLoader', 'data_utils.DataLoader', (['test'], {'batch_size': 'minibatch_size', 'shuffle': '(True)'}), '(test, batch_size=minibatch_size, shuffle=True)\n', (9685, 9732), True, 'import torch.utils.data as data_utils\n'), ((11063, 11115), 'sklearn.metrics.roc_curve', 'roc_curve', ([], {'y_true': 'y_test_labels', 'y_score': 'y_test_pred'}), '(y_true=y_test_labels, y_score=y_test_pred)\n', (11072, 11115), False, 'from 
sklearn.metrics import roc_curve, auc, roc_auc_score\n'), ((11253, 11307), 'sklearn.metrics.roc_curve', 'roc_curve', ([], {'y_true': 'y_train_labels', 'y_score': 'y_train_pred'}), '(y_true=y_train_labels, y_score=y_train_pred)\n', (11262, 11307), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score\n'), ((11636, 11646), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (11644, 11646), True, 'import matplotlib.pyplot as plt\n'), ((11882, 11913), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 1]', '[0, 1]', '"""g--"""'], {}), "([0, 1], [0, 1], 'g--')\n", (11890, 11913), True, 'import matplotlib.pyplot as plt\n'), ((11934, 11946), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (11944, 11946), True, 'import matplotlib.pyplot as plt\n'), ((11971, 12003), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""True Positive Rate"""'], {}), "('True Positive Rate')\n", (11981, 12003), True, 'import matplotlib.pyplot as plt\n'), ((12028, 12061), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""False Positive Rate"""'], {}), "('False Positive Rate')\n", (12038, 12061), True, 'import matplotlib.pyplot as plt\n'), ((12548, 12594), 'matplotlib.pyplot.title', 'plt.title', (["('AUC(Neural Network ROC curve)' + t)"], {}), "('AUC(Neural Network ROC curve)' + t)\n", (12557, 12594), True, 'import matplotlib.pyplot as plt\n'), ((12617, 12670), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'color': '"""black"""', 'linestyle': '"""-"""', 'linewidth': '(0.5)'}), "(color='black', linestyle='-', linewidth=0.5)\n", (12625, 12670), True, 'import matplotlib.pyplot as plt\n'), ((12703, 12726), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (12724, 12726), False, 'import datetime\n'), ((12863, 12968), 'matplotlib.pyplot.savefig', 'plt.savefig', (["('/Users/HaoLI/Stata/credit/out/ROC figure/Figure_' + time2 + '.png')"], {'bbox_inches': '"""tight"""'}), "('/Users/HaoLI/Stata/credit/out/ROC figure/Figure_' + time2 +\n '.png', bbox_inches='tight')\n", (12874, 
12968), True, 'import matplotlib.pyplot as plt\n'), ((13011, 13021), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13019, 13021), True, 'import matplotlib.pyplot as plt\n'), ((9847, 9862), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (9860, 9862), False, 'import torch, torch.nn as nn\n'), ((2968, 2993), 'torch.nn.Linear', 'nn.Linear', (['layer0', 'layer1'], {}), '(layer0, layer1)\n', (2977, 2993), True, 'import torch, torch.nn as nn\n'), ((3173, 3198), 'torch.nn.Linear', 'nn.Linear', (['layer1', 'layer2'], {}), '(layer1, layer2)\n', (3182, 3198), True, 'import torch, torch.nn as nn\n'), ((3450, 3470), 'torch.nn.Linear', 'nn.Linear', (['layer2', '(1)'], {}), '(layer2, 1)\n', (3459, 3470), True, 'import torch, torch.nn as nn\n'), ((3754, 3777), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'dropout'}), '(x, p=dropout)\n', (3763, 3777), True, 'import torch.nn.functional as F\n'), ((3870, 3893), 'torch.nn.functional.dropout', 'F.dropout', (['x'], {'p': 'dropout'}), '(x, p=dropout)\n', (3879, 3893), True, 'import torch.nn.functional as F\n'), ((4460, 4485), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4483, 4485), False, 'import torch, torch.nn as nn\n'), ((5984, 6009), 'torch.from_numpy', 'torch.from_numpy', (['y_train'], {}), '(y_train)\n', (6000, 6009), False, 'import torch, torch.nn as nn\n'), ((7473, 7498), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7496, 7498), False, 'import torch, torch.nn as nn\n'), ((8457, 8469), 'torch.nn.BCELoss', 'nn.BCELoss', ([], {}), '()\n', (8467, 8469), True, 'import torch, torch.nn as nn\n'), ((8547, 8572), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (8570, 8572), False, 'import torch, torch.nn as nn\n'), ((9352, 9376), 'torch.from_numpy', 'torch.from_numpy', (['y_test'], {}), '(y_test)\n', (9368, 9376), False, 'import torch, torch.nn as nn\n'), ((13063, 13088), 'sklearn.metrics.auc', 'auc', (['train_fpr', 'train_tpr'], 
{}), '(train_fpr, train_tpr)\n', (13066, 13088), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score\n'), ((13090, 13113), 'sklearn.metrics.auc', 'auc', (['test_fpr', 'test_tpr'], {}), '(test_fpr, test_tpr)\n', (13093, 13113), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score\n'), ((11727, 11752), 'sklearn.metrics.auc', 'auc', (['train_fpr', 'train_tpr'], {}), '(train_fpr, train_tpr)\n', (11730, 11752), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score\n'), ((11832, 11855), 'sklearn.metrics.auc', 'auc', (['test_fpr', 'test_tpr'], {}), '(test_fpr, test_tpr)\n', (11835, 11855), False, 'from sklearn.metrics import roc_curve, auc, roc_auc_score\n')] |
# Generated by Django 3.1.13 on 2021-12-06 21:00
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the two preprocessed BAU emissions fields from SiteModel."""
    dependencies = [
        ('reo', '0141_auto_20211202_2315'),
    ]
    operations = [
        # Auto-generated; removes the cached business-as-usual emissions
        # totals -- presumably now computed elsewhere (verify before editing).
        migrations.RemoveField(
            model_name='sitemodel',
            name='preprocessed_BAU_lifecycle_emissions_tCO2',
        ),
        migrations.RemoveField(
            model_name='sitemodel',
            name='preprocessed_BAU_year_one_emissions_tCO2',
        ),
    ]
| [
"django.db.migrations.RemoveField"
] | [((224, 325), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""sitemodel"""', 'name': '"""preprocessed_BAU_lifecycle_emissions_tCO2"""'}), "(model_name='sitemodel', name=\n 'preprocessed_BAU_lifecycle_emissions_tCO2')\n", (246, 325), False, 'from django.db import migrations\n'), ((365, 465), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""sitemodel"""', 'name': '"""preprocessed_BAU_year_one_emissions_tCO2"""'}), "(model_name='sitemodel', name=\n 'preprocessed_BAU_year_one_emissions_tCO2')\n", (387, 465), False, 'from django.db import migrations\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(0, os.path.abspath('.'))
import cv2
import numpy as np
import common
import imgpheno as ft
def main():
    """Parse command-line arguments and split each input image into segments."""
    logging.basicConfig(level=logging.INFO, format='%(levelname)s %(message)s')
    parser = argparse.ArgumentParser(description='Test image segmentation and splitting')
    parser.add_argument('files', metavar='FILE', nargs='+', help='Input images')
    parser.add_argument('-o', '--output', metavar='PATH', default=".", help='Path for output files.')
    parser.add_argument('-i', '--iters', metavar='N', type=int, default=5, help="The number of grabCut iterations. Default is 5.")
    parser.add_argument('-m', '--margin', metavar='N', type=int, default=1, help="The margin of the foreground rectangle from the edges. Default is 1.")
    parser.add_argument('--max-size', metavar='N', type=float, help="Scale the input image down if its perimeter exceeds N. Default is no scaling.")
    parser.add_argument('--min-size-out', metavar='N', type=int, default=200, help="Set the minimum perimeter for output images. Smaller images are ignored. Default is 200.")
    args = parser.parse_args()
    # Each file is processed independently; failures are reported and skipped.
    for f in args.files:
        split_image(f, args)
    sys.stderr.write("Output was saved to %s\n" % args.output)
    return 0
def split_image(path, args):
    """Segment one image with grabCut and write each segment to args.output.

    Returns 0 on success, -1 when the image cannot be read.  Segments whose
    height+width sum is below ``args.min_size_out`` are skipped.
    """
    img = cv2.imread(path)
    # BUG FIX: cv2.imread returns None on failure, but `img == None` on a
    # successfully loaded ndarray is an *elementwise* comparison whose truth
    # value raises ValueError -- use an identity check instead.
    if img is None or img.size == 0:
        sys.stderr.write("Failed to read %s. Skipping.\n" % path)
        return -1

    logging.info("Processing %s ..." % path)

    # Scale the image down if its perimeter exceeds the maximum (if set).
    img = common.scale_max_perimeter(img, args.max_size)

    logging.info("Segmenting...")

    # Perform segmentation.
    mask = common.grabcut(img, args.iters, None, args.margin)

    # Create a binary mask. Foreground (sure + probable) is made white,
    # background black.
    bin_mask = np.where((mask==cv2.GC_FGD) + (mask==cv2.GC_PR_FGD), 255, 0).astype('uint8')

    # Split the image into segments.
    segments = ft.split_by_mask(img, bin_mask)

    logging.info("Exporting segments...")

    for i, im in enumerate(segments):
        # Ignore segments that are too small to be useful.
        if sum(im.shape[:2]) < args.min_size_out:
            continue
        root, ext = os.path.splitext(os.path.basename(path))
        out_path = "%s_%d%s" % (root, i, ext)
        out_path = os.path.join(args.output, out_path)
        logging.info("\t%s" % out_path)
        cv2.imwrite(out_path, im)

    return 0
# Script entry point.
if __name__ == "__main__":
    main()
| [
"os.path.abspath",
"argparse.ArgumentParser",
"logging.basicConfig",
"os.path.basename",
"common.scale_max_perimeter",
"cv2.imwrite",
"common.grabcut",
"imgpheno.split_by_mask",
"cv2.imread",
"logging.info",
"numpy.where",
"os.path.splitext",
"sys.stderr.write",
"os.path.join"
] | [((119, 140), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (134, 140), False, 'import os\n'), ((161, 181), 'os.path.abspath', 'os.path.abspath', (['"""."""'], {}), "('.')\n", (176, 181), False, 'import os\n'), ((268, 343), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO', 'format': '"""%(levelname)s %(message)s"""'}), "(level=logging.INFO, format='%(levelname)s %(message)s')\n", (287, 343), False, 'import logging\n'), ((358, 434), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Test image segmentation and splitting"""'}), "(description='Test image segmentation and splitting')\n", (381, 434), False, 'import argparse\n'), ((1317, 1375), 'sys.stderr.write', 'sys.stderr.write', (["('Output was saved to %s\\n' % args.output)"], {}), "('Output was saved to %s\\n' % args.output)\n", (1333, 1375), False, 'import sys\n'), ((1430, 1446), 'cv2.imread', 'cv2.imread', (['path'], {}), '(path)\n', (1440, 1446), False, 'import cv2\n'), ((1573, 1613), 'logging.info', 'logging.info', (["('Processing %s ...' % path)"], {}), "('Processing %s ...' 
% path)\n", (1585, 1613), False, 'import logging\n'), ((1699, 1745), 'common.scale_max_perimeter', 'common.scale_max_perimeter', (['img', 'args.max_size'], {}), '(img, args.max_size)\n', (1725, 1745), False, 'import common\n'), ((1751, 1780), 'logging.info', 'logging.info', (['"""Segmenting..."""'], {}), "('Segmenting...')\n", (1763, 1780), False, 'import logging\n'), ((1821, 1871), 'common.grabcut', 'common.grabcut', (['img', 'args.iters', 'None', 'args.margin'], {}), '(img, args.iters, None, args.margin)\n', (1835, 1871), False, 'import common\n'), ((2090, 2121), 'imgpheno.split_by_mask', 'ft.split_by_mask', (['img', 'bin_mask'], {}), '(img, bin_mask)\n', (2106, 2121), True, 'import imgpheno as ft\n'), ((2127, 2164), 'logging.info', 'logging.info', (['"""Exporting segments..."""'], {}), "('Exporting segments...')\n", (2139, 2164), False, 'import logging\n'), ((1492, 1549), 'sys.stderr.write', 'sys.stderr.write', (["('Failed to read %s. Skipping.\\n' % path)"], {}), "('Failed to read %s. Skipping.\\n' % path)\n", (1508, 1549), False, 'import sys\n'), ((2290, 2312), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (2306, 2312), False, 'import os\n'), ((2328, 2350), 'os.path.splitext', 'os.path.splitext', (['name'], {}), '(name)\n', (2344, 2350), False, 'import os\n'), ((2423, 2458), 'os.path.join', 'os.path.join', (['args.output', 'out_path'], {}), '(args.output, out_path)\n', (2435, 2458), False, 'import os\n'), ((2467, 2498), 'logging.info', 'logging.info', (["('\\t%s' % out_path)"], {}), "('\\t%s' % out_path)\n", (2479, 2498), False, 'import logging\n'), ((2507, 2532), 'cv2.imwrite', 'cv2.imwrite', (['out_path', 'im'], {}), '(out_path, im)\n', (2518, 2532), False, 'import cv2\n'), ((1960, 2024), 'numpy.where', 'np.where', (['((mask == cv2.GC_FGD) + (mask == cv2.GC_PR_FGD))', '(255)', '(0)'], {}), '((mask == cv2.GC_FGD) + (mask == cv2.GC_PR_FGD), 255, 0)\n', (1968, 2024), True, 'import numpy as np\n')] |
from setuptools import setup, find_packages

# Packaging entry point for the reply_card_corrector distribution.
# All metadata is collected in one mapping and splatted into setup().
_PACKAGE_METADATA = {
    "name": "reply_card_corrector",
    "version": "0.1.0",
    "packages": find_packages(),
    "author": "<NAME>",
    "author_email": "<EMAIL>",
    "license": "MIT",
    "install_requires": ["opencv-python", "numpy"],
}

setup(**_PACKAGE_METADATA)
| [
"setuptools.find_packages"
] | [((119, 134), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (132, 134), False, 'from setuptools import setup, find_packages\n')] |
import uuid
from static.common.database import Database
class Appointment(object):
    """An appointment request, persisted in the 'appointments' collection."""

    def __init__(self, owner_id, date, time, confirmed=False, _id=None):
        self.owner_id = owner_id
        self.date = date
        self.time = time
        self.confirmed = confirmed
        # Generate a fresh hex id unless an existing one was supplied
        # (e.g. when re-hydrating from the database).
        if _id is None:
            self._id = uuid.uuid4().hex
        else:
            self._id = _id

    def json(self):
        """Return a plain-dict representation suitable for storage."""
        return {
            'owner_id': self.owner_id,
            'date': self.date,
            'time': self.time,
            'confirmed': self.confirmed,
            '_id': self._id
        }

    def save_to_db(self):
        """Insert this appointment into the database."""
        Database.insert('appointments', self.json())

    def format_date(self):
        """Return the date with its dash-separated parts reversed."""
        parts = self.date.split('-')
        parts.reverse()
        return '-'.join(parts)

    def as_text(self):
        """Return a human-readable (Danish) summary of the appointment."""
        return 'Du har en forespurgt aftale med OdenseFotografen d. {} kl. {}'.format(self.format_date(), self.time)

    @classmethod
    def find_by_id(cls, _id):
        """Load and return the appointment stored under ``_id``."""
        app_data = Database.find_one('appointments', {'_id': _id})
        return cls(**app_data)
| [
"static.common.database.Database.find_one",
"uuid.uuid4"
] | [((926, 973), 'static.common.database.Database.find_one', 'Database.find_one', (['"""appointments"""', "{'_id': _id}"], {}), "('appointments', {'_id': _id})\n", (943, 973), False, 'from static.common.database import Database\n'), ((295, 307), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (305, 307), False, 'import uuid\n')] |
import argparse
import logging
import json
import copy
import arrow
import requests
import osmapi
import re
import polyline as pl
import osrm as osrm
import shapely.geometry as geo
# Sensing regime definitions keyed by config id; used to expand the
# "config" references in calibration tests and sensing settings.
# Bug fix: the file handle was previously opened and never closed.
with open("sensing_regimes.all.specs.json") as _sensing_config_file:
    sensing_configs = json.load(_sensing_config_file)
def validate_and_fill_datetime(current_spec):
    """Return a shallow copy of the spec with epoch timestamps filled in.

    ``start_fmt_date``/``end_fmt_date`` are interpreted in the region's
    timezone and converted to ``start_ts``/``end_ts``.
    """
    filled = copy.copy(current_spec)
    tz = current_spec["region"]["timezone"]
    # NOTE(review): ``.timestamp`` as an attribute matches older arrow
    # releases; newer arrow exposes ``timestamp()`` as a method -- confirm
    # the pinned arrow version before changing this.
    filled["start_ts"] = arrow.get(current_spec["start_fmt_date"], tzinfo=tz).timestamp
    filled["end_ts"] = arrow.get(current_spec["end_fmt_date"], tzinfo=tz).timestamp
    return filled
def node_to_geojson_coords(node_id):
    """Look up an OSM node and return its position as GeoJSON [lon, lat]."""
    details = osmapi.OsmApi().NodeGet(node_id)
    return [details["lon"], details["lat"]]
def get_route_coords(mode, waypoint_coords):
    """Route through ``waypoint_coords`` with OSRM for modes OSRM supports.

    Raises NotImplementedError for unsupported (e.g. train) modes.
    """
    if mode not in ("CAR", "WALKING", "BICYCLING", "BUS"):
        raise NotImplementedError("OSRM does not support train modes at this time")
    # Request the full polyline-encoded route geometry, without turn steps.
    overview_geometry_params = {"overview": "full",
                                "geometries": "polyline", "steps": "false"}
    return osrm.get_route_points(mode, waypoint_coords, overview_geometry_params)
def _fill_coords_from_id(loc):
if loc is None:
return None
if "osm_id" in loc["properties"]:
if loc["geometry"]["type"] == "Point":
loc["geometry"]["coordinates"] = node_to_geojson_coords(loc["properties"]["osm_id"])
elif loc["geometry"]["type"] == "Polygon":
# get coords for way returns a tuple of (nodes, points)
loc["geometry"]["coordinates"] = [[coords_swap(c) for c in get_coords_for_way(loc["properties"]["osm_id"])[1]]]
else:
assert "coordinates" in loc["geometry"],\
"Location %s does not have either an osmid or specified set of coordinates"
return loc
def validate_and_fill_calibration_tests(curr_spec):
    """Expand OSM ids and sensing config references in calibration tests.

    Locations are filled in place; each test's "config" id is replaced by
    the full sensing regime definition.
    """
    modified_spec = copy.copy(curr_spec)
    for test in modified_spec["calibration_tests"]:
        _fill_coords_from_id(test["start_loc"])
        _fill_coords_from_id(test["end_loc"])
        test["config"] = sensing_configs[test["config"]["id"]]
    return modified_spec
def coords_swap(lon_lat):
    """Reverse a coordinate pair, converting lon,lat <-> lat,lon."""
    swapped = list(lon_lat)
    swapped.reverse()
    return swapped
def get_route_from_osrm(t, start_coords, end_coords):
    """Compute the ground-truth route for leg ``t`` with OSRM.

    Intermediate waypoints may be given either as OSM node ids
    ("route_waypoints", resolved here and cached back onto the leg as a
    GeoJSON feature) or as explicit coordinates ("waypoint_coords").
    """
    if "route_waypoints" in t:
        waypoint_coords = [node_to_geojson_coords(nid) for nid in t["route_waypoints"]]
        # Cache the resolved coordinates on the leg so later passes (and
        # the output spec) do not need to hit OSM again.
        t["waypoint_coords"] = {
            "type": "Feature",
            "properties": {},
            "geometry": {
                "type": "Polygon",
                "coordinates": waypoint_coords
            }
        }
    elif "waypoint_coords" in t:
        waypoint_coords = t["waypoint_coords"]["geometry"]["coordinates"]
    else:
        waypoint_coords = []
    logging.debug("waypoint_coords = %s..." % waypoint_coords[0:3])
    return get_route_coords(t["mode"],
        [start_coords] + waypoint_coords + [end_coords])
def get_route_from_polyline(t):
    """Decode the leg's pre-computed encoded polyline into coordinates."""
    codec = pl.PolylineCodec()
    return codec.decode(t["polyline"])
# Porting the perl script at
# https://wiki.openstreetmap.org/wiki/Relations/Relations_to_GPX to python
def get_way_list(relation_details):
    """Return the ordered way ids of an OSM relation, skipping platforms.

    Nested relations are not supported; the relation must contain only
    node/way members.
    """
    members = relation_details["member"]
    for member in members:
        assert member["type"] != "relation", "This is a parent relation for child %d, expecting only child relations" % member["ref"]
    return [m["ref"] for m in members
            if m["type"] == "way" and m["role"] != "platform"]
# way details is an array of n-1 node entries followed by a way entry
# the way entry has an "nd" field which is an array of node ids in the correct
# order the n-1 node entries are not necessarily in the correct order but
# provide the id -> lat,lng mapping
# Note also that the way can sometimes have the nodes in the reversed order
# e.g. way 367132251 in relation 9605483 is reversed compared to ways
# 368345083 and 27422567 before it
# this function automatically detects that and reverses the node array
def get_coords_for_way(wid, prev_last_node=-1):
    """Return (ordered_node_ids, [lat, lon] coords) for an OSM way.

    ``prev_last_node`` is the last node of the preceding way in a relation;
    if this way's node list *ends* with that node, the way is stored in
    reverse order and is flipped before use.

    Note: coordinates are returned lat,lon (consistent with the values
    returned from OSRM); callers swap them into GeoJSON order later.
    """
    osm = osmapi.OsmApi()
    lat = {}
    lon = {}
    coords_list = []
    # WayFull returns the way's node elements (an unordered id -> lat/lon
    # mapping) followed by the way element, whose "nd" list gives the order.
    way_details = osm.WayFull(wid)
    # print("Processing way %d with %d nodes" % (wid, len(way_details) - 1))
    for e in way_details:
        if e["type"] == "node":
            lat[e["data"]["id"]] = e["data"]["lat"]
            lon[e["data"]["id"]] = e["data"]["lon"]
        if e["type"] == "way":
            # Bug fix: the failure message previously referenced ``wl[0]``,
            # a name that does not exist in this function, so a mismatch
            # raised NameError instead of the intended assertion message.
            assert e["data"]["id"] == wid, "Way id mismatch! %d != %d" % (e["data"]["id"], wid)
            ordered_node_array = e["data"]["nd"]
            if prev_last_node != -1 and ordered_node_array[-1] == prev_last_node:
                print("LAST entry %d matches prev_last_node %d, REVERSING order for %d" %
                    (ordered_node_array[-1], prev_last_node, wid))
                ordered_node_array = list(reversed(ordered_node_array))
            for on in ordered_node_array:
                coords_list.append([lat[on], lon[on]])
    return ordered_node_array, coords_list
def get_coords_for_relation(rid, start_node, end_node):
    """Return the [lat, lon] coords of relation ``rid`` between two nodes.

    The relation's member ways are concatenated in order (reversing any
    way stored backwards, see get_coords_for_way) and the result is
    clipped to the span from ``start_node`` to ``end_node`` (inclusive).
    """
    osm = osmapi.OsmApi()
    relation_details = osm.RelationGet(rid)
    wl = get_way_list(relation_details)
    print("Relation %d mapped to %d ways" % (rid, len(wl)))
    coords_list = []
    on_list = []
    # -1 sentinel: no previous way yet, so the first way is never reversed.
    prev_last_node = -1
    for wid in wl:
        w_on_list, w_coords_list = get_coords_for_way(wid, prev_last_node)
        on_list.extend(w_on_list)
        coords_list.extend(w_coords_list)
        # Remember where this way ended so the next way's orientation can
        # be checked against it.
        prev_last_node = w_on_list[-1]
        print("After adding %d entries from wid %d, curr count = %d" % (len(w_on_list), wid, len(coords_list)))
    start_index = on_list.index(start_node)
    end_index = on_list.index(end_node)
    assert start_index <= end_index, "Start index %d is before end %d" % (start_index, end_index)
    return coords_list[start_index:end_index+1]
def get_route_from_relation(t):
    """Ground-truth a (transit) leg from an OSM relation definition.

    Both start_node and end_node are expected to be node ids on the
    relation (get_coords_for_relation assumes nodes, not stops/platforms).
    """
    rel = t["relation"]
    return get_coords_for_relation(rel["relation_id"],
        rel["start_node"], rel["end_node"])
def validate_and_fill_leg(orig_leg):
    """Return a copy of a travel leg with its ground-truth geometry filled in.

    Start/end locations are resolved from OSM ids if needed, and the route
    trajectory is computed from one of: an encoded polyline, an OSM
    relation, or OSRM routing between representative points of the
    start/end polygons (optionally via waypoints).
    """
    # print(t)
    t = copy.copy(orig_leg)
    t["type"] = "TRAVEL"
    # These are now almost certain to be polygons
    # and probably user-drawn, not looked up from OSM
    # so what we will get here is an geojson representation of a polygon
    # TODO: Drop support for single point
    start_polygon = _fill_coords_from_id(t["start_loc"])
    end_polygon = _fill_coords_from_id(t["end_loc"])
    print("Raw polygons: start = %s..., end = %s..." %
        (start_polygon["geometry"]["coordinates"][0][0:3],
         end_polygon["geometry"]["coordinates"][0][0:3]))
    # there are three possible ways in which users can specify routes
    # - waypoints from OSM, which we will map into coordinates and then
    #   move to step 2
    # - list of coordinates, which we will use to find route coordinates
    #   using OSRM
    # - a relation with start and end nodes, used only for public transit trips
    # - a polyline, which we can get from external API calls such as OTP or Google Maps
    # Right now, we leave the integrations unspecified because there is not
    # much standardization other than with google maps
    # For example, the VTA trip planner () clearly uses OTP
    # () but the path (api/otp/plan?) is different from the one for our OTP
    # integration (otp/routers/default/plan?)
    # But once people figure out the underlying call, they can copy-paste the
    # geometry into the spec.
    if "polyline" in t:
        route_coords = get_route_from_polyline(t)
    elif "relation" in t:
        route_coords = get_route_from_relation(t)
    else:
        # We need to find a point within the polygon to pass to the routing engine
        start_coords_shp = geo.Polygon(start_polygon["geometry"]["coordinates"][0]).representative_point()
        start_coords = geo.mapping(start_coords_shp)["coordinates"]
        end_coords_shp = geo.Polygon(end_polygon["geometry"]["coordinates"][0]).representative_point()
        end_coords = geo.mapping(end_coords_shp)["coordinates"]
        print("Representative_coords: start = %s, end = %s" % (start_coords, end_coords))
        route_coords = get_route_from_osrm(t, start_coords, end_coords)
    # Routing engines return lat,lon; GeoJSON wants lon,lat, hence the swap.
    t["route_coords"] = {
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "LineString",
            "coordinates": [coords_swap(rc) for rc in route_coords]
        }
    }
    return t
def get_hidden_access_transfer_walk_segments(prev_l, l):
    """Return the implicit (non ground-truthed) segments between two legs.

    ``prev_l`` is None at the start of the trip and ``l`` is None at its
    end.  Depending on the surrounding modes this produces an ACCESS walk
    (to/from a parked vehicle), a TRANSFER walk between vehicles, and/or a
    WAITING segment, none of which can have a trajectory at spec creation
    time.
    """
    first_leg = prev_l is None
    last_leg = l is None
    if first_leg and l["mode"] != "WALKING":
        # Vehicular first leg: walk from the building to wherever the
        # vehicle is parked.  That location is unknown, so no ground truth.
        return [{
            "id": "walk_start",
            "type": "ACCESS",
            "mode": "WALKING",
            "name": "Walk from the building to your vehicle",
            "loc": l["start_loc"],
        }]
    if last_leg and prev_l["mode"] != "WALKING":
        # Vehicular last leg: walk from the parked vehicle to the building.
        return [{
            "id": "walk_start",
            "type": "ACCESS",
            "mode": "WALKING",
            "name": "Walk from your vehicle to the building",
            "loc": prev_l["end_loc"]
        }]
    segments = []
    # Order matters: the WALKING transfer must precede the STOPPED wait,
    # and a transit transfer usually produces both.
    if not first_leg and not last_leg and \
            prev_l["mode"] != "WALKING" and l["mode"] != "WALKING":
        segments.append({
            "id": "tt_%s_%s" % (prev_l["mode"], l["mode"]),
            "type": "TRANSFER",
            "mode": "WALKING",
            "name": "Transfer between %s and %s at %s" %\
                (prev_l["mode"], l["mode"], prev_l["end_loc"]["properties"]["name"]),
            "loc": l["start_loc"]
        })
    if not last_leg and "multiple_occupancy" in l and l["multiple_occupancy"] == True:
        segments.append({
            "id": "wait_for_%s" % (l["mode"]),
            "type": "WAITING",
            "mode": "STOPPED",
            "name": "Wait for %s at %s" %\
                (l["mode"], l["start_loc"]["properties"]["name"]),
            "loc": l["start_loc"]
        })
    return segments
def validate_and_fill_eval_trips(curr_spec):
    """Fill ground truth for all evaluation trips, normalizing to legs.

    Multi-modal trips keep their leg list, with shim (ACCESS / TRANSFER /
    WAITING) segments inserted between legs.  Unimodal trips are rewritten
    in place into the same multi-leg structure, so the filled spec always
    has a "legs" list.
    """
    modified_spec = copy.copy(curr_spec)
    eval_trips = modified_spec["evaluation_trips"]
    for t in eval_trips:
        if "legs" in t:
            print("Filling multi-modal trip %s" % t["id"])
            prev_l = None
            ret_leg_list = []
            for i, l in enumerate(t["legs"]):
                print("Filling leg %s" % l["id"])
                # Add in shim legs like the ones to walk to/from your vehicle
                # or to transfer between transit modes
                shim_legs = get_hidden_access_transfer_walk_segments(prev_l, l)
                print("Got shim legs %s, extending" % ([sl["id"] for sl in shim_legs]))
                ret_leg_list.extend(shim_legs)
                ret_leg_list.append(validate_and_fill_leg(l))
                prev_l = l
            # Trailing shim: walk from the last vehicle to the destination.
            shim_legs = get_hidden_access_transfer_walk_segments(prev_l, None)
            assert len(shim_legs) <= 1, "Last leg should not have a transfer shim"
            print("Got shim legs %s, extending" % ([sl["id"] for sl in shim_legs]))
            ret_leg_list.extend(shim_legs)
            t["legs"] = ret_leg_list
        else:
            print("Filling unimodal trip %s" % t["id"])
            # unimodal trip, let's add shims if necessary
            # the filled spec will always be multimodal
            # since the only true unimodal trip is walking
            # and it is easier to assume that there are always legs
            # specially since we are adding complexity with the type of trips
            # (ACCESS, TRANSFER, TRAVEL)
            # NOTE: t is cleared and rebuilt in place so the spec's trip
            # list keeps referencing the same dict object.
            unmod_trip = copy.deepcopy(t)
            t.clear()
            t["id"] = unmod_trip["id"]
            t["name"] = unmod_trip["name"]
            t["legs"] = []
            before_shim_leg = get_hidden_access_transfer_walk_segments(None, unmod_trip)
            assert len(before_shim_leg) <= 1, "First leg should not have a transfer shim"
            print("Got shim legs %s, extending" % ([sl["id"] for sl in before_shim_leg]))
            t["legs"].extend(before_shim_leg)
            t["legs"].append(validate_and_fill_leg(unmod_trip))
            after_shim_leg = get_hidden_access_transfer_walk_segments(unmod_trip, None)
            assert len(after_shim_leg) <= 1, "Last leg should not have a transfer shim"
            print("Got shim legs %s, extending" % ([sl["id"] for sl in after_shim_leg]))
            t["legs"].extend(after_shim_leg)
    return modified_spec
def validate_and_fill_sensing_settings(curr_spec):
    """Expand each sensing setting's config-id list into full definitions.

    Each per-OS entry is replaced by a dict with the original list
    ("compare"), a display name, and the full sensing regime definitions.
    """
    modified_spec = copy.copy(curr_spec)
    for ss in modified_spec["sensing_settings"]:
        for phoneOS in list(ss):
            compare_list = ss[phoneOS]
            ss[phoneOS] = {
                "compare": compare_list,
                "name": " v/s ".join(compare_list),
                "sensing_configs": [sensing_configs[cr] for cr in compare_list],
            }
    return modified_spec
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)

    # Command line: read an input spec, run the fill passes, write output.
    parser = argparse.ArgumentParser(prog="autofill_eval_spec")
    parser.add_argument("in_spec_file", help="file to autofill")
    parser.add_argument("out_spec_file", help="autofilled version of in_spec_file")
    args = parser.parse_args()

    print("Reading input from %s" % args.in_spec_file)
    # Bug fix: both spec files were previously opened without being closed;
    # the output in particular relied on GC to flush the handle.
    with open(args.in_spec_file) as in_fp:
        current_spec = json.load(in_fp)

    # Each pass returns an augmented (shallow) copy of the previous one.
    dt_spec = validate_and_fill_datetime(current_spec)
    calib_spec = validate_and_fill_calibration_tests(dt_spec)
    eval_spec = validate_and_fill_eval_trips(calib_spec)
    settings_spec = validate_and_fill_sensing_settings(eval_spec)

    print("Writing output to %s" % args.out_spec_file)
    with open(args.out_spec_file, "w") as out_fp:
        json.dump(settings_spec, out_fp, indent=2)
| [
"arrow.get",
"copy.deepcopy",
"logging.debug",
"argparse.ArgumentParser",
"logging.basicConfig",
"shapely.geometry.Polygon",
"shapely.geometry.mapping",
"copy.copy",
"osrm.get_route_points",
"polyline.PolylineCodec",
"osmapi.OsmApi"
] | [((312, 335), 'copy.copy', 'copy.copy', (['current_spec'], {}), '(current_spec)\n', (321, 335), False, 'import copy\n'), ((642, 657), 'osmapi.OsmApi', 'osmapi.OsmApi', ([], {}), '()\n', (655, 657), False, 'import osmapi\n'), ((1985, 2005), 'copy.copy', 'copy.copy', (['curr_spec'], {}), '(curr_spec)\n', (1994, 2005), False, 'import copy\n'), ((2917, 2980), 'logging.debug', 'logging.debug', (["('waypoint_coords = %s...' % waypoint_coords[0:3])"], {}), "('waypoint_coords = %s...' % waypoint_coords[0:3])\n", (2930, 2980), False, 'import logging\n'), ((4266, 4281), 'osmapi.OsmApi', 'osmapi.OsmApi', ([], {}), '()\n', (4279, 4281), False, 'import osmapi\n'), ((5485, 5500), 'osmapi.OsmApi', 'osmapi.OsmApi', ([], {}), '()\n', (5498, 5500), False, 'import osmapi\n'), ((6553, 6572), 'copy.copy', 'copy.copy', (['orig_leg'], {}), '(orig_leg)\n', (6562, 6572), False, 'import copy\n'), ((11466, 11486), 'copy.copy', 'copy.copy', (['curr_spec'], {}), '(curr_spec)\n', (11475, 11486), False, 'import copy\n'), ((13951, 13971), 'copy.copy', 'copy.copy', (['curr_spec'], {}), '(curr_spec)\n', (13960, 13971), False, 'import copy\n'), ((14357, 14397), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (14376, 14397), False, 'import logging\n'), ((14411, 14461), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""autofill_eval_spec"""'}), "(prog='autofill_eval_spec')\n", (14434, 14461), False, 'import argparse\n'), ((413, 471), 'arrow.get', 'arrow.get', (["current_spec['start_fmt_date']"], {'tzinfo': 'timezone'}), "(current_spec['start_fmt_date'], tzinfo=timezone)\n", (422, 471), False, 'import arrow\n'), ((507, 563), 'arrow.get', 'arrow.get', (["current_spec['end_fmt_date']"], {'tzinfo': 'timezone'}), "(current_spec['end_fmt_date'], tzinfo=timezone)\n", (516, 563), False, 'import arrow\n'), ((1059, 1129), 'osrm.get_route_points', 'osrm.get_route_points', (['mode', 'waypoint_coords', 'overview_geometry_params'], 
{}), '(mode, waypoint_coords, overview_geometry_params)\n', (1080, 1129), True, 'import osrm as osrm\n'), ((3153, 3171), 'polyline.PolylineCodec', 'pl.PolylineCodec', ([], {}), '()\n', (3169, 3171), True, 'import polyline as pl\n'), ((13016, 13032), 'copy.deepcopy', 'copy.deepcopy', (['t'], {}), '(t)\n', (13029, 13032), False, 'import copy\n'), ((8315, 8344), 'shapely.geometry.mapping', 'geo.mapping', (['start_coords_shp'], {}), '(start_coords_shp)\n', (8326, 8344), True, 'import shapely.geometry as geo\n'), ((8484, 8511), 'shapely.geometry.mapping', 'geo.mapping', (['end_coords_shp'], {}), '(end_coords_shp)\n', (8495, 8511), True, 'import shapely.geometry as geo\n'), ((8212, 8268), 'shapely.geometry.Polygon', 'geo.Polygon', (["start_polygon['geometry']['coordinates'][0]"], {}), "(start_polygon['geometry']['coordinates'][0])\n", (8223, 8268), True, 'import shapely.geometry as geo\n'), ((8385, 8439), 'shapely.geometry.Polygon', 'geo.Polygon', (["end_polygon['geometry']['coordinates'][0]"], {}), "(end_polygon['geometry']['coordinates'][0])\n", (8396, 8439), True, 'import shapely.geometry as geo\n')] |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'miasmod_data.ui'
#
# Created: Tue Apr 29 18:40:05 2014
# by: pyside-uic 0.2.15 running on PySide 1.2.1
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_MiasmataData(object):
    """Auto-generated (pyside-uic) UI definition for the MiasmataData widget.

    NOTE(review): this class was generated from miasmod_data.ui; per the
    file header, any hand edits (including these comments) will be lost if
    the .ui file is recompiled.
    """
    def setupUi(self, MiasmataData):
        """Build the widget hierarchy, layouts, actions and signal wiring."""
        MiasmataData.setObjectName("MiasmataData")
        MiasmataData.resize(713, 490)
        # Top row: save / diff buttons and the mod version field.
        self.verticalLayout_3 = QtGui.QVBoxLayout(MiasmataData)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.save = QtGui.QPushButton(MiasmataData)
        self.save.setEnabled(False)
        self.save.setObjectName("save")
        self.horizontalLayout_2.addWidget(self.save)
        self.show_diff = QtGui.QPushButton(MiasmataData)
        self.show_diff.setEnabled(False)
        self.show_diff.setObjectName("show_diff")
        self.horizontalLayout_2.addWidget(self.show_diff)
        self.lblVersion = QtGui.QLabel(MiasmataData)
        self.lblVersion.setEnabled(False)
        self.lblVersion.setObjectName("lblVersion")
        self.horizontalLayout_2.addWidget(self.lblVersion)
        self.version = QtGui.QLineEdit(MiasmataData)
        self.version.setEnabled(False)
        self.version.setMaximumSize(QtCore.QSize(84, 16777215))
        self.version.setObjectName("version")
        self.horizontalLayout_2.addWidget(self.version)
        spacerItem = QtGui.QSpacerItem(20, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem)
        self.verticalLayout_3.addLayout(self.horizontalLayout_2)
        # Left column: tree view of the data plus the search row.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.verticalLayout = QtGui.QVBoxLayout()
        self.verticalLayout.setObjectName("verticalLayout")
        self.treeView = QtGui.QTreeView(MiasmataData)
        self.treeView.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        self.treeView.setAlternatingRowColors(True)
        self.treeView.setRootIsDecorated(False)
        self.treeView.setUniformRowHeights(True)
        self.treeView.setAllColumnsShowFocus(True)
        self.treeView.setObjectName("treeView")
        self.verticalLayout.addWidget(self.treeView)
        self.horizontalLayout_3 = QtGui.QHBoxLayout()
        self.horizontalLayout_3.setObjectName("horizontalLayout_3")
        self.label_4 = QtGui.QLabel(MiasmataData)
        self.label_4.setObjectName("label_4")
        self.horizontalLayout_3.addWidget(self.label_4)
        self.search = QtGui.QLineEdit(MiasmataData)
        self.search.setObjectName("search")
        self.horizontalLayout_3.addWidget(self.search)
        self.clear_search = QtGui.QPushButton(MiasmataData)
        self.clear_search.setObjectName("clear_search")
        self.horizontalLayout_3.addWidget(self.clear_search)
        self.verticalLayout.addLayout(self.horizontalLayout_3)
        self.horizontalLayout.addLayout(self.verticalLayout)
        # Right column: name/type/value form plus list and hex views.
        self.verticalLayout_2 = QtGui.QVBoxLayout()
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.formLayout = QtGui.QFormLayout()
        self.formLayout.setObjectName("formLayout")
        self.label = QtGui.QLabel(MiasmataData)
        self.label.setObjectName("label")
        self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label)
        self.name = QtGui.QLineEdit(MiasmataData)
        self.name.setReadOnly(True)
        self.name.setObjectName("name")
        self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.name)
        self.type = QtGui.QComboBox(MiasmataData)
        self.type.setEnabled(False)
        self.type.setObjectName("type")
        self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.type)
        self.label_2 = QtGui.QLabel(MiasmataData)
        self.label_2.setObjectName("label_2")
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_2)
        self.label_3 = QtGui.QLabel(MiasmataData)
        self.label_3.setObjectName("label_3")
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_3)
        self.value_line = QtGui.QLineEdit(MiasmataData)
        self.value_line.setObjectName("value_line")
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.value_line)
        self.verticalLayout_2.addLayout(self.formLayout)
        spacerItem1 = QtGui.QSpacerItem(20, 0, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout_2.addItem(spacerItem1)
        self.value_list = QtGui.QListView(MiasmataData)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(100)
        sizePolicy.setHeightForWidth(self.value_list.sizePolicy().hasHeightForWidth())
        self.value_list.setSizePolicy(sizePolicy)
        self.value_list.setContextMenuPolicy(QtCore.Qt.ActionsContextMenu)
        self.value_list.setAlternatingRowColors(True)
        self.value_list.setUniformItemSizes(True)
        self.value_list.setObjectName("value_list")
        self.verticalLayout_2.addWidget(self.value_list)
        self.value_hex = QtGui.QPlainTextEdit(MiasmataData)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(100)
        sizePolicy.setHeightForWidth(self.value_hex.sizePolicy().hasHeightForWidth())
        self.value_hex.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily("Courier New")
        font.setWeight(75)
        font.setBold(True)
        self.value_hex.setFont(font)
        self.value_hex.setReadOnly(True)
        self.value_hex.setObjectName("value_hex")
        self.verticalLayout_2.addWidget(self.value_hex)
        # Bottom-right button grid: new key/value, undo, delete.
        self.gridLayout_2 = QtGui.QGridLayout()
        self.gridLayout_2.setObjectName("gridLayout_2")
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem2, 2, 1, 1, 1)
        self.new_key = QtGui.QPushButton(MiasmataData)
        self.new_key.setEnabled(False)
        self.new_key.setObjectName("new_key")
        self.gridLayout_2.addWidget(self.new_key, 1, 0, 1, 1)
        self.delete_node = QtGui.QPushButton(MiasmataData)
        self.delete_node.setEnabled(False)
        self.delete_node.setObjectName("delete_node")
        self.gridLayout_2.addWidget(self.delete_node, 2, 2, 1, 1)
        self.new_value = QtGui.QPushButton(MiasmataData)
        self.new_value.setEnabled(False)
        self.new_value.setObjectName("new_value")
        self.gridLayout_2.addWidget(self.new_value, 2, 0, 1, 1)
        self.undo = QtGui.QPushButton(MiasmataData)
        self.undo.setEnabled(False)
        self.undo.setObjectName("undo")
        self.gridLayout_2.addWidget(self.undo, 1, 2, 1, 1)
        self.verticalLayout_2.addLayout(self.gridLayout_2)
        self.horizontalLayout.addLayout(self.verticalLayout_2)
        self.horizontalLayout.setStretch(0, 3)
        self.horizontalLayout.setStretch(1, 2)
        self.verticalLayout_3.addLayout(self.horizontalLayout)
        # Context-menu actions for the tree/list views.
        self.actionNew_Key = QtGui.QAction(MiasmataData)
        self.actionNew_Key.setObjectName("actionNew_Key")
        self.actionNew_Value = QtGui.QAction(MiasmataData)
        self.actionNew_Value.setObjectName("actionNew_Value")
        self.actionUndo_Changes = QtGui.QAction(MiasmataData)
        self.actionUndo_Changes.setObjectName("actionUndo_Changes")
        self.actionDelete = QtGui.QAction(MiasmataData)
        self.actionDelete.setObjectName("actionDelete")
        self.actionInsert_Row = QtGui.QAction(MiasmataData)
        self.actionInsert_Row.setObjectName("actionInsert_Row")
        self.actionRemove_Row = QtGui.QAction(MiasmataData)
        self.actionRemove_Row.setObjectName("actionRemove_Row")
        self.lblVersion.setBuddy(self.version)
        self.label_4.setBuddy(self.search)
        self.label.setBuddy(self.name)
        self.label_2.setBuddy(self.type)
        self.label_3.setBuddy(self.value_line)
        self.retranslateUi(MiasmataData)
        # Signal wiring: actions and buttons dispatch to slots that the
        # MiasmataData widget itself is expected to implement.
        QtCore.QObject.connect(self.actionNew_Key, QtCore.SIGNAL("triggered()"), MiasmataData.insert_key)
        QtCore.QObject.connect(self.actionNew_Value, QtCore.SIGNAL("triggered()"), MiasmataData.insert_value)
        QtCore.QObject.connect(self.new_key, QtCore.SIGNAL("clicked()"), MiasmataData.insert_key)
        QtCore.QObject.connect(self.new_value, QtCore.SIGNAL("clicked()"), MiasmataData.insert_value)
        QtCore.QObject.connect(self.delete_node, QtCore.SIGNAL("clicked()"), MiasmataData.delete_node)
        QtCore.QObject.connect(self.undo, QtCore.SIGNAL("clicked()"), MiasmataData.undo)
        QtCore.QObject.connect(self.actionUndo_Changes, QtCore.SIGNAL("triggered()"), MiasmataData.undo)
        QtCore.QObject.connect(self.actionDelete, QtCore.SIGNAL("triggered()"), MiasmataData.delete_node)
        QtCore.QObject.connect(self.clear_search, QtCore.SIGNAL("clicked()"), self.search.clear)
        QtCore.QMetaObject.connectSlotsByName(MiasmataData)
        # Keyboard (tab) traversal order across the whole dialog.
        MiasmataData.setTabOrder(self.treeView, self.search)
        MiasmataData.setTabOrder(self.search, self.clear_search)
        MiasmataData.setTabOrder(self.clear_search, self.name)
        MiasmataData.setTabOrder(self.name, self.type)
        MiasmataData.setTabOrder(self.type, self.value_line)
        MiasmataData.setTabOrder(self.value_line, self.value_list)
        MiasmataData.setTabOrder(self.value_list, self.value_hex)
        MiasmataData.setTabOrder(self.value_hex, self.new_key)
        MiasmataData.setTabOrder(self.new_key, self.new_value)
        MiasmataData.setTabOrder(self.new_value, self.undo)
        MiasmataData.setTabOrder(self.undo, self.delete_node)
        MiasmataData.setTabOrder(self.delete_node, self.save)
        MiasmataData.setTabOrder(self.save, self.show_diff)
        MiasmataData.setTabOrder(self.show_diff, self.version)

    def retranslateUi(self, MiasmataData):
        """Set all user-visible strings (translation hook for Qt's uic)."""
        self.save.setText(QtGui.QApplication.translate("MiasmataData", "&Save...", None, QtGui.QApplication.UnicodeUTF8))
        self.show_diff.setText(QtGui.QApplication.translate("MiasmataData", "Show &mod changes...", None, QtGui.QApplication.UnicodeUTF8))
        self.lblVersion.setText(QtGui.QApplication.translate("MiasmataData", "&Version:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("MiasmataData", "&Search:", None, QtGui.QApplication.UnicodeUTF8))
        self.clear_search.setText(QtGui.QApplication.translate("MiasmataData", "&Clear", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("MiasmataData", "&Name:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("MiasmataData", "&Type:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("MiasmataData", "&Value:", None, QtGui.QApplication.UnicodeUTF8))
        self.new_key.setText(QtGui.QApplication.translate("MiasmataData", "New &Key", None, QtGui.QApplication.UnicodeUTF8))
        self.delete_node.setText(QtGui.QApplication.translate("MiasmataData", "&Delete Node...", None, QtGui.QApplication.UnicodeUTF8))
        self.new_value.setText(QtGui.QApplication.translate("MiasmataData", "New V&alue", None, QtGui.QApplication.UnicodeUTF8))
        self.undo.setText(QtGui.QApplication.translate("MiasmataData", "&Undo Changes to Node", None, QtGui.QApplication.UnicodeUTF8))
        self.actionNew_Key.setText(QtGui.QApplication.translate("MiasmataData", "New Key", None, QtGui.QApplication.UnicodeUTF8))
        self.actionNew_Value.setText(QtGui.QApplication.translate("MiasmataData", "New Value", None, QtGui.QApplication.UnicodeUTF8))
        self.actionUndo_Changes.setText(QtGui.QApplication.translate("MiasmataData", "Undo Changes", None, QtGui.QApplication.UnicodeUTF8))
        self.actionDelete.setText(QtGui.QApplication.translate("MiasmataData", "Delete", None, QtGui.QApplication.UnicodeUTF8))
        self.actionInsert_Row.setText(QtGui.QApplication.translate("MiasmataData", "Insert Row", None, QtGui.QApplication.UnicodeUTF8))
        self.actionRemove_Row.setText(QtGui.QApplication.translate("MiasmataData", "Remove Row", None, QtGui.QApplication.UnicodeUTF8))
| [
"PySide.QtCore.QSize",
"PySide.QtGui.QFormLayout",
"PySide.QtGui.QLineEdit",
"PySide.QtGui.QPlainTextEdit",
"PySide.QtGui.QTreeView",
"PySide.QtGui.QFont",
"PySide.QtGui.QHBoxLayout",
"PySide.QtGui.QGridLayout",
"PySide.QtGui.QApplication.translate",
"PySide.QtCore.QMetaObject.connectSlotsByName",... | [((468, 499), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', (['MiasmataData'], {}), '(MiasmataData)\n', (485, 499), False, 'from PySide import QtCore, QtGui\n'), ((598, 617), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (615, 617), False, 'from PySide import QtCore, QtGui\n'), ((706, 737), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (723, 737), False, 'from PySide import QtCore, QtGui\n'), ((892, 923), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (909, 923), False, 'from PySide import QtCore, QtGui\n'), ((1099, 1125), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['MiasmataData'], {}), '(MiasmataData)\n', (1111, 1125), False, 'from PySide import QtCore, QtGui\n'), ((1302, 1331), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['MiasmataData'], {}), '(MiasmataData)\n', (1317, 1331), False, 'from PySide import QtCore, QtGui\n'), ((1558, 1644), 'PySide.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(20)', '(20)', 'QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Minimum'], {}), '(20, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.\n Minimum)\n', (1575, 1644), False, 'from PySide import QtCore, QtGui\n'), ((1789, 1808), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (1806, 1808), False, 'from PySide import QtCore, QtGui\n'), ((1903, 1922), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (1920, 1922), False, 'from PySide import QtCore, QtGui\n'), ((2007, 2036), 'PySide.QtGui.QTreeView', 'QtGui.QTreeView', (['MiasmataData'], {}), '(MiasmataData)\n', (2022, 2036), False, 'from PySide import QtCore, QtGui\n'), ((2445, 2464), 'PySide.QtGui.QHBoxLayout', 'QtGui.QHBoxLayout', ([], {}), '()\n', (2462, 2464), False, 'from PySide import QtCore, QtGui\n'), ((2556, 2582), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['MiasmataData'], {}), '(MiasmataData)\n', (2568, 2582), False, 
'from PySide import QtCore, QtGui\n'), ((2707, 2736), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['MiasmataData'], {}), '(MiasmataData)\n', (2722, 2736), False, 'from PySide import QtCore, QtGui\n'), ((2864, 2895), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (2881, 2895), False, 'from PySide import QtCore, QtGui\n'), ((3169, 3188), 'PySide.QtGui.QVBoxLayout', 'QtGui.QVBoxLayout', ([], {}), '()\n', (3186, 3188), False, 'from PySide import QtCore, QtGui\n'), ((3279, 3298), 'PySide.QtGui.QFormLayout', 'QtGui.QFormLayout', ([], {}), '()\n', (3296, 3298), False, 'from PySide import QtCore, QtGui\n'), ((3372, 3398), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['MiasmataData'], {}), '(MiasmataData)\n', (3384, 3398), False, 'from PySide import QtCore, QtGui\n'), ((3539, 3568), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['MiasmataData'], {}), '(MiasmataData)\n', (3554, 3568), False, 'from PySide import QtCore, QtGui\n'), ((3742, 3771), 'PySide.QtGui.QComboBox', 'QtGui.QComboBox', (['MiasmataData'], {}), '(MiasmataData)\n', (3757, 3771), False, 'from PySide import QtCore, QtGui\n'), ((3948, 3974), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['MiasmataData'], {}), '(MiasmataData)\n', (3960, 3974), False, 'from PySide import QtCore, QtGui\n'), ((4124, 4150), 'PySide.QtGui.QLabel', 'QtGui.QLabel', (['MiasmataData'], {}), '(MiasmataData)\n', (4136, 4150), False, 'from PySide import QtCore, QtGui\n'), ((4303, 4332), 'PySide.QtGui.QLineEdit', 'QtGui.QLineEdit', (['MiasmataData'], {}), '(MiasmataData)\n', (4318, 4332), False, 'from PySide import QtCore, QtGui\n'), ((4547, 4632), 'PySide.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(20)', '(0)', 'QtGui.QSizePolicy.Minimum', 'QtGui.QSizePolicy.Expanding'], {}), '(20, 0, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding\n )\n', (4564, 4632), False, 'from PySide import QtCore, QtGui\n'), ((4705, 4734), 'PySide.QtGui.QListView', 'QtGui.QListView', (['MiasmataData'], {}), 
'(MiasmataData)\n', (4720, 4734), False, 'from PySide import QtCore, QtGui\n'), ((4756, 4831), 'PySide.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Expanding'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n', (4773, 4831), False, 'from PySide import QtCore, QtGui\n'), ((5368, 5402), 'PySide.QtGui.QPlainTextEdit', 'QtGui.QPlainTextEdit', (['MiasmataData'], {}), '(MiasmataData)\n', (5388, 5402), False, 'from PySide import QtCore, QtGui\n'), ((5424, 5499), 'PySide.QtGui.QSizePolicy', 'QtGui.QSizePolicy', (['QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Expanding'], {}), '(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)\n', (5441, 5499), False, 'from PySide import QtCore, QtGui\n'), ((5736, 5749), 'PySide.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5747, 5749), False, 'from PySide import QtCore, QtGui\n'), ((6054, 6073), 'PySide.QtGui.QGridLayout', 'QtGui.QGridLayout', ([], {}), '()\n', (6071, 6073), False, 'from PySide import QtCore, QtGui\n'), ((6152, 6238), 'PySide.QtGui.QSpacerItem', 'QtGui.QSpacerItem', (['(40)', '(20)', 'QtGui.QSizePolicy.Expanding', 'QtGui.QSizePolicy.Minimum'], {}), '(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.\n Minimum)\n', (6169, 6238), False, 'from PySide import QtCore, QtGui\n'), ((6316, 6347), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (6333, 6347), False, 'from PySide import QtCore, QtGui\n'), ((6522, 6553), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (6539, 6553), False, 'from PySide import QtCore, QtGui\n'), ((6742, 6773), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (6759, 6773), False, 'from PySide import QtCore, QtGui\n'), ((6949, 6980), 'PySide.QtGui.QPushButton', 'QtGui.QPushButton', (['MiasmataData'], {}), '(MiasmataData)\n', (6966, 6980), False, 'from PySide import QtCore, QtGui\n'), 
((7424, 7451), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (7437, 7451), False, 'from PySide import QtCore, QtGui\n'), ((7541, 7568), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (7554, 7568), False, 'from PySide import QtCore, QtGui\n'), ((7665, 7692), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (7678, 7692), False, 'from PySide import QtCore, QtGui\n'), ((7789, 7816), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (7802, 7816), False, 'from PySide import QtCore, QtGui\n'), ((7905, 7932), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (7918, 7932), False, 'from PySide import QtCore, QtGui\n'), ((8029, 8056), 'PySide.QtGui.QAction', 'QtGui.QAction', (['MiasmataData'], {}), '(MiasmataData)\n', (8042, 8056), False, 'from PySide import QtCore, QtGui\n'), ((9304, 9355), 'PySide.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MiasmataData'], {}), '(MiasmataData)\n', (9341, 9355), False, 'from PySide import QtCore, QtGui\n'), ((1407, 1433), 'PySide.QtCore.QSize', 'QtCore.QSize', (['(84)', '(16777215)'], {}), '(84, 16777215)\n', (1419, 1433), False, 'from PySide import QtCore, QtGui\n'), ((8431, 8459), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""triggered()"""'], {}), "('triggered()')\n", (8444, 8459), False, 'from PySide import QtCore, QtGui\n'), ((8539, 8567), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""triggered()"""'], {}), "('triggered()')\n", (8552, 8567), False, 'from PySide import QtCore, QtGui\n'), ((8641, 8667), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (8654, 8667), False, 'from PySide import QtCore, QtGui\n'), ((8741, 8767), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (8754, 8767), False, 'from PySide import QtCore, QtGui\n'), ((8845, 
8871), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (8858, 8871), False, 'from PySide import QtCore, QtGui\n'), ((8941, 8967), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (8954, 8967), False, 'from PySide import QtCore, QtGui\n'), ((9044, 9072), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""triggered()"""'], {}), "('triggered()')\n", (9057, 9072), False, 'from PySide import QtCore, QtGui\n'), ((9143, 9171), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""triggered()"""'], {}), "('triggered()')\n", (9156, 9171), False, 'from PySide import QtCore, QtGui\n'), ((9249, 9275), 'PySide.QtCore.SIGNAL', 'QtCore.SIGNAL', (['"""clicked()"""'], {}), "('clicked()')\n", (9262, 9275), False, 'from PySide import QtCore, QtGui\n'), ((10297, 10396), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Save..."""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Save...', None, QtGui.\n QApplication.UnicodeUTF8)\n", (10325, 10396), False, 'from PySide import QtCore, QtGui\n'), ((10424, 10534), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""Show &mod changes..."""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'Show &mod changes...', None,\n QtGui.QApplication.UnicodeUTF8)\n", (10452, 10534), False, 'from PySide import QtCore, QtGui\n'), ((10564, 10664), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Version:"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Version:', None, QtGui.\n QApplication.UnicodeUTF8)\n", (10592, 10664), False, 'from PySide import QtCore, QtGui\n'), ((10690, 10789), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Search:"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Search:', None, 
QtGui.\n QApplication.UnicodeUTF8)\n", (10718, 10789), False, 'from PySide import QtCore, QtGui\n'), ((10820, 10917), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Clear"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Clear', None, QtGui.\n QApplication.UnicodeUTF8)\n", (10848, 10917), False, 'from PySide import QtCore, QtGui\n'), ((10941, 11038), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Name:"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Name:', None, QtGui.\n QApplication.UnicodeUTF8)\n", (10969, 11038), False, 'from PySide import QtCore, QtGui\n'), ((11064, 11161), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Type:"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Type:', None, QtGui.\n QApplication.UnicodeUTF8)\n", (11092, 11161), False, 'from PySide import QtCore, QtGui\n'), ((11187, 11285), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Value:"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Value:', None, QtGui.\n QApplication.UnicodeUTF8)\n", (11215, 11285), False, 'from PySide import QtCore, QtGui\n'), ((11311, 11410), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""New &Key"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'New &Key', None, QtGui.\n QApplication.UnicodeUTF8)\n", (11339, 11410), False, 'from PySide import QtCore, QtGui\n'), ((11440, 11546), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Delete Node..."""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Delete Node...', None, QtGui\n .QApplication.UnicodeUTF8)\n", (11468, 11546), False, 'from PySide import QtCore, QtGui\n'), ((11574, 
11675), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""New V&alue"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'New V&alue', None, QtGui.\n QApplication.UnicodeUTF8)\n", (11602, 11675), False, 'from PySide import QtCore, QtGui\n'), ((11698, 11809), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""&Undo Changes to Node"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', '&Undo Changes to Node', None,\n QtGui.QApplication.UnicodeUTF8)\n", (11726, 11809), False, 'from PySide import QtCore, QtGui\n'), ((11842, 11940), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""New Key"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'New Key', None, QtGui.\n QApplication.UnicodeUTF8)\n", (11870, 11940), False, 'from PySide import QtCore, QtGui\n'), ((11974, 12074), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""New Value"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'New Value', None, QtGui.\n QApplication.UnicodeUTF8)\n", (12002, 12074), False, 'from PySide import QtCore, QtGui\n'), ((12111, 12214), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""Undo Changes"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'Undo Changes', None, QtGui.\n QApplication.UnicodeUTF8)\n", (12139, 12214), False, 'from PySide import QtCore, QtGui\n'), ((12245, 12342), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""Delete"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'Delete', None, QtGui.\n QApplication.UnicodeUTF8)\n", (12273, 12342), False, 'from PySide import QtCore, QtGui\n'), ((12377, 12478), 'PySide.QtGui.QApplication.translate', 
'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""Insert Row"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'Insert Row', None, QtGui.\n QApplication.UnicodeUTF8)\n", (12405, 12478), False, 'from PySide import QtCore, QtGui\n'), ((12513, 12614), 'PySide.QtGui.QApplication.translate', 'QtGui.QApplication.translate', (['"""MiasmataData"""', '"""Remove Row"""', 'None', 'QtGui.QApplication.UnicodeUTF8'], {}), "('MiasmataData', 'Remove Row', None, QtGui.\n QApplication.UnicodeUTF8)\n", (12541, 12614), False, 'from PySide import QtCore, QtGui\n')] |
import logging
logger = logging.getLogger(__name__)
module_uri = "/isam/felb/configuration/logging"
requires_modules = None
requires_versions = None
def get(isamAppliance):
    """
    Retrieve the current FELB logging configuration attributes.

    :param isamAppliance: appliance connection object providing invoke_get
    :return: appliance response object containing the logging configuration
    """
    description = "Retrieving logging configuration"
    return isamAppliance.invoke_get(description, module_uri)
def update(isamAppliance, local, remote_address, remote_port, remote_facility, check_mode=False, force=False):
    """
    Update the FELB logging configuration, idempotently.

    :param isamAppliance: appliance connection object
    :param local: True to log locally; remote settings are then ignored
    :param remote_address: remote syslog server address (used when local is False)
    :param remote_port: remote syslog server port (used when local is False)
    :param remote_facility: remote syslog facility (used when local is False)
    :param check_mode: if True, report whether a change would occur without applying it
    :param force: if True, push the configuration even when no change is detected
    :return: appliance response / return object
    """
    change_required, json_data = _check(isamAppliance, local, remote_address, remote_port, remote_facility)

    if force is True or change_required is True:
        if check_mode is True:
            # Bug fix: check_mode was previously ignored and the PUT was
            # issued anyway; in check mode we only report the pending change.
            return isamAppliance.create_return_object(changed=True)
        return isamAppliance.invoke_put("Updating Configuration", module_uri, json_data,
                                        requires_modules=requires_modules, requires_version=requires_versions)

    # Nothing to do - configuration already matches the requested state.
    return isamAppliance.create_return_object(changed=False)
def _check(isamAppliance, local, remote_address, remote_port, remote_facility):
    """
    Determine whether the requested logging settings differ from the
    appliance's current configuration (idempotency check).

    :return: tuple of (change_required, json_data) where json_data is the
             payload to send if an update is needed
    """
    current = get(isamAppliance)

    if local is True:
        # Local logging: the remote fields are not used and are blanked out.
        desired = {
            "local": True,
            "remote_address": "",
            "remote_port": None,
            "remote_facility": None
        }
        return current['data']['local'] != local, desired

    desired = {
        "local": local,
        "remote_address": remote_address,
        "remote_port": remote_port,
        "remote_facility": remote_facility
    }
    # Remote logging: any mismatch in the remote settings requires an update.
    changed = (
        current['data']['remote_address'] != remote_address
        or current['data']['remote_port'] != remote_port
        or current['data']['remote_facility'] != remote_facility
    )
    return changed, desired
| [
"logging.getLogger"
] | [((25, 52), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (42, 52), False, 'import logging\n')] |
"""Step arguments tests."""
import functools
import re
from pytest_bdd import given, parsers, scenario, then, when
import pytest
from pytest_bdd import exceptions
scenario_when = functools.partial(scenario, "../when_arguments.feature")
scenario_args = functools.partial(scenario, "../args_steps.feature")
@scenario_args("Every step takes a parameter with the same name")
def test_steps():
pass
@scenario_when("Argument in when, step 1")
def test_argument_in_when_step_1():
pass
@scenario_when("Argument in when, step 2")
def test_argument_in_when_step_2():
pass
def test_multiple_given(request):
"""Using the same given fixture raises an error."""
@scenario_args("Using the same given fixture raises an error")
def test():
pass
with pytest.raises(exceptions.GivenAlreadyUsed):
test(request)
@given(parsers.re(r"I have (?P<euro>\d+) Euro"), converters=dict(euro=int))
def i_have(euro, values):
assert euro == values.pop(0)
@when(parsers.re(r"I pay (?P<euro>\d+) Euro"), converters=dict(euro=int))
def i_pay(euro, values, request):
assert euro == values.pop(0)
@then(parsers.re(r"I should have (?P<euro>\d+) Euro"), converters=dict(euro=int))
def i_should_have(euro, values):
assert euro == values.pop(0)
@given(parsers.re(r"I have an argument (?P<arg>\d+)"))
def argument(arg):
"""I have an argument."""
return dict(arg=arg)
@when(parsers.re(r"I get argument (?P<arg>\d+)"))
def get_argument(argument, arg):
"""Getting argument."""
argument["arg"] = arg
@then(parsers.re(r"My argument should be (?P<arg>\d+)"))
def assert_that_my_argument_is_arg(argument, arg):
"""Assert that arg from when equals arg."""
assert argument["arg"] == arg
| [
"pytest_bdd.parsers.re",
"functools.partial",
"pytest.raises"
] | [((183, 239), 'functools.partial', 'functools.partial', (['scenario', '"""../when_arguments.feature"""'], {}), "(scenario, '../when_arguments.feature')\n", (200, 239), False, 'import functools\n'), ((257, 309), 'functools.partial', 'functools.partial', (['scenario', '"""../args_steps.feature"""'], {}), "(scenario, '../args_steps.feature')\n", (274, 309), False, 'import functools\n'), ((859, 899), 'pytest_bdd.parsers.re', 'parsers.re', (['"""I have (?P<euro>\\\\d+) Euro"""'], {}), "('I have (?P<euro>\\\\d+) Euro')\n", (869, 899), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((995, 1034), 'pytest_bdd.parsers.re', 'parsers.re', (['"""I pay (?P<euro>\\\\d+) Euro"""'], {}), "('I pay (?P<euro>\\\\d+) Euro')\n", (1005, 1034), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((1138, 1185), 'pytest_bdd.parsers.re', 'parsers.re', (['"""I should have (?P<euro>\\\\d+) Euro"""'], {}), "('I should have (?P<euro>\\\\d+) Euro')\n", (1148, 1185), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((1289, 1335), 'pytest_bdd.parsers.re', 'parsers.re', (['"""I have an argument (?P<arg>\\\\d+)"""'], {}), "('I have an argument (?P<arg>\\\\d+)')\n", (1299, 1335), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((1419, 1461), 'pytest_bdd.parsers.re', 'parsers.re', (['"""I get argument (?P<arg>\\\\d+)"""'], {}), "('I get argument (?P<arg>\\\\d+)')\n", (1429, 1461), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((1558, 1607), 'pytest_bdd.parsers.re', 'parsers.re', (['"""My argument should be (?P<arg>\\\\d+)"""'], {}), "('My argument should be (?P<arg>\\\\d+)')\n", (1568, 1607), False, 'from pytest_bdd import given, parsers, scenario, then, when\n'), ((784, 826), 'pytest.raises', 'pytest.raises', (['exceptions.GivenAlreadyUsed'], {}), '(exceptions.GivenAlreadyUsed)\n', (797, 826), False, 'import pytest\n')] |
"""
AI Heuristics for various evaluation methods
"""
from __future__ import print_function
import random
__author__ = "Matthew 'MasterOdin' Peveler"
__license__ = "The MIT License (MIT)"
class Heuristic(object):
"""
This is the heuristic interface
"""
@staticmethod
def get_best_column(board):
"""
Get the best column based on some heuristic
:param board:
:return:
"""
raise NotImplementedError("Not yet implemented")
@staticmethod
def get_column_value(board, column):
"""
Get the particular value for a column based on some heuristic
:param board:
:param column:
:return:
"""
raise NotImplementedError("Not yet implemented")
class Random(Heuristic):
"""
Random heuristic. Just returns valid random column
"""
NAME = "Random"
def __init__(self):
pass
@staticmethod
def get_best_column(board):
col = -1
while not board.can_add_piece(col):
col = random.randint(0, board.COLUMNS)
return col
@staticmethod
def get_column_value(board, column):
pass
class MinMax(Heuristic):
"""
MinMax heuristic for AI agent
"""
NAME = "MinMax"
def __init__(self):
pass
@staticmethod
def get_best_column(board):
pass
@staticmethod
def get_column_value(board, column):
pass
| [
"random.randint"
] | [((1048, 1080), 'random.randint', 'random.randint', (['(0)', 'board.COLUMNS'], {}), '(0, board.COLUMNS)\n', (1062, 1080), False, 'import random\n')] |
#
# (c) Copyright 2015 <NAME>. All Rights Reserved.
#
"""
Various utilities.
"""
from flask.ext.restful import abort
from bson.objectid import ObjectId
from base64 import b64decode
from werkzeug.routing import BaseConverter
class AxesURIConverter(BaseConverter):
"""
Flask routing converter for AXES URIs of the form::
axes:/path
For example:
axes:/cAXES/v20080512_12...e_clips_investigates/s000000120
The converter strips off the leading 'axes:' part to return the path
"""
regex = r'axes:.*?'
def to_python(self, value):
return value[5:]
supported_mimetypes = {
'image/png': '.png',
'image/jpeg': '.jpg',
'image/bmp': '.bmp',
'image/gif': '.gif',
}
def find_or_404(collection, objectid):
try:
objectid = ObjectId(objectid)
except:
abort(404, message="invalid object id")
item = collection.find_one(objectid)
if not item:
error = "resource with id {} does not exist".format(str(objectid))
abort(404, message=error)
return item
def clause_type(text):
try:
type, value = text.split(':', 1)
except:
raise ValueError('Parse error')
# prepend hash if necessary
if not type.startswith('#'):
type = '#' + type
return { 'type': type, 'text': value }
def parse_data_url(data_url):
"""
Parse a data url into a tuple of params and the encoded data.
E.g.
>>> data_url = "data:image/png;base64,ABC123xxx"
>>> params, encoded_data = parse_data_url(data_url)
>>> params
('image/png', 'base64')
>>> data
'ABC123xxx'
"""
# e.g. data:image/png;base64,xxx..
if not data_url.startswith('data:'):
raise ValueError('not a data url')
data_url = data_url[5:]
params, data = data_url.split(',')
params = params.split(';')
return params, data
def get_image_data_and_extension_from_data_url(data_url):
"""
Parse image data encoded in a data URL and return the decoded (raw) data
and an appropriate file extension to use.
"""
params, data = parse_data_url(data_url)
if len(params) < 2:
raise ValueError('invalid data url: not enough params')
mimetype = params[0]
encoding = params[-1]
if encoding != 'base64':
raise ValueError('Unsupported encoding: {}'.format(encoding))
if mimetype not in supported_mimetypes:
raise ValueError('Unsupported mimetype: {}'.format(mimetype))
data = b64decode(data)
extension = supported_mimetypes[mimetype]
return data, extension
| [
"flask.ext.restful.abort",
"bson.objectid.ObjectId",
"base64.b64decode"
] | [((2528, 2543), 'base64.b64decode', 'b64decode', (['data'], {}), '(data)\n', (2537, 2543), False, 'from base64 import b64decode\n'), ((839, 857), 'bson.objectid.ObjectId', 'ObjectId', (['objectid'], {}), '(objectid)\n', (847, 857), False, 'from bson.objectid import ObjectId\n'), ((1059, 1084), 'flask.ext.restful.abort', 'abort', (['(404)'], {'message': 'error'}), '(404, message=error)\n', (1064, 1084), False, 'from flask.ext.restful import abort\n'), ((878, 917), 'flask.ext.restful.abort', 'abort', (['(404)'], {'message': '"""invalid object id"""'}), "(404, message='invalid object id')\n", (883, 917), False, 'from flask.ext.restful import abort\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetBlockchainPlatformResult',
'AwaitableGetBlockchainPlatformResult',
'get_blockchain_platform',
]
@pulumi.output_type
class GetBlockchainPlatformResult:
"""
A collection of values returned by getBlockchainPlatform.
"""
def __init__(__self__, blockchain_platform_id=None, ca_cert_archive_text=None, compartment_id=None, component_details=None, compute_shape=None, defined_tags=None, description=None, display_name=None, federated_user_id=None, freeform_tags=None, host_ocpu_utilization_infos=None, id=None, idcs_access_token=None, is_byol=None, is_multi_ad=None, lifecycle_details=None, load_balancer_shape=None, platform_role=None, platform_shape_type=None, replicas=None, service_endpoint=None, service_version=None, state=None, storage_size_in_tbs=None, storage_used_in_tbs=None, time_created=None, time_updated=None, total_ocpu_capacity=None):
if blockchain_platform_id and not isinstance(blockchain_platform_id, str):
raise TypeError("Expected argument 'blockchain_platform_id' to be a str")
pulumi.set(__self__, "blockchain_platform_id", blockchain_platform_id)
if ca_cert_archive_text and not isinstance(ca_cert_archive_text, str):
raise TypeError("Expected argument 'ca_cert_archive_text' to be a str")
pulumi.set(__self__, "ca_cert_archive_text", ca_cert_archive_text)
if compartment_id and not isinstance(compartment_id, str):
raise TypeError("Expected argument 'compartment_id' to be a str")
pulumi.set(__self__, "compartment_id", compartment_id)
if component_details and not isinstance(component_details, dict):
raise TypeError("Expected argument 'component_details' to be a dict")
pulumi.set(__self__, "component_details", component_details)
if compute_shape and not isinstance(compute_shape, str):
raise TypeError("Expected argument 'compute_shape' to be a str")
pulumi.set(__self__, "compute_shape", compute_shape)
if defined_tags and not isinstance(defined_tags, dict):
raise TypeError("Expected argument 'defined_tags' to be a dict")
pulumi.set(__self__, "defined_tags", defined_tags)
if description and not isinstance(description, str):
raise TypeError("Expected argument 'description' to be a str")
pulumi.set(__self__, "description", description)
if display_name and not isinstance(display_name, str):
raise TypeError("Expected argument 'display_name' to be a str")
pulumi.set(__self__, "display_name", display_name)
if federated_user_id and not isinstance(federated_user_id, str):
raise TypeError("Expected argument 'federated_user_id' to be a str")
pulumi.set(__self__, "federated_user_id", federated_user_id)
if freeform_tags and not isinstance(freeform_tags, dict):
raise TypeError("Expected argument 'freeform_tags' to be a dict")
pulumi.set(__self__, "freeform_tags", freeform_tags)
if host_ocpu_utilization_infos and not isinstance(host_ocpu_utilization_infos, list):
raise TypeError("Expected argument 'host_ocpu_utilization_infos' to be a list")
pulumi.set(__self__, "host_ocpu_utilization_infos", host_ocpu_utilization_infos)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if idcs_access_token and not isinstance(idcs_access_token, str):
raise TypeError("Expected argument 'idcs_access_token' to be a str")
pulumi.set(__self__, "idcs_access_token", idcs_access_token)
if is_byol and not isinstance(is_byol, bool):
raise TypeError("Expected argument 'is_byol' to be a bool")
pulumi.set(__self__, "is_byol", is_byol)
if is_multi_ad and not isinstance(is_multi_ad, bool):
raise TypeError("Expected argument 'is_multi_ad' to be a bool")
pulumi.set(__self__, "is_multi_ad", is_multi_ad)
if lifecycle_details and not isinstance(lifecycle_details, str):
raise TypeError("Expected argument 'lifecycle_details' to be a str")
pulumi.set(__self__, "lifecycle_details", lifecycle_details)
if load_balancer_shape and not isinstance(load_balancer_shape, str):
raise TypeError("Expected argument 'load_balancer_shape' to be a str")
pulumi.set(__self__, "load_balancer_shape", load_balancer_shape)
if platform_role and not isinstance(platform_role, str):
raise TypeError("Expected argument 'platform_role' to be a str")
pulumi.set(__self__, "platform_role", platform_role)
if platform_shape_type and not isinstance(platform_shape_type, str):
raise TypeError("Expected argument 'platform_shape_type' to be a str")
pulumi.set(__self__, "platform_shape_type", platform_shape_type)
if replicas and not isinstance(replicas, dict):
raise TypeError("Expected argument 'replicas' to be a dict")
pulumi.set(__self__, "replicas", replicas)
if service_endpoint and not isinstance(service_endpoint, str):
raise TypeError("Expected argument 'service_endpoint' to be a str")
pulumi.set(__self__, "service_endpoint", service_endpoint)
if service_version and not isinstance(service_version, str):
raise TypeError("Expected argument 'service_version' to be a str")
pulumi.set(__self__, "service_version", service_version)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if storage_size_in_tbs and not isinstance(storage_size_in_tbs, float):
raise TypeError("Expected argument 'storage_size_in_tbs' to be a float")
pulumi.set(__self__, "storage_size_in_tbs", storage_size_in_tbs)
if storage_used_in_tbs and not isinstance(storage_used_in_tbs, float):
raise TypeError("Expected argument 'storage_used_in_tbs' to be a float")
pulumi.set(__self__, "storage_used_in_tbs", storage_used_in_tbs)
if time_created and not isinstance(time_created, str):
raise TypeError("Expected argument 'time_created' to be a str")
pulumi.set(__self__, "time_created", time_created)
if time_updated and not isinstance(time_updated, str):
raise TypeError("Expected argument 'time_updated' to be a str")
pulumi.set(__self__, "time_updated", time_updated)
if total_ocpu_capacity and not isinstance(total_ocpu_capacity, int):
raise TypeError("Expected argument 'total_ocpu_capacity' to be a int")
pulumi.set(__self__, "total_ocpu_capacity", total_ocpu_capacity)
@property
@pulumi.getter(name="blockchainPlatformId")
def blockchain_platform_id(self) -> str:
return pulumi.get(self, "blockchain_platform_id")
@property
@pulumi.getter(name="caCertArchiveText")
def ca_cert_archive_text(self) -> str:
return pulumi.get(self, "ca_cert_archive_text")
@property
@pulumi.getter(name="compartmentId")
def compartment_id(self) -> str:
"""
Compartment Identifier
"""
return pulumi.get(self, "compartment_id")
@property
@pulumi.getter(name="componentDetails")
def component_details(self) -> 'outputs.GetBlockchainPlatformComponentDetailsResult':
"""
Blockchain Platform component details.
"""
return pulumi.get(self, "component_details")
@property
@pulumi.getter(name="computeShape")
def compute_shape(self) -> str:
"""
Compute shape - STANDARD or ENTERPRISE_SMALL or ENTERPRISE_MEDIUM or ENTERPRISE_LARGE or ENTERPRISE_EXTRA_LARGE or ENTERPRISE_CUSTOM
"""
return pulumi.get(self, "compute_shape")
@property
@pulumi.getter(name="definedTags")
def defined_tags(self) -> Mapping[str, Any]:
"""
Defined tags for this resource. Each key is predefined and scoped to a namespace. Example: `{"foo-namespace.bar-key": "value"}`
"""
return pulumi.get(self, "defined_tags")
@property
@pulumi.getter
def description(self) -> str:
"""
Platform Instance Description
"""
return pulumi.get(self, "description")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
"""
Platform Instance Display name, can be renamed
"""
return pulumi.get(self, "display_name")
@property
@pulumi.getter(name="federatedUserId")
def federated_user_id(self) -> str:
return pulumi.get(self, "federated_user_id")
@property
@pulumi.getter(name="freeformTags")
def freeform_tags(self) -> Mapping[str, Any]:
"""
Simple key-value pair that is applied without any predefined name, type or scope. Exists for cross-compatibility only. Example: `{"bar-key": "value"}`
"""
return pulumi.get(self, "freeform_tags")
@property
@pulumi.getter(name="hostOcpuUtilizationInfos")
def host_ocpu_utilization_infos(self) -> Sequence['outputs.GetBlockchainPlatformHostOcpuUtilizationInfoResult']:
"""
List of OcpuUtilization for all hosts
"""
return pulumi.get(self, "host_ocpu_utilization_infos")
@property
@pulumi.getter
def id(self) -> str:
"""
unique identifier that is immutable on creation
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="idcsAccessToken")
def idcs_access_token(self) -> str:
return pulumi.get(self, "idcs_access_token")
@property
@pulumi.getter(name="isByol")
def is_byol(self) -> bool:
"""
Bring your own license
"""
return pulumi.get(self, "is_byol")
@property
@pulumi.getter(name="isMultiAd")
def is_multi_ad(self) -> bool:
"""
True for multi-AD blockchain plaforms, false for single-AD
"""
return pulumi.get(self, "is_multi_ad")
@property
@pulumi.getter(name="lifecycleDetails")
def lifecycle_details(self) -> str:
"""
An message describing the current state in more detail. For example, can be used to provide actionable information for a resource in Failed state.
"""
return pulumi.get(self, "lifecycle_details")
@property
@pulumi.getter(name="loadBalancerShape")
def load_balancer_shape(self) -> str:
"""
Type of Load Balancer shape - LB_100_MBPS or LB_400_MBPS. Default is LB_100_MBPS.
"""
return pulumi.get(self, "load_balancer_shape")
@property
@pulumi.getter(name="platformRole")
def platform_role(self) -> str:
"""
Role of platform - FOUNDER or PARTICIPANT
"""
return pulumi.get(self, "platform_role")
@property
@pulumi.getter(name="platformShapeType")
def platform_shape_type(self) -> str:
"""
Type of Platform shape - DEFAULT or CUSTOM
"""
return pulumi.get(self, "platform_shape_type")
@property
@pulumi.getter
def replicas(self) -> 'outputs.GetBlockchainPlatformReplicasResult':
"""
Number of replicas of service components like Rest Proxy, CA and Console
"""
return pulumi.get(self, "replicas")
@property
@pulumi.getter(name="serviceEndpoint")
def service_endpoint(self) -> str:
"""
Service endpoint URL, valid post-provisioning
"""
return pulumi.get(self, "service_endpoint")
@property
@pulumi.getter(name="serviceVersion")
def service_version(self) -> str:
"""
The version of the Platform Instance.
"""
return pulumi.get(self, "service_version")
@property
@pulumi.getter
def state(self) -> str:
"""
The current state of the Platform Instance.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="storageSizeInTbs")
def storage_size_in_tbs(self) -> float:
"""
Storage size in TBs
"""
return pulumi.get(self, "storage_size_in_tbs")
@property
@pulumi.getter(name="storageUsedInTbs")
def storage_used_in_tbs(self) -> float:
"""
Storage used in TBs
"""
return pulumi.get(self, "storage_used_in_tbs")
@property
@pulumi.getter(name="timeCreated")
def time_created(self) -> str:
"""
The time the the Platform Instance was created. An RFC3339 formatted datetime string
"""
return pulumi.get(self, "time_created")
@property
@pulumi.getter(name="timeUpdated")
def time_updated(self) -> str:
"""
The time the Platform Instance was updated. An RFC3339 formatted datetime string
"""
return pulumi.get(self, "time_updated")
@property
@pulumi.getter(name="totalOcpuCapacity")
def total_ocpu_capacity(self) -> int:
"""
Number of total OCPUs allocated to the platform cluster
"""
return pulumi.get(self, "total_ocpu_capacity")
class AwaitableGetBlockchainPlatformResult(GetBlockchainPlatformResult):
    # Awaitable wrapper: the unreachable ``yield`` below turns __await__ into
    # a generator (which is what makes instances awaitable); it never runs.
    # pylint: disable=using-constant-test
    def __await__(self):
        if False:
            yield self
        # Awaiting resolves immediately to a plain result object carrying
        # every field of this instance.
        return GetBlockchainPlatformResult(
            blockchain_platform_id=self.blockchain_platform_id,
            ca_cert_archive_text=self.ca_cert_archive_text,
            compartment_id=self.compartment_id,
            component_details=self.component_details,
            compute_shape=self.compute_shape,
            defined_tags=self.defined_tags,
            description=self.description,
            display_name=self.display_name,
            federated_user_id=self.federated_user_id,
            freeform_tags=self.freeform_tags,
            host_ocpu_utilization_infos=self.host_ocpu_utilization_infos,
            id=self.id,
            idcs_access_token=self.idcs_access_token,
            is_byol=self.is_byol,
            is_multi_ad=self.is_multi_ad,
            lifecycle_details=self.lifecycle_details,
            load_balancer_shape=self.load_balancer_shape,
            platform_role=self.platform_role,
            platform_shape_type=self.platform_shape_type,
            replicas=self.replicas,
            service_endpoint=self.service_endpoint,
            service_version=self.service_version,
            state=self.state,
            storage_size_in_tbs=self.storage_size_in_tbs,
            storage_used_in_tbs=self.storage_used_in_tbs,
            time_created=self.time_created,
            time_updated=self.time_updated,
            total_ocpu_capacity=self.total_ocpu_capacity)
def get_blockchain_platform(blockchain_platform_id: Optional[str] = None,
                            opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetBlockchainPlatformResult:
    """
    This data source provides details about a specific Blockchain Platform resource in Oracle Cloud Infrastructure Blockchain service.

    Gets information about a Blockchain Platform identified by the specific id

    ## Example Usage

    ```python
    import pulumi
    import pulumi_oci as oci

    test_blockchain_platform = oci.blockchain.get_blockchain_platform(blockchain_platform_id=oci_blockchain_blockchain_platform["test_blockchain_platform"]["id"])
    ```

    :param str blockchain_platform_id: Unique service identifier.
    """
    # Build the invoke arguments and make sure a versioned InvokeOptions is set.
    __args__ = {'blockchainPlatformId': blockchain_platform_id}
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    invoke_result = pulumi.runtime.invoke('oci:blockchain/getBlockchainPlatform:getBlockchainPlatform', __args__, opts=opts, typ=GetBlockchainPlatformResult)
    __ret__ = invoke_result.value
    # Re-wrap the raw invoke result in the awaitable result type.
    return AwaitableGetBlockchainPlatformResult(
        blockchain_platform_id=__ret__.blockchain_platform_id,
        ca_cert_archive_text=__ret__.ca_cert_archive_text,
        compartment_id=__ret__.compartment_id,
        component_details=__ret__.component_details,
        compute_shape=__ret__.compute_shape,
        defined_tags=__ret__.defined_tags,
        description=__ret__.description,
        display_name=__ret__.display_name,
        federated_user_id=__ret__.federated_user_id,
        freeform_tags=__ret__.freeform_tags,
        host_ocpu_utilization_infos=__ret__.host_ocpu_utilization_infos,
        id=__ret__.id,
        idcs_access_token=__ret__.idcs_access_token,
        is_byol=__ret__.is_byol,
        is_multi_ad=__ret__.is_multi_ad,
        lifecycle_details=__ret__.lifecycle_details,
        load_balancer_shape=__ret__.load_balancer_shape,
        platform_role=__ret__.platform_role,
        platform_shape_type=__ret__.platform_shape_type,
        replicas=__ret__.replicas,
        service_endpoint=__ret__.service_endpoint,
        service_version=__ret__.service_version,
        state=__ret__.state,
        storage_size_in_tbs=__ret__.storage_size_in_tbs,
        storage_used_in_tbs=__ret__.storage_used_in_tbs,
        time_created=__ret__.time_created,
        time_updated=__ret__.time_updated,
        total_ocpu_capacity=__ret__.total_ocpu_capacity)
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] | [((7191, 7233), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""blockchainPlatformId"""'}), "(name='blockchainPlatformId')\n", (7204, 7233), False, 'import pulumi\n'), ((7357, 7396), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""caCertArchiveText"""'}), "(name='caCertArchiveText')\n", (7370, 7396), False, 'import pulumi\n'), ((7516, 7551), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""compartmentId"""'}), "(name='compartmentId')\n", (7529, 7551), False, 'import pulumi\n'), ((7714, 7752), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""componentDetails"""'}), "(name='componentDetails')\n", (7727, 7752), False, 'import pulumi\n'), ((7987, 8021), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""computeShape"""'}), "(name='computeShape')\n", (8000, 8021), False, 'import pulumi\n'), ((8292, 8325), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""definedTags"""'}), "(name='definedTags')\n", (8305, 8325), False, 'import pulumi\n'), ((8780, 8813), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""displayName"""'}), "(name='displayName')\n", (8793, 8813), False, 'import pulumi\n'), ((8996, 9033), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""federatedUserId"""'}), "(name='federatedUserId')\n", (9009, 9033), False, 'import pulumi\n'), ((9147, 9181), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""freeformTags"""'}), "(name='freeformTags')\n", (9160, 9181), False, 'import pulumi\n'), ((9484, 9530), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""hostOcpuUtilizationInfos"""'}), "(name='hostOcpuUtilizationInfos')\n", (9497, 9530), False, 'import pulumi\n'), ((9978, 10015), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""idcsAccessToken"""'}), "(name='idcsAccessToken')\n", (9991, 10015), False, 'import pulumi\n'), ((10129, 10157), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isByol"""'}), "(name='isByol')\n", (10142, 10157), False, 'import pulumi\n'), ((10307, 10338), 'pulumi.getter', 'pulumi.getter', ([], {'name': 
'"""isMultiAd"""'}), "(name='isMultiAd')\n", (10320, 10338), False, 'import pulumi\n'), ((10532, 10570), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""lifecycleDetails"""'}), "(name='lifecycleDetails')\n", (10545, 10570), False, 'import pulumi\n'), ((10863, 10902), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""loadBalancerShape"""'}), "(name='loadBalancerShape')\n", (10876, 10902), False, 'import pulumi\n'), ((11134, 11168), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""platformRole"""'}), "(name='platformRole')\n", (11147, 11168), False, 'import pulumi\n'), ((11348, 11387), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""platformShapeType"""'}), "(name='platformShapeType')\n", (11361, 11387), False, 'import pulumi\n'), ((11836, 11873), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceEndpoint"""'}), "(name='serviceEndpoint')\n", (11849, 11873), False, 'import pulumi\n'), ((12063, 12099), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""serviceVersion"""'}), "(name='serviceVersion')\n", (12076, 12099), False, 'import pulumi\n'), ((12458, 12496), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""storageSizeInTbs"""'}), "(name='storageSizeInTbs')\n", (12471, 12496), False, 'import pulumi\n'), ((12668, 12706), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""storageUsedInTbs"""'}), "(name='storageUsedInTbs')\n", (12681, 12706), False, 'import pulumi\n'), ((12878, 12911), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeCreated"""'}), "(name='timeCreated')\n", (12891, 12911), False, 'import pulumi\n'), ((13132, 13165), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""timeUpdated"""'}), "(name='timeUpdated')\n", (13145, 13165), False, 'import pulumi\n'), ((13382, 13421), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""totalOcpuCapacity"""'}), "(name='totalOcpuCapacity')\n", (13395, 13421), False, 'import pulumi\n'), ((1422, 1492), 'pulumi.set', 'pulumi.set', (['__self__', '"""blockchain_platform_id"""', 
'blockchain_platform_id'], {}), "(__self__, 'blockchain_platform_id', blockchain_platform_id)\n", (1432, 1492), False, 'import pulumi\n'), ((1664, 1730), 'pulumi.set', 'pulumi.set', (['__self__', '"""ca_cert_archive_text"""', 'ca_cert_archive_text'], {}), "(__self__, 'ca_cert_archive_text', ca_cert_archive_text)\n", (1674, 1730), False, 'import pulumi\n'), ((1884, 1938), 'pulumi.set', 'pulumi.set', (['__self__', '"""compartment_id"""', 'compartment_id'], {}), "(__self__, 'compartment_id', compartment_id)\n", (1894, 1938), False, 'import pulumi\n'), ((2103, 2163), 'pulumi.set', 'pulumi.set', (['__self__', '"""component_details"""', 'component_details'], {}), "(__self__, 'component_details', component_details)\n", (2113, 2163), False, 'import pulumi\n'), ((2314, 2366), 'pulumi.set', 'pulumi.set', (['__self__', '"""compute_shape"""', 'compute_shape'], {}), "(__self__, 'compute_shape', compute_shape)\n", (2324, 2366), False, 'import pulumi\n'), ((2516, 2566), 'pulumi.set', 'pulumi.set', (['__self__', '"""defined_tags"""', 'defined_tags'], {}), "(__self__, 'defined_tags', defined_tags)\n", (2526, 2566), False, 'import pulumi\n'), ((2711, 2759), 'pulumi.set', 'pulumi.set', (['__self__', '"""description"""', 'description'], {}), "(__self__, 'description', description)\n", (2721, 2759), False, 'import pulumi\n'), ((2907, 2957), 'pulumi.set', 'pulumi.set', (['__self__', '"""display_name"""', 'display_name'], {}), "(__self__, 'display_name', display_name)\n", (2917, 2957), False, 'import pulumi\n'), ((3120, 3180), 'pulumi.set', 'pulumi.set', (['__self__', '"""federated_user_id"""', 'federated_user_id'], {}), "(__self__, 'federated_user_id', federated_user_id)\n", (3130, 3180), False, 'import pulumi\n'), ((3333, 3385), 'pulumi.set', 'pulumi.set', (['__self__', '"""freeform_tags"""', 'freeform_tags'], {}), "(__self__, 'freeform_tags', freeform_tags)\n", (3343, 3385), False, 'import pulumi\n'), ((3580, 3665), 'pulumi.set', 'pulumi.set', (['__self__', 
'"""host_ocpu_utilization_infos"""', 'host_ocpu_utilization_infos'], {}), "(__self__, 'host_ocpu_utilization_infos', host_ocpu_utilization_infos\n )\n", (3590, 3665), False, 'import pulumi\n'), ((3778, 3808), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (3788, 3808), False, 'import pulumi\n'), ((3971, 4031), 'pulumi.set', 'pulumi.set', (['__self__', '"""idcs_access_token"""', 'idcs_access_token'], {}), "(__self__, 'idcs_access_token', idcs_access_token)\n", (3981, 4031), False, 'import pulumi\n'), ((4166, 4206), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_byol"""', 'is_byol'], {}), "(__self__, 'is_byol', is_byol)\n", (4176, 4206), False, 'import pulumi\n'), ((4353, 4401), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_multi_ad"""', 'is_multi_ad'], {}), "(__self__, 'is_multi_ad', is_multi_ad)\n", (4363, 4401), False, 'import pulumi\n'), ((4564, 4624), 'pulumi.set', 'pulumi.set', (['__self__', '"""lifecycle_details"""', 'lifecycle_details'], {}), "(__self__, 'lifecycle_details', lifecycle_details)\n", (4574, 4624), False, 'import pulumi\n'), ((4793, 4857), 'pulumi.set', 'pulumi.set', (['__self__', '"""load_balancer_shape"""', 'load_balancer_shape'], {}), "(__self__, 'load_balancer_shape', load_balancer_shape)\n", (4803, 4857), False, 'import pulumi\n'), ((5008, 5060), 'pulumi.set', 'pulumi.set', (['__self__', '"""platform_role"""', 'platform_role'], {}), "(__self__, 'platform_role', platform_role)\n", (5018, 5060), False, 'import pulumi\n'), ((5229, 5293), 'pulumi.set', 'pulumi.set', (['__self__', '"""platform_shape_type"""', 'platform_shape_type'], {}), "(__self__, 'platform_shape_type', platform_shape_type)\n", (5239, 5293), False, 'import pulumi\n'), ((5431, 5473), 'pulumi.set', 'pulumi.set', (['__self__', '"""replicas"""', 'replicas'], {}), "(__self__, 'replicas', replicas)\n", (5441, 5473), False, 'import pulumi\n'), ((5633, 5691), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_endpoint"""', 
'service_endpoint'], {}), "(__self__, 'service_endpoint', service_endpoint)\n", (5643, 5691), False, 'import pulumi\n'), ((5848, 5904), 'pulumi.set', 'pulumi.set', (['__self__', '"""service_version"""', 'service_version'], {}), "(__self__, 'service_version', service_version)\n", (5858, 5904), False, 'import pulumi\n'), ((6031, 6067), 'pulumi.set', 'pulumi.set', (['__self__', '"""state"""', 'state'], {}), "(__self__, 'state', state)\n", (6041, 6067), False, 'import pulumi\n'), ((6240, 6304), 'pulumi.set', 'pulumi.set', (['__self__', '"""storage_size_in_tbs"""', 'storage_size_in_tbs'], {}), "(__self__, 'storage_size_in_tbs', storage_size_in_tbs)\n", (6250, 6304), False, 'import pulumi\n'), ((6477, 6541), 'pulumi.set', 'pulumi.set', (['__self__', '"""storage_used_in_tbs"""', 'storage_used_in_tbs'], {}), "(__self__, 'storage_used_in_tbs', storage_used_in_tbs)\n", (6487, 6541), False, 'import pulumi\n'), ((6689, 6739), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_created"""', 'time_created'], {}), "(__self__, 'time_created', time_created)\n", (6699, 6739), False, 'import pulumi\n'), ((6887, 6937), 'pulumi.set', 'pulumi.set', (['__self__', '"""time_updated"""', 'time_updated'], {}), "(__self__, 'time_updated', time_updated)\n", (6897, 6937), False, 'import pulumi\n'), ((7106, 7170), 'pulumi.set', 'pulumi.set', (['__self__', '"""total_ocpu_capacity"""', 'total_ocpu_capacity'], {}), "(__self__, 'total_ocpu_capacity', total_ocpu_capacity)\n", (7116, 7170), False, 'import pulumi\n'), ((7294, 7336), 'pulumi.get', 'pulumi.get', (['self', '"""blockchain_platform_id"""'], {}), "(self, 'blockchain_platform_id')\n", (7304, 7336), False, 'import pulumi\n'), ((7455, 7495), 'pulumi.get', 'pulumi.get', (['self', '"""ca_cert_archive_text"""'], {}), "(self, 'ca_cert_archive_text')\n", (7465, 7495), False, 'import pulumi\n'), ((7659, 7693), 'pulumi.get', 'pulumi.get', (['self', '"""compartment_id"""'], {}), "(self, 'compartment_id')\n", (7669, 7693), False, 'import pulumi\n'), 
((7929, 7966), 'pulumi.get', 'pulumi.get', (['self', '"""component_details"""'], {}), "(self, 'component_details')\n", (7939, 7966), False, 'import pulumi\n'), ((8238, 8271), 'pulumi.get', 'pulumi.get', (['self', '"""compute_shape"""'], {}), "(self, 'compute_shape')\n", (8248, 8271), False, 'import pulumi\n'), ((8550, 8582), 'pulumi.get', 'pulumi.get', (['self', '"""defined_tags"""'], {}), "(self, 'defined_tags')\n", (8560, 8582), False, 'import pulumi\n'), ((8728, 8759), 'pulumi.get', 'pulumi.get', (['self', '"""description"""'], {}), "(self, 'description')\n", (8738, 8759), False, 'import pulumi\n'), ((8943, 8975), 'pulumi.get', 'pulumi.get', (['self', '"""display_name"""'], {}), "(self, 'display_name')\n", (8953, 8975), False, 'import pulumi\n'), ((9089, 9126), 'pulumi.get', 'pulumi.get', (['self', '"""federated_user_id"""'], {}), "(self, 'federated_user_id')\n", (9099, 9126), False, 'import pulumi\n'), ((9430, 9463), 'pulumi.get', 'pulumi.get', (['self', '"""freeform_tags"""'], {}), "(self, 'freeform_tags')\n", (9440, 9463), False, 'import pulumi\n'), ((9733, 9780), 'pulumi.get', 'pulumi.get', (['self', '"""host_ocpu_utilization_infos"""'], {}), "(self, 'host_ocpu_utilization_infos')\n", (9743, 9780), False, 'import pulumi\n'), ((9935, 9957), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (9945, 9957), False, 'import pulumi\n'), ((10071, 10108), 'pulumi.get', 'pulumi.get', (['self', '"""idcs_access_token"""'], {}), "(self, 'idcs_access_token')\n", (10081, 10108), False, 'import pulumi\n'), ((10259, 10286), 'pulumi.get', 'pulumi.get', (['self', '"""is_byol"""'], {}), "(self, 'is_byol')\n", (10269, 10286), False, 'import pulumi\n'), ((10480, 10511), 'pulumi.get', 'pulumi.get', (['self', '"""is_multi_ad"""'], {}), "(self, 'is_multi_ad')\n", (10490, 10511), False, 'import pulumi\n'), ((10805, 10842), 'pulumi.get', 'pulumi.get', (['self', '"""lifecycle_details"""'], {}), "(self, 'lifecycle_details')\n", (10815, 10842), False, 'import 
pulumi\n'), ((11074, 11113), 'pulumi.get', 'pulumi.get', (['self', '"""load_balancer_shape"""'], {}), "(self, 'load_balancer_shape')\n", (11084, 11113), False, 'import pulumi\n'), ((11294, 11327), 'pulumi.get', 'pulumi.get', (['self', '"""platform_role"""'], {}), "(self, 'platform_role')\n", (11304, 11327), False, 'import pulumi\n'), ((11520, 11559), 'pulumi.get', 'pulumi.get', (['self', '"""platform_shape_type"""'], {}), "(self, 'platform_shape_type')\n", (11530, 11559), False, 'import pulumi\n'), ((11787, 11815), 'pulumi.get', 'pulumi.get', (['self', '"""replicas"""'], {}), "(self, 'replicas')\n", (11797, 11815), False, 'import pulumi\n'), ((12006, 12042), 'pulumi.get', 'pulumi.get', (['self', '"""service_endpoint"""'], {}), "(self, 'service_endpoint')\n", (12016, 12042), False, 'import pulumi\n'), ((12223, 12258), 'pulumi.get', 'pulumi.get', (['self', '"""service_version"""'], {}), "(self, 'service_version')\n", (12233, 12258), False, 'import pulumi\n'), ((12412, 12437), 'pulumi.get', 'pulumi.get', (['self', '"""state"""'], {}), "(self, 'state')\n", (12422, 12437), False, 'import pulumi\n'), ((12608, 12647), 'pulumi.get', 'pulumi.get', (['self', '"""storage_size_in_tbs"""'], {}), "(self, 'storage_size_in_tbs')\n", (12618, 12647), False, 'import pulumi\n'), ((12818, 12857), 'pulumi.get', 'pulumi.get', (['self', '"""storage_used_in_tbs"""'], {}), "(self, 'storage_used_in_tbs')\n", (12828, 12857), False, 'import pulumi\n'), ((13079, 13111), 'pulumi.get', 'pulumi.get', (['self', '"""time_created"""'], {}), "(self, 'time_created')\n", (13089, 13111), False, 'import pulumi\n'), ((13329, 13361), 'pulumi.get', 'pulumi.get', (['self', '"""time_updated"""'], {}), "(self, 'time_updated')\n", (13339, 13361), False, 'import pulumi\n'), ((13567, 13606), 'pulumi.get', 'pulumi.get', (['self', '"""total_ocpu_capacity"""'], {}), "(self, 'total_ocpu_capacity')\n", (13577, 13606), False, 'import pulumi\n'), ((16075, 16097), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), 
'()\n', (16095, 16097), False, 'import pulumi\n'), ((16189, 16335), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""oci:blockchain/getBlockchainPlatform:getBlockchainPlatform"""', '__args__'], {'opts': 'opts', 'typ': 'GetBlockchainPlatformResult'}), "(\n 'oci:blockchain/getBlockchainPlatform:getBlockchainPlatform', __args__,\n opts=opts, typ=GetBlockchainPlatformResult)\n", (16210, 16335), False, 'import pulumi\n')] |
"""Test of Funsies python functions capabilities."""
# std
from io import BytesIO
from typing import Dict
# funsies
from funsies import _pyfunc as p
from funsies._constants import Encoding
def capitalize(inputs: Dict[str, bytes]) -> Dict[str, bytes]:
    """Return a new dict where every bytes value is upper-cased."""
    return {key: val.decode().upper().encode() for key, val in inputs.items()}
def capitalize2(inputs: Dict[str, bytes]) -> Dict[str, str]:
    """Return a new dict where every bytes value is decoded and upper-cased."""
    return {key: val.decode().upper() for key, val in inputs.items()}
def test_fun_wrap() -> None:
    """Check that a blob->blob python funsie can be instantiated."""
    funsie = p.python_funsie(
        capitalize, inputs={"in": Encoding.blob}, outputs={"in": Encoding.blob}
    )
    assert funsie is not None
def test_fun_run() -> None:
    """Run a blob->blob python funsie and verify its output bytes."""
    funsie = p.python_funsie(
        capitalize, inputs={"in": Encoding.blob}, outputs={"in": Encoding.blob}
    )
    data = {"in": BytesIO(b"bla bla bla")}
    result = p.run_python_funsie(funsie, data)
    assert result["in"] == b"BLA BLA BLA"
def test_fun_run_json() -> None:
    """Run a python funsie whose output is JSON-encoded and verify it."""
    funsie = p.python_funsie(
        capitalize2, inputs={"in": Encoding.blob}, outputs={"in": Encoding.json}
    )
    data = {"in": BytesIO(b"bla bla bla")}
    result = p.run_python_funsie(funsie, data)
    assert result["in"] == "BLA BLA BLA"
| [
"io.BytesIO",
"funsies._pyfunc.run_python_funsie",
"funsies._pyfunc.python_funsie"
] | [((691, 783), 'funsies._pyfunc.python_funsie', 'p.python_funsie', (['capitalize'], {'inputs': "{'in': Encoding.blob}", 'outputs': "{'in': Encoding.blob}"}), "(capitalize, inputs={'in': Encoding.blob}, outputs={'in':\n Encoding.blob})\n", (706, 783), True, 'from funsies import _pyfunc as p\n'), ((901, 993), 'funsies._pyfunc.python_funsie', 'p.python_funsie', (['capitalize'], {'inputs': "{'in': Encoding.blob}", 'outputs': "{'in': Encoding.blob}"}), "(capitalize, inputs={'in': Encoding.blob}, outputs={'in':\n Encoding.blob})\n", (916, 993), True, 'from funsies import _pyfunc as p\n'), ((1056, 1085), 'funsies._pyfunc.run_python_funsie', 'p.run_python_funsie', (['cmd', 'inp'], {}), '(cmd, inp)\n', (1075, 1085), True, 'from funsies import _pyfunc as p\n'), ((1230, 1323), 'funsies._pyfunc.python_funsie', 'p.python_funsie', (['capitalize2'], {'inputs': "{'in': Encoding.blob}", 'outputs': "{'in': Encoding.json}"}), "(capitalize2, inputs={'in': Encoding.blob}, outputs={'in':\n Encoding.json})\n", (1245, 1323), True, 'from funsies import _pyfunc as p\n'), ((1386, 1415), 'funsies._pyfunc.run_python_funsie', 'p.run_python_funsie', (['cmd', 'inp'], {}), '(cmd, inp)\n', (1405, 1415), True, 'from funsies import _pyfunc as p\n'), ((1021, 1044), 'io.BytesIO', 'BytesIO', (["b'bla bla bla'"], {}), "(b'bla bla bla')\n", (1028, 1044), False, 'from io import BytesIO\n'), ((1351, 1374), 'io.BytesIO', 'BytesIO', (["b'bla bla bla'"], {}), "(b'bla bla bla')\n", (1358, 1374), False, 'from io import BytesIO\n')] |
"""Recompute BERT predictions on UKP dev/test without topic information."""
import os
import csv
import itertools
from tqdm import tqdm
from pytorch_pretrained_bert.tokenization import BertTokenizer
import torch
from torch.utils.data import TensorDataset, DataLoader, SequentialSampler
from train import InputExample, convert_examples_to_features
from SigmoidBERT import SigmoidBERT
def inference(bert_output, test_file, eval_batch_size=32):
    """Score every pair of sentences from ``test_file`` with a fine-tuned BERT model.

    All unique sentences are read from the TSV test file, every unordered pair
    of them is scored (no topic information is used), and the predicted logits
    are written to a TSV file inside ``bert_output``.

    Args:
        bert_output: directory holding the fine-tuned tokenizer/model; the
            prediction file is written back into this directory.
        test_file: tab-separated file whose 2nd and 3rd columns are sentences.
        eval_batch_size: mini-batch size used during inference.
    """
    # Load the fine-tuned tokenizer and model.
    max_seq_length = 64
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    tokenizer = BertTokenizer.from_pretrained(bert_output, do_lower_case=True)
    model = SigmoidBERT.from_pretrained(bert_output)
    model.to(device)
    model.eval()

    # Collect the unique sentences of the test file.
    # newline='' is required by the csv module so embedded newlines survive.
    test_sentences = set()
    with open(test_file, 'r', newline='') as csvfile:
        csvreader = csv.reader(csvfile, delimiter='\t', quotechar=None)
        for splits in csvreader:
            splits = map(str.strip, splits)
            __, sentence_a, sentence_b, __ = splits
            test_sentences.add(sentence_a)
            test_sentences.add(sentence_b)

    # One example per unordered sentence pair; label -1 marks "unknown gold".
    input_examples = []
    output_examples = []
    for sentence_a, sentence_b in itertools.combinations(test_sentences, 2):
        input_examples.append(InputExample(text_a=sentence_a,
                                          text_b=sentence_b,
                                          label=-1))
        output_examples.append([sentence_a, sentence_b, -1])

    eval_features = convert_examples_to_features(input_examples, max_seq_length, tokenizer)
    all_input_ids = torch.tensor([f.input_ids for f in eval_features], dtype=torch.long)
    all_input_mask = torch.tensor([f.input_mask for f in eval_features], dtype=torch.long)
    all_segment_ids = torch.tensor([f.segment_ids for f in eval_features], dtype=torch.long)
    eval_data = TensorDataset(all_input_ids, all_input_mask, all_segment_ids)
    eval_sampler = SequentialSampler(eval_data)
    eval_dataloader = DataLoader(eval_data, sampler=eval_sampler, batch_size=eval_batch_size)

    # Inference: gather the raw sigmoid logits batch by batch.
    predicted_logits = []
    with torch.no_grad():
        for input_ids, input_mask, segment_ids in tqdm(eval_dataloader, desc="Batch"):
            input_ids = input_ids.to(device)
            input_mask = input_mask.to(device)
            segment_ids = segment_ids.to(device)
            logits = model(input_ids, segment_ids, input_mask)
            logits = logits.detach().cpu().numpy()
            predicted_logits.extend(logits[:, 0])
    # SequentialSampler preserves order, so logits line up with the examples.
    for example, logit in zip(output_examples, predicted_logits):
        example.append(logit)

    # Export results: sentence_a, sentence_b, gold label, predicted logit.
    eval_mode = os.path.basename(test_file).split(".")[0]
    output_pred_file = os.path.join(bert_output,
                                    f"{eval_mode}_predictions_epoch_3_no_topic_info.tsv")
    with open(output_pred_file, "w") as writer:
        for sentence_a, sentence_b, gold_label, pred_logit in output_examples:
            writer.write("\t".join([sentence_a.replace("\n", " ").replace("\t", " "),
                                    sentence_b.replace("\n", " ").replace("\t", " "),
                                    str(gold_label), str(pred_logit)]))
            writer.write("\n")
| [
"tqdm.tqdm",
"csv.reader",
"train.convert_examples_to_features",
"torch.utils.data.DataLoader",
"os.path.basename",
"pytorch_pretrained_bert.tokenization.BertTokenizer.from_pretrained",
"itertools.combinations",
"train.InputExample",
"SigmoidBERT.SigmoidBERT.from_pretrained",
"torch.utils.data.Ten... | [((623, 685), 'pytorch_pretrained_bert.tokenization.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['bert_output'], {'do_lower_case': '(True)'}), '(bert_output, do_lower_case=True)\n', (652, 685), False, 'from pytorch_pretrained_bert.tokenization import BertTokenizer\n'), ((698, 738), 'SigmoidBERT.SigmoidBERT.from_pretrained', 'SigmoidBERT.from_pretrained', (['bert_output'], {}), '(bert_output)\n', (725, 738), False, 'from SigmoidBERT import SigmoidBERT\n'), ((1173, 1214), 'itertools.combinations', 'itertools.combinations', (['test_sentences', '(2)'], {}), '(test_sentences, 2)\n', (1195, 1214), False, 'import itertools\n'), ((1570, 1641), 'train.convert_examples_to_features', 'convert_examples_to_features', (['input_examples', 'max_seq_length', 'tokenizer'], {}), '(input_examples, max_seq_length, tokenizer)\n', (1598, 1641), False, 'from train import InputExample, convert_examples_to_features\n'), ((1663, 1731), 'torch.tensor', 'torch.tensor', (['[f.input_ids for f in eval_features]'], {'dtype': 'torch.long'}), '([f.input_ids for f in eval_features], dtype=torch.long)\n', (1675, 1731), False, 'import torch\n'), ((1753, 1822), 'torch.tensor', 'torch.tensor', (['[f.input_mask for f in eval_features]'], {'dtype': 'torch.long'}), '([f.input_mask for f in eval_features], dtype=torch.long)\n', (1765, 1822), False, 'import torch\n'), ((1845, 1915), 'torch.tensor', 'torch.tensor', (['[f.segment_ids for f in eval_features]'], {'dtype': 'torch.long'}), '([f.segment_ids for f in eval_features], dtype=torch.long)\n', (1857, 1915), False, 'import torch\n'), ((1933, 1994), 'torch.utils.data.TensorDataset', 'TensorDataset', (['all_input_ids', 'all_input_mask', 'all_segment_ids'], {}), '(all_input_ids, all_input_mask, all_segment_ids)\n', (1946, 1994), False, 'from torch.utils.data import TensorDataset, DataLoader, SequentialSampler\n'), ((2014, 2042), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['eval_data'], {}), 
'(eval_data)\n', (2031, 2042), False, 'from torch.utils.data import TensorDataset, DataLoader, SequentialSampler\n'), ((2065, 2136), 'torch.utils.data.DataLoader', 'DataLoader', (['eval_data'], {'sampler': 'eval_sampler', 'batch_size': 'eval_batch_size'}), '(eval_data, sampler=eval_sampler, batch_size=eval_batch_size)\n', (2075, 2136), False, 'from torch.utils.data import TensorDataset, DataLoader, SequentialSampler\n'), ((2797, 2876), 'os.path.join', 'os.path.join', (['bert_output', 'f"""{eval_mode}_predictions_epoch_3_no_topic_info.tsv"""'], {}), "(bert_output, f'{eval_mode}_predictions_epoch_3_no_topic_info.tsv')\n", (2809, 2876), False, 'import os\n'), ((890, 941), 'csv.reader', 'csv.reader', (['csvfile'], {'delimiter': '"""\t"""', 'quotechar': 'None'}), "(csvfile, delimiter='\\t', quotechar=None)\n", (900, 941), False, 'import csv\n'), ((2189, 2204), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2202, 2204), False, 'import torch\n'), ((2256, 2291), 'tqdm.tqdm', 'tqdm', (['eval_dataloader'], {'desc': '"""Batch"""'}), "(eval_dataloader, desc='Batch')\n", (2260, 2291), False, 'from tqdm import tqdm\n'), ((569, 594), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (592, 594), False, 'import torch\n'), ((1340, 1400), 'train.InputExample', 'InputExample', ([], {'text_a': 'sentence_a', 'text_b': 'sentence_b', 'label': '(-1)'}), '(text_a=sentence_a, text_b=sentence_b, label=-1)\n', (1352, 1400), False, 'from train import InputExample, convert_examples_to_features\n'), ((2732, 2759), 'os.path.basename', 'os.path.basename', (['test_file'], {}), '(test_file)\n', (2748, 2759), False, 'import os\n')] |
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
import re
from ansible.errors import AnsibleFilterError
from ansible_collections.ansible.windows.plugins.filter.quote import quote
def test_invalid_shell_type():
    """An unknown shell name must raise AnsibleFilterError with a clear message."""
    expected = "Invalid shell specified, valid shell are None, 'cmd', or 'powershell'"
    pattern = re.escape(expected)
    with pytest.raises(AnsibleFilterError, match=pattern):
        quote('abc', shell='fake')
@pytest.mark.parametrize('value, expected', [
    # Cases derived from Microsoft's C/argv command-line parsing rules.
    # https://docs.microsoft.com/en-us/cpp/c-language/parsing-c-command-line-arguments?view=vs-2019
    (['a b c', 'd', 'e'], r'"a b c" d e'),
    (['ab"c', '\\', 'd'], r'"ab\"c" \ d'),
    ([r'a\\\b', 'de fg', 'h'], r'a\\\b "de fg" h'),
    ([r'a\\b c', 'd', 'e'], r'"a\\b c" d e'),
    # Cases from David Deley's write-up on Windows process parameters.
    # http://daviddeley.com/autohotkey/parameters/parameters.htm#WINCREATE
    ('CallMeIshmael', r'CallMeIshmael'),
    ('Call Me Ishmael', r'"Call Me Ishmael"'),
    ('CallMe"Ishmael', r'"CallMe\"Ishmael"'),
    ('Call Me Ishmael\\', r'"Call Me Ishmael\\"'),
    (r'CallMe\"Ishmael', r'"CallMe\\\"Ishmael"'),
    (r'a\\\b', r'a\\\b'),
    ('C:\\TEST A\\', r'"C:\TEST A\\"'),
    (r'"C:\TEST A\"', r'"\"C:\TEST A\\\""'),
    # Other tests
    # A list is quoted per element and space-joined; a dict becomes key=value.
    (['C:\\Program Files\\file\\', 'arg with " quote'], r'"C:\Program Files\file\\" "arg with \" quote"'),
    ({'key': 'abc'}, r'key=abc'),
    ({'KEY2': 'a b c'}, r'KEY2="a b c"'),
    ({'Key3': r'a\\b c \" def "'}, r'Key3="a\\b c \\\" def \""'),
    ('{"a": ["b", "c' + "'" + ' d", "d\\"e"], "f": "g\\\\\\"g\\\\i\\""}',
     '"{\\"a\\": [\\"b\\", \\"c' + "'" + ' d\\", \\"d\\\\\\"e\\"], \\"f\\": \\"g\\\\\\\\\\\\\\"g\\\\i\\\\\\"\\"}"'),
    # None and empty strings quote to an empty pair of double quotes.
    (None, '""'),
    ('', '""'),
    (['', None, ''], '"" "" ""'),
])
def test_quote_c(value, expected):
    """quote() without a shell argument applies C/argv quoting rules."""
    actual = quote(value)
    assert expected == actual
@pytest.mark.parametrize('value, expected', [
    ('arg1', 'arg1'),
    # None and empty strings quote to an empty pair of double quotes.
    (None, '""'),
    ('', '""'),
    ('arg1 and 2', '^"arg1 and 2^"'),
    # cmd metacharacters (&, %, >, ^) must be caret-escaped inside the quotes.
    ('malicious argument\\"&whoami', '^"malicious argument\\\\^"^&whoami^"'),
    ('C:\\temp\\some ^%file% > nul', '^"C:\\temp\\some ^^^%file^% ^> nul^"'),
])
def test_quote_cmd(value, expected):
    """quote() with shell='cmd' caret-escapes cmd.exe metacharacters."""
    actual = quote(value, shell='cmd')
    assert expected == actual
@pytest.mark.parametrize('value, expected', [
    ('arg1', "'arg1'"),
    # None and empty strings quote to an empty pair of single quotes.
    (None, "''"),
    ('', "''"),
    ('Double " quotes', "'Double \" quotes'"),
    # Embedded single quotes (including the Unicode quote variants below)
    # are escaped by doubling them inside a single-quoted string.
    ("Single ' quotes", "'Single '' quotes'"),
    ("'Multiple '''' single '' quotes '", "'''Multiple '''''''' single '''' quotes '''"),
    (u"a'b\u2018c\u2019d\u201ae\u201bf", u"'a''b\u2018\u2018c\u2019\u2019d\u201a\u201ae\u201b\u201bf'")
])
def test_quote_powershell(value, expected):
    """quote() with shell='powershell' single-quotes and doubles embedded quotes."""
    actual = quote(value, shell='powershell')
    assert expected == actual
| [
"pytest.mark.parametrize",
"ansible_collections.ansible.windows.plugins.filter.quote.quote",
"re.escape"
] | [((617, 1710), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value, expected"""', '[([\'a b c\', \'d\', \'e\'], \'"a b c" d e\'), ([\'ab"c\', \'\\\\\', \'d\'],\n \'"ab\\\\"c" \\\\ d\'), ([\'a\\\\\\\\\\\\b\', \'de fg\', \'h\'], \'a\\\\\\\\\\\\b "de fg" h\'), (\n [\'a\\\\\\\\b c\', \'d\', \'e\'], \'"a\\\\\\\\b c" d e\'), (\'CallMeIshmael\',\n \'CallMeIshmael\'), (\'Call Me Ishmael\', \'"Call Me Ishmael"\'), (\n \'CallMe"Ishmael\', \'"CallMe\\\\"Ishmael"\'), (\'Call Me Ishmael\\\\\',\n \'"Call Me Ishmael\\\\\\\\"\'), (\'CallMe\\\\"Ishmael\', \'"CallMe\\\\\\\\\\\\"Ishmael"\'\n ), (\'a\\\\\\\\\\\\b\', \'a\\\\\\\\\\\\b\'), (\'C:\\\\TEST A\\\\\', \'"C:\\\\TEST A\\\\\\\\"\'), (\n \'"C:\\\\TEST A\\\\"\', \'"\\\\"C:\\\\TEST A\\\\\\\\\\\\""\'), ([\n \'C:\\\\Program Files\\\\file\\\\\', \'arg with " quote\'],\n \'"C:\\\\Program Files\\\\file\\\\\\\\" "arg with \\\\" quote"\'), ({\'key\': \'abc\'},\n \'key=abc\'), ({\'KEY2\': \'a b c\'}, \'KEY2="a b c"\'), ({\'Key3\':\n \'a\\\\\\\\b c \\\\" def "\'}, \'Key3="a\\\\\\\\b c \\\\\\\\\\\\" def \\\\""\'), (\n \'{"a": ["b", "c\' + "\'" + \' d", "d\\\\"e"], "f": "g\\\\\\\\\\\\"g\\\\\\\\i\\\\""}\', \n \'"{\\\\"a\\\\": [\\\\"b\\\\", \\\\"c\' + "\'" +\n \' d\\\\", \\\\"d\\\\\\\\\\\\"e\\\\"], \\\\"f\\\\": \\\\"g\\\\\\\\\\\\\\\\\\\\\\\\\\\\"g\\\\\\\\i\\\\\\\\\\\\"\\\\"}"\'\n ), (None, \'""\'), (\'\', \'""\'), ([\'\', None, \'\'], \'"" "" ""\')]'], {}), '(\'value, expected\', [([\'a b c\', \'d\', \'e\'],\n \'"a b c" d e\'), ([\'ab"c\', \'\\\\\', \'d\'], \'"ab\\\\"c" \\\\ d\'), ([\'a\\\\\\\\\\\\b\',\n \'de fg\', \'h\'], \'a\\\\\\\\\\\\b "de fg" h\'), ([\'a\\\\\\\\b c\', \'d\', \'e\'],\n \'"a\\\\\\\\b c" d e\'), (\'CallMeIshmael\', \'CallMeIshmael\'), (\n \'Call Me Ishmael\', \'"Call Me Ishmael"\'), (\'CallMe"Ishmael\',\n \'"CallMe\\\\"Ishmael"\'), (\'Call Me Ishmael\\\\\', \'"Call Me Ishmael\\\\\\\\"\'),\n (\'CallMe\\\\"Ishmael\', \'"CallMe\\\\\\\\\\\\"Ishmael"\'), (\'a\\\\\\\\\\\\b\', \'a\\\\\\\\\\\\b\'\n ), (\'C:\\\\TEST 
A\\\\\', \'"C:\\\\TEST A\\\\\\\\"\'), (\'"C:\\\\TEST A\\\\"\',\n \'"\\\\"C:\\\\TEST A\\\\\\\\\\\\""\'), ([\'C:\\\\Program Files\\\\file\\\\\',\n \'arg with " quote\'],\n \'"C:\\\\Program Files\\\\file\\\\\\\\" "arg with \\\\" quote"\'), ({\'key\': \'abc\'},\n \'key=abc\'), ({\'KEY2\': \'a b c\'}, \'KEY2="a b c"\'), ({\'Key3\':\n \'a\\\\\\\\b c \\\\" def "\'}, \'Key3="a\\\\\\\\b c \\\\\\\\\\\\" def \\\\""\'), (\n \'{"a": ["b", "c\' + "\'" + \' d", "d\\\\"e"], "f": "g\\\\\\\\\\\\"g\\\\\\\\i\\\\""}\', \n \'"{\\\\"a\\\\": [\\\\"b\\\\", \\\\"c\' + "\'" +\n \' d\\\\", \\\\"d\\\\\\\\\\\\"e\\\\"], \\\\"f\\\\": \\\\"g\\\\\\\\\\\\\\\\\\\\\\\\\\\\"g\\\\\\\\i\\\\\\\\\\\\"\\\\"}"\'\n ), (None, \'""\'), (\'\', \'""\'), ([\'\', None, \'\'], \'"" "" ""\')])\n', (640, 1710), False, 'import pytest\n'), ((1990, 2273), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value, expected"""', '[(\'arg1\', \'arg1\'), (None, \'""\'), (\'\', \'""\'), (\'arg1 and 2\',\n \'^"arg1 and 2^"\'), (\'malicious argument\\\\"&whoami\',\n \'^"malicious argument\\\\\\\\^"^&whoami^"\'), (\n \'C:\\\\temp\\\\some ^%file% > nul\', \'^"C:\\\\temp\\\\some ^^^%file^% ^> nul^"\')]'], {}), '(\'value, expected\', [(\'arg1\', \'arg1\'), (None, \'""\'),\n (\'\', \'""\'), (\'arg1 and 2\', \'^"arg1 and 2^"\'), (\n \'malicious argument\\\\"&whoami\', \'^"malicious argument\\\\\\\\^"^&whoami^"\'),\n (\'C:\\\\temp\\\\some ^%file% > nul\', \'^"C:\\\\temp\\\\some ^^^%file^% ^> nul^"\')])\n', (2013, 2273), False, 'import pytest\n'), ((2397, 2724), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""value, expected"""', '[(\'arg1\', "\'arg1\'"), (None, "\'\'"), (\'\', "\'\'"), (\'Double " quotes\',\n \'\\\'Double " quotes\\\'\'), ("Single \' quotes", "\'Single \'\' quotes\'"), (\n "\'Multiple \'\'\'\' single \'\' quotes \'",\n "\'\'\'Multiple \'\'\'\'\'\'\'\' single \'\'\'\' quotes \'\'\'"), (u"a\'b‘c’d‚e‛f",\n u"\'a\'\'b‘‘c’’d‚‚e‛‛f\'")]'], {}), '(\'value, expected\', [(\'arg1\', "\'arg1\'"), (None, 
"\'\'"\n ), (\'\', "\'\'"), (\'Double " quotes\', \'\\\'Double " quotes\\\'\'), (\n "Single \' quotes", "\'Single \'\' quotes\'"), (\n "\'Multiple \'\'\'\' single \'\' quotes \'",\n "\'\'\'Multiple \'\'\'\'\'\'\'\' single \'\'\'\' quotes \'\'\'"), (u"a\'b‘c’d‚e‛f",\n u"\'a\'\'b‘‘c’’d‚‚e‛‛f\'")])\n', (2420, 2724), False, 'import pytest\n'), ((1944, 1956), 'ansible_collections.ansible.windows.plugins.filter.quote.quote', 'quote', (['value'], {}), '(value)\n', (1949, 1956), False, 'from ansible_collections.ansible.windows.plugins.filter.quote import quote\n'), ((2338, 2363), 'ansible_collections.ansible.windows.plugins.filter.quote.quote', 'quote', (['value'], {'shell': '"""cmd"""'}), "(value, shell='cmd')\n", (2343, 2363), False, 'from ansible_collections.ansible.windows.plugins.filter.quote import quote\n'), ((2848, 2880), 'ansible_collections.ansible.windows.plugins.filter.quote.quote', 'quote', (['value'], {'shell': '"""powershell"""'}), "(value, shell='powershell')\n", (2853, 2880), False, 'from ansible_collections.ansible.windows.plugins.filter.quote import quote\n'), ((587, 613), 'ansible_collections.ansible.windows.plugins.filter.quote.quote', 'quote', (['"""abc"""'], {'shell': '"""fake"""'}), "('abc', shell='fake')\n", (592, 613), False, 'from ansible_collections.ansible.windows.plugins.filter.quote import quote\n'), ((557, 576), 're.escape', 're.escape', (['expected'], {}), '(expected)\n', (566, 576), False, 'import re\n')] |
import os, glob
import numpy as np
import pandas as pd
from multiprocessing import Pool
from PIL import Image
from tqdm import tqdm
from matplotlib.figure import Figure
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import tkinter as tk
import warnings
warnings.filterwarnings("ignore")
import torch
from torchvision import transforms
from utils import get_next_day, mkdirs, psd2im
from utils import get_instance_segmentation_model
from utils import reshape_mask
from utils import get_GR, get_SE
class NPFDetection(object):
    """Detect new-particle-formation (NPF) events in particle size distribution data.

    Workflow: render one-/two-day NPF images from the station CSV, run a
    pre-trained Mask R-CNN on the images to obtain object masks, optionally
    inspect/select masks in a small tkinter GUI, then derive start/end times
    and growth rates (GR) from the selected masks.
    """
    def __init__(self, opt):
        super().__init__()
        self.opt = opt
        # Use roughly half the machine's cores (plus one) for multiprocessing.
        self.cpu_count = os.cpu_count() // 2 + 1
        self.dataroot = os.path.join(opt.dataroot, opt.station)
        self.station = opt.station
        # With dynamic_vmax, the color-scale upper bound is chosen per image.
        self.vmax = None if opt.dynamic_vmax else opt.vmax
        self.tm_res = opt.time_res
        # Station data CSV: first column is the timestamp index.
        self.df = pd.read_csv(os.path.join(self.dataroot, self.station+'.csv'), parse_dates=[0], index_col=0)
        self.days = sorted(np.unique(self.df.index.date.astype(str)).tolist())
        print(f'There are {len(self.days)} days of data to be processed.')
        self.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
        # Pointer into self.keys used by the mask-inspection GUI.
        self.key_index = 0

    def draw_one_day_images(self):
        """Draw NPF images with one-day unit"""
        self.savefp = os.path.join(self.dataroot, 'images', 'one_day')
        mkdirs(self.savefp)
        self.dimg = 1
        if self.cpu_count >= 8:
            with Pool(self.cpu_count) as p:
                p.map(self.draw_image, self.days)
        else:
            for day in tqdm(self.days):
                self.draw_image(day)

    def draw_two_day_images(self):
        """Draw NPF images with two-day unit"""
        self.savefp = os.path.join(self.dataroot, 'images', 'two_day')
        mkdirs(self.savefp)
        self.dimg = 2
        if self.cpu_count >= 8:
            with Pool(self.cpu_count) as p:
                p.map(self.draw_image, self.days)
        else:
            for day in tqdm(self.days):
                self.draw_image(day)

    def draw_image(self, day):
        """Draw one NPF image for `day` (skips days already rendered).

        Uses self.dimg (set by the draw_*_day_images callers) to decide
        between a one-day or a two-day plot.
        """
        if self.dimg == 1:
            if not os.path.exists(os.path.join(self.savefp, day+'.png')):
                try:
                    psd2im(self.df.loc[day], use_xaxis=False, use_yaxis=False, vmax=self.vmax, savefp=self.savefp, show_figure=False)
                except Exception:
                    print(f'Cannot draw the NPF image for current day {day}.')
        elif self.dimg == 2:
            day_ = get_next_day(day)
            # Only draw the pair when the following day also has data.
            if day_ in self.days and not os.path.exists(os.path.join(self.savefp, day+'_'+day_+'.png')):
                try:
                    psd2im(self.df.loc[day:day_], use_xaxis=False, use_yaxis=False, vmax=self.vmax, savefp=self.savefp, show_figure=False)
                except Exception:
                    print(f'Cannot draw the NPF image for current day {day}_{day_}.')

    def detect_one_day_masks(self):
        """Detect masks for one-day NPF images and save them as one .npy dict."""
        self.load_model()
        size = (self.opt.im_size, self.opt.im_size)
        res = {}
        for im_path in glob.glob(os.path.join(self.dataroot, 'images/one_day')+'/*.png'):
            mask = self.detect_mask(im_path, size)
            if mask is not None:
                res.update(mask)
        print(f'Detected {len(res)} one-day masks whose scores are higher than {self.opt.scores:.2f}.')
        savefp = os.path.join(self.dataroot, 'masks')
        mkdirs(savefp)
        np.save(os.path.join(savefp, 'one_day.npy'), res)

    def detect_two_day_masks(self):
        """Detect masks for two-day NPF images and save them as one .npy dict."""
        self.load_model()
        # Two-day images are twice as wide as one-day images.
        size = (self.opt.im_size*2, self.opt.im_size)
        res = {}
        for im_path in glob.glob(os.path.join(self.dataroot, 'images/two_day')+'/*.png'):
            mask = self.detect_mask(im_path, size)
            if mask is not None:
                res.update(mask)
        print(f'Detected {len(res)} two-day masks whose scores are higher than {self.opt.scores:.2f}.')
        savefp = os.path.join(self.dataroot, 'masks')
        mkdirs(savefp)
        np.save(os.path.join(savefp, 'two_day.npy'), res)

    def load_model(self):
        """Load the pre-trained Mask R-CNN, move it to the device, set eval mode."""
        self.model = get_instance_segmentation_model()
        self.model.load_state_dict(torch.load(f'{self.opt.ckpt_dir}/{self.opt.model_name}'))
        self.model.to(self.device)
        self.model.eval()

    @torch.no_grad()
    def detect_mask(self, im_path, size):
        """Run the model on one image; return {day: masks} or None.

        Only detections with score >= opt.scores are kept; masks are
        binarized at opt.mask_thres.
        """
        # NOTE(review): Image.ANTIALIAS is deprecated in newer Pillow
        # (use Image.LANCZOS) -- kept for the Pillow version in use.
        im = Image.open(im_path).convert('RGB').resize(size, Image.ANTIALIAS)
        ts = transforms.ToTensor()(im)
        out = self.model([ts.to(self.device)])[0]
        if len(out['scores']) == 0:
            return None
        else:
            idx_bool = out['scores'].cpu().numpy() >= self.opt.scores
            index = [i for i, item in enumerate(idx_bool) if item]
            if len(index) == 0:
                return None
            else:
                masks = out['masks'][index].squeeze(1).cpu().numpy() >= self.opt.mask_thres
                # Key the result by the (first) date encoded in the file name.
                day = im_path.split(os.sep)[-1].split('.')[0].split('_')[0]
                return {day: masks}

    def visualize_masks(self):
        """Open a tkinter GUI to browse detected one-/two-day masks and pick one."""
        self.masks_oneday = np.load(os.path.join(self.dataroot, 'masks', 'one_day.npy'), allow_pickle=True).tolist()
        self.masks_twoday = np.load(os.path.join(self.dataroot, 'masks', 'two_day.npy'), allow_pickle=True).tolist()
        self.keys = sorted(list(self.masks_oneday.keys()))
        self.keys_ = sorted(list(self.masks_twoday.keys()))
        self.len_keys = len(self.keys)
        self.win = tk.Tk()
        self.win.title('NPF Detection')
        # Top canvas: one-day image and masks.
        self.fig = Figure(dpi=100)
        self.canvas = FigureCanvasTkAgg(self.fig, master=self.win)
        graph_widget = self.canvas.get_tk_widget()
        graph_widget.grid(row=0, column=0, rowspan=2, columnspan=4, ipadx=200, sticky = tk.NW)
        # Bottom canvas: two-day image and masks.
        self.fig1 = Figure(dpi=100)
        self.canvas1 = FigureCanvasTkAgg(self.fig1, master=self.win)
        graph_widget1 = self.canvas1.get_tk_widget()
        graph_widget1.grid(row=2, column=0, rowspan=2, columnspan=4, ipadx=200, sticky = tk.NW)
        tk.Label(self.win, text='Select the one-day mask (select only one mask currently)').grid(row=0, column=5, columnspan=5, ipadx=50)
        tk.Label(self.win, text='Select the two-day mask (select only one mask currently)').grid(row=2, column=5, columnspan=5, ipadx=50)
        self.plot_next()
        tk.Button(self.win,text="Prev",command=self.plot_prev).grid(row=5,column=3, columnspan=5, sticky=tk.W)
        tk.Button(self.win,text="Next",command=self.plot_next).grid(row=5,column=7, columnspan=5, sticky=tk.W)
        self.win.mainloop()

    def plot(self):
        """Redraw both canvases for the key at self.key_index."""
        self.fig.clear()
        self.fig1.clear()
        self.key = self.keys[self.key_index]
        self.visualize_oneday_mask(self.fig, self.key)
        if self.key in self.keys_:
            self.visualize_twoday_mask(self.fig1, self.key)
        self.canvas.draw_idle()
        self.canvas1.draw_idle()

    def plot_prev(self):
        """Handler for the Prev button."""
        # BUGFIX: `import tkinter as tk` does not load the messagebox
        # submodule; import it explicitly so tk.messagebox below works.
        import tkinter.messagebox
        self.plot()
        # NOTE(review): plot() runs before the index moves, so the pointer
        # lags the displayed key by one step -- verify this is intended.
        self.key_index -= 1
        tk.Label(self.win, text=f'{self.key_index}/{self.len_keys}', fg='blue').grid(row=4, column=7, ipadx=50)
        if self.key_index < 0:
            tk.messagebox.showerror(title='Warning', message='You are at the begining, please click the Next button.')

    def plot_next(self):
        """Handler for the Next button."""
        # BUGFIX: see plot_prev -- tk.messagebox needs an explicit import.
        import tkinter.messagebox
        self.plot()
        self.key_index += 1
        tk.Label(self.win, text=f'{self.key_index}/{self.len_keys}', fg='blue').grid(row=4, column=7, ipadx=50)
        if self.key_index == self.len_keys - 1:
            tk.messagebox.showinfo(title='Warning', message='Good job! All masks have been checked!')

    def visualize_oneday_mask(self, fig, day):
        """Draw the one-day image plus its candidate masks, with checkbuttons."""
        masks = self.masks_oneday[day]
        num_masks = masks.shape[0]
        ax = fig.add_subplot(1, num_masks+1, 1)
        im = Image.open(os.path.join(self.dataroot, 'images/one_day', day+'.png'))
        im = im.resize((self.opt.im_size, self.opt.im_size), Image.ANTIALIAS)
        ax.imshow(np.array(im))
        ax.set_title(day)
        ax.axis('off')
        # plot masks
        for i in range(masks.shape[0]):
            ax = fig.add_subplot(1, num_masks+1, i+2)
            ax.imshow(masks[i], cmap='gray')
            ax.set_title(f'mask {i}')
            ax.axis('off')
        for i in range(5):
            ck_btn = tk.Checkbutton(self.win, text=f'one-day mask {i}')
            ck_btn.grid(row=1, column=5+i, ipadx=10, ipady=5)
            # Bind the button as a default arg so each lambda captures its own widget.
            ck_btn.config(command=lambda btn=ck_btn:self.save_mask(btn))

    def visualize_twoday_mask(self, fig, day):
        """Draw the two-day image plus its candidate masks, with checkbuttons."""
        day_ = get_next_day(day)
        masks_ = self.masks_twoday[day]
        num_masks = masks_.shape[0]
        ax = fig.add_subplot(1, num_masks+1, 1)
        im_ = Image.open(os.path.join(self.dataroot, 'images/two_day', day+'_'+day_+'.png'))
        im_ = im_.resize((self.opt.im_size*2, self.opt.im_size), Image.ANTIALIAS)
        ax.imshow(np.array(im_))
        ax.set_title(day+'_'+day_)
        ax.axis('off')
        for i in range(masks_.shape[0]):
            ax = fig.add_subplot(1, num_masks+1, i+2)
            ax.imshow(masks_[i], cmap='gray')
            ax.set_title(f'mask {i}')
            ax.axis('off')
        for i in range(5):
            ck_btn_ = tk.Checkbutton(self.win, text=f'two-day mask {i}')
            ck_btn_.grid(row=3, column=5+i, ipadx=10, ipady=5)
            ck_btn_.config(command=lambda btn=ck_btn_:self.save_mask(btn))

    def save_mask(self, btn):
        """Save the mask selected via checkbutton `btn` for the current key."""
        text = btn.cget('text')
        # The mask index is the last character of the button label.
        idx = int(text[-1])
        if 'one-day' in text:
            savefp = os.path.join(self.dataroot, 'masks/one_day')
            mkdirs(savefp)
            np.save(os.path.join(savefp, f'{self.key}.npy'), self.masks_oneday[self.key][idx])
        elif 'two-day' in text:
            savefp = os.path.join(self.dataroot, 'masks/two_day')
            mkdirs(savefp)
            np.save(os.path.join(savefp, f'{self.key}.npy'), self.masks_twoday[self.key][idx])

    def get_SE_GR(self, day):
        """Compute start/end times and growth rates for `day`; write a CSV."""
        df = self.df.loc[day]
        mask = np.load(os.path.join(self.dataroot, 'masks/one_day', day+'.npy'), allow_pickle=True)
        mask = reshape_mask(mask, df.shape)
        try:
            st, et = get_SE(df, mask)
            gr_dict = get_GR(df, mask, self.tm_res, savefp=self.savefp, vmax=self.vmax)
        except Exception:  # was bare `except:`; don't swallow SystemExit/KeyboardInterrupt
            # print(day)
            return
        try:
            # Refine start/end with the two-day mask when one exists.
            mask_ = np.load(os.path.join(self.dataroot, 'masks/two_day', day+'.npy'), allow_pickle=True)
            df_ = self.df.loc[day:get_next_day(day)]
            mask_ = reshape_mask(mask_, df_.shape)
            st_two, et_two = get_SE(df_, mask_)
        except Exception:  # fall back to the one-day start/end times
            st_two, et_two = st, et
        save_dict = {**{
            'date': [day],
            'start_time_one': [st],
            'end_time_one': [et],
            'start_time_two': [st_two],
            'end_time_two': [et_two]
        }, **gr_dict}
        pd.DataFrame(save_dict).to_csv(os.path.join(self.savefp, f'{day}.csv'), index=False)

    def save_SE_GR(self):
        r"""
        obtain and save the start time, end time and the growth rates.
        """
        files = sorted(glob.glob(os.path.join(self.dataroot, 'masks/one_day')+'/*.npy'))
        days = [file.split(os.sep)[-1].split('.')[0] for file in files]
        print(f'Calculating growth rates for {len(days)} days.')
        self.savefp = os.path.join(self.dataroot, 'GR')
        mkdirs(self.savefp)
        if self.cpu_count >= 8:
            with Pool(self.cpu_count) as p:
                p.map(self.get_SE_GR, days)
        else:
            for day in tqdm(days):
                self.get_SE_GR(day)
| [
"utils.get_SE",
"utils.get_GR",
"utils.psd2im",
"torch.device",
"torch.no_grad",
"os.path.join",
"utils.get_next_day",
"tkinter.Label",
"utils.mkdirs",
"tkinter.Checkbutton",
"pandas.DataFrame",
"tkinter.Button",
"torch.load",
"matplotlib.figure.Figure",
"tkinter.Tk",
"tqdm.tqdm",
"u... | [((286, 319), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (309, 319), False, 'import warnings\n'), ((4704, 4719), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (4717, 4719), False, 'import torch\n'), ((765, 804), 'os.path.join', 'os.path.join', (['opt.dataroot', 'opt.station'], {}), '(opt.dataroot, opt.station)\n', (777, 804), False, 'import os, glob\n'), ((1457, 1505), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images"""', '"""one_day"""'], {}), "(self.dataroot, 'images', 'one_day')\n", (1469, 1505), False, 'import os, glob\n'), ((1515, 1534), 'utils.mkdirs', 'mkdirs', (['self.savefp'], {}), '(self.savefp)\n', (1521, 1534), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((1893, 1941), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images"""', '"""two_day"""'], {}), "(self.dataroot, 'images', 'two_day')\n", (1905, 1941), False, 'import os, glob\n'), ((1951, 1970), 'utils.mkdirs', 'mkdirs', (['self.savefp'], {}), '(self.savefp)\n', (1957, 1970), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((3641, 3677), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks"""'], {}), "(self.dataroot, 'masks')\n", (3653, 3677), False, 'import os, glob\n'), ((3687, 3701), 'utils.mkdirs', 'mkdirs', (['savefp'], {}), '(savefp)\n', (3693, 3701), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((4285, 4321), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks"""'], {}), "(self.dataroot, 'masks')\n", (4297, 4321), False, 'import os, glob\n'), ((4331, 4345), 'utils.mkdirs', 'mkdirs', (['savefp'], {}), '(savefp)\n', (4337, 4345), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((4505, 4538), 'utils.get_instance_segmentation_model', 'get_instance_segmentation_model', ([], {}), '()\n', (4536, 4538), False, 'from utils import get_instance_segmentation_model\n'), ((5965, 5972), 'tkinter.Tk', 'tk.Tk', ([], {}), '()\n', (5970, 5972), True, 'import 
tkinter as tk\n'), ((6034, 6049), 'matplotlib.figure.Figure', 'Figure', ([], {'dpi': '(100)'}), '(dpi=100)\n', (6040, 6049), False, 'from matplotlib.figure import Figure\n'), ((6073, 6117), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['self.fig'], {'master': 'self.win'}), '(self.fig, master=self.win)\n', (6090, 6117), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg\n'), ((6289, 6304), 'matplotlib.figure.Figure', 'Figure', ([], {'dpi': '(100)'}), '(dpi=100)\n', (6295, 6304), False, 'from matplotlib.figure import Figure\n'), ((6329, 6374), 'matplotlib.backends.backend_tkagg.FigureCanvasTkAgg', 'FigureCanvasTkAgg', (['self.fig1'], {'master': 'self.win'}), '(self.fig1, master=self.win)\n', (6346, 6374), False, 'from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg\n'), ((9084, 9101), 'utils.get_next_day', 'get_next_day', (['day'], {}), '(day)\n', (9096, 9101), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((10679, 10707), 'utils.reshape_mask', 'reshape_mask', (['mask', 'df.shape'], {}), '(mask, df.shape)\n', (10691, 10707), False, 'from utils import reshape_mask\n'), ((11957, 11990), 'os.path.join', 'os.path.join', (['self.dataroot', '"""GR"""'], {}), "(self.dataroot, 'GR')\n", (11969, 11990), False, 'import os, glob\n'), ((12000, 12019), 'utils.mkdirs', 'mkdirs', (['self.savefp'], {}), '(self.savefp)\n', (12006, 12019), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((968, 1018), 'os.path.join', 'os.path.join', (['self.dataroot', "(self.station + '.csv')"], {}), "(self.dataroot, self.station + '.csv')\n", (980, 1018), False, 'import os, glob\n'), ((1268, 1293), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1291, 1293), False, 'import torch\n'), ((1244, 1264), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (1256, 1264), False, 'import torch\n'), ((1299, 1318), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", 
(1311, 1318), False, 'import torch\n'), ((1728, 1743), 'tqdm.tqdm', 'tqdm', (['self.days'], {}), '(self.days)\n', (1732, 1743), False, 'from tqdm import tqdm\n'), ((2164, 2179), 'tqdm.tqdm', 'tqdm', (['self.days'], {}), '(self.days)\n', (2168, 2179), False, 'from tqdm import tqdm\n'), ((3719, 3754), 'os.path.join', 'os.path.join', (['savefp', '"""one_day.npy"""'], {}), "(savefp, 'one_day.npy')\n", (3731, 3754), False, 'import os, glob\n'), ((4363, 4398), 'os.path.join', 'os.path.join', (['savefp', '"""two_day.npy"""'], {}), "(savefp, 'two_day.npy')\n", (4375, 4398), False, 'import os, glob\n'), ((4575, 4631), 'torch.load', 'torch.load', (['f"""{self.opt.ckpt_dir}/{self.opt.model_name}"""'], {}), "(f'{self.opt.ckpt_dir}/{self.opt.model_name}')\n", (4585, 4631), False, 'import torch\n'), ((4925, 4946), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4944, 4946), False, 'from torchvision import transforms\n'), ((7671, 7782), 'tkinter.messagebox.showerror', 'tk.messagebox.showerror', ([], {'title': '"""Warning"""', 'message': '"""You are at the begining, please click the Next button."""'}), "(title='Warning', message=\n 'You are at the begining, please click the Next button.')\n", (7694, 7782), True, 'import tkinter as tk\n'), ((8031, 8125), 'tkinter.messagebox.showinfo', 'tk.messagebox.showinfo', ([], {'title': '"""Warning"""', 'message': '"""Good job! All masks have been checked!"""'}), "(title='Warning', message=\n 'Good job! 
All masks have been checked!')\n", (8053, 8125), True, 'import tkinter as tk\n'), ((8323, 8382), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images/one_day"""', "(day + '.png')"], {}), "(self.dataroot, 'images/one_day', day + '.png')\n", (8335, 8382), False, 'import os, glob\n'), ((8480, 8492), 'numpy.array', 'np.array', (['im'], {}), '(im)\n', (8488, 8492), True, 'import numpy as np\n'), ((8830, 8880), 'tkinter.Checkbutton', 'tk.Checkbutton', (['self.win'], {'text': 'f"""one-day mask {i}"""'}), "(self.win, text=f'one-day mask {i}')\n", (8844, 8880), True, 'import tkinter as tk\n'), ((9255, 9327), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images/two_day"""', "(day + '_' + day_ + '.png')"], {}), "(self.dataroot, 'images/two_day', day + '_' + day_ + '.png')\n", (9267, 9327), False, 'import os, glob\n'), ((9425, 9438), 'numpy.array', 'np.array', (['im_'], {}), '(im_)\n', (9433, 9438), True, 'import numpy as np\n'), ((9766, 9816), 'tkinter.Checkbutton', 'tk.Checkbutton', (['self.win'], {'text': 'f"""two-day mask {i}"""'}), "(self.win, text=f'two-day mask {i}')\n", (9780, 9816), True, 'import tkinter as tk\n'), ((10105, 10149), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks/one_day"""'], {}), "(self.dataroot, 'masks/one_day')\n", (10117, 10149), False, 'import os, glob\n'), ((10163, 10177), 'utils.mkdirs', 'mkdirs', (['savefp'], {}), '(savefp)\n', (10169, 10177), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((10586, 10644), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks/one_day"""', "(day + '.npy')"], {}), "(self.dataroot, 'masks/one_day', day + '.npy')\n", (10598, 10644), False, 'import os, glob\n'), ((10744, 10760), 'utils.get_SE', 'get_SE', (['df', 'mask'], {}), '(df, mask)\n', (10750, 10760), False, 'from utils import get_GR, get_SE\n'), ((10784, 10849), 'utils.get_GR', 'get_GR', (['df', 'mask', 'self.tm_res'], {'savefp': 'self.savefp', 'vmax': 'self.vmax'}), '(df, mask, self.tm_res, 
savefp=self.savefp, vmax=self.vmax)\n', (10790, 10849), False, 'from utils import get_GR, get_SE\n'), ((11110, 11140), 'utils.reshape_mask', 'reshape_mask', (['mask_', 'df_.shape'], {}), '(mask_, df_.shape)\n', (11122, 11140), False, 'from utils import reshape_mask\n'), ((11171, 11189), 'utils.get_SE', 'get_SE', (['df_', 'mask_'], {}), '(df_, mask_)\n', (11177, 11189), False, 'from utils import get_GR, get_SE\n'), ((11521, 11560), 'os.path.join', 'os.path.join', (['self.savefp', 'f"""{day}.csv"""'], {}), "(self.savefp, f'{day}.csv')\n", (11533, 11560), False, 'import os, glob\n'), ((12184, 12194), 'tqdm.tqdm', 'tqdm', (['days'], {}), '(days)\n', (12188, 12194), False, 'from tqdm import tqdm\n'), ((716, 730), 'os.cpu_count', 'os.cpu_count', ([], {}), '()\n', (728, 730), False, 'import os, glob\n'), ((1611, 1631), 'multiprocessing.Pool', 'Pool', (['self.cpu_count'], {}), '(self.cpu_count)\n', (1615, 1631), False, 'from multiprocessing import Pool\n'), ((2047, 2067), 'multiprocessing.Pool', 'Pool', (['self.cpu_count'], {}), '(self.cpu_count)\n', (2051, 2067), False, 'from multiprocessing import Pool\n'), ((2711, 2728), 'utils.get_next_day', 'get_next_day', (['day'], {}), '(day)\n', (2723, 2728), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((3341, 3386), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images/one_day"""'], {}), "(self.dataroot, 'images/one_day')\n", (3353, 3386), False, 'import os, glob\n'), ((3985, 4030), 'os.path.join', 'os.path.join', (['self.dataroot', '"""images/two_day"""'], {}), "(self.dataroot, 'images/two_day')\n", (3997, 4030), False, 'import os, glob\n'), ((6537, 6625), 'tkinter.Label', 'tk.Label', (['self.win'], {'text': '"""Select the one-day mask (select only one mask currently)"""'}), "(self.win, text=\n 'Select the one-day mask (select only one mask currently)')\n", (6545, 6625), True, 'import tkinter as tk\n'), ((6676, 6764), 'tkinter.Label', 'tk.Label', (['self.win'], {'text': '"""Select the two-day mask (select 
only one mask currently)"""'}), "(self.win, text=\n 'Select the two-day mask (select only one mask currently)')\n", (6684, 6764), True, 'import tkinter as tk\n'), ((6843, 6899), 'tkinter.Button', 'tk.Button', (['self.win'], {'text': '"""Prev"""', 'command': 'self.plot_prev'}), "(self.win, text='Prev', command=self.plot_prev)\n", (6852, 6899), True, 'import tkinter as tk\n'), ((6955, 7011), 'tkinter.Button', 'tk.Button', (['self.win'], {'text': '"""Next"""', 'command': 'self.plot_next'}), "(self.win, text='Next', command=self.plot_next)\n", (6964, 7011), True, 'import tkinter as tk\n'), ((7522, 7593), 'tkinter.Label', 'tk.Label', (['self.win'], {'text': 'f"""{self.key_index}/{self.len_keys}"""', 'fg': '"""blue"""'}), "(self.win, text=f'{self.key_index}/{self.len_keys}', fg='blue')\n", (7530, 7593), True, 'import tkinter as tk\n'), ((7865, 7936), 'tkinter.Label', 'tk.Label', (['self.win'], {'text': 'f"""{self.key_index}/{self.len_keys}"""', 'fg': '"""blue"""'}), "(self.win, text=f'{self.key_index}/{self.len_keys}', fg='blue')\n", (7873, 7936), True, 'import tkinter as tk\n'), ((10199, 10238), 'os.path.join', 'os.path.join', (['savefp', 'f"""{self.key}.npy"""'], {}), "(savefp, f'{self.key}.npy')\n", (10211, 10238), False, 'import os, glob\n'), ((10329, 10373), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks/two_day"""'], {}), "(self.dataroot, 'masks/two_day')\n", (10341, 10373), False, 'import os, glob\n'), ((10387, 10401), 'utils.mkdirs', 'mkdirs', (['savefp'], {}), '(savefp)\n', (10393, 10401), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((10958, 11016), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks/two_day"""', "(day + '.npy')"], {}), "(self.dataroot, 'masks/two_day', day + '.npy')\n", (10970, 11016), False, 'import os, glob\n'), ((11490, 11513), 'pandas.DataFrame', 'pd.DataFrame', (['save_dict'], {}), '(save_dict)\n', (11502, 11513), True, 'import pandas as pd\n'), ((12073, 12093), 'multiprocessing.Pool', 'Pool', 
(['self.cpu_count'], {}), '(self.cpu_count)\n', (12077, 12093), False, 'from multiprocessing import Pool\n'), ((2349, 2388), 'os.path.join', 'os.path.join', (['self.savefp', "(day + '.png')"], {}), "(self.savefp, day + '.png')\n", (2361, 2388), False, 'import os, glob\n'), ((2432, 2549), 'utils.psd2im', 'psd2im', (['self.df.loc[day]'], {'use_xaxis': '(False)', 'use_yaxis': '(False)', 'vmax': 'self.vmax', 'savefp': 'self.savefp', 'show_figure': '(False)'}), '(self.df.loc[day], use_xaxis=False, use_yaxis=False, vmax=self.vmax,\n savefp=self.savefp, show_figure=False)\n', (2438, 2549), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((5577, 5628), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks"""', '"""one_day.npy"""'], {}), "(self.dataroot, 'masks', 'one_day.npy')\n", (5589, 5628), False, 'import os, glob\n'), ((5695, 5746), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks"""', '"""two_day.npy"""'], {}), "(self.dataroot, 'masks', 'two_day.npy')\n", (5707, 5746), False, 'import os, glob\n'), ((10423, 10462), 'os.path.join', 'os.path.join', (['savefp', 'f"""{self.key}.npy"""'], {}), "(savefp, f'{self.key}.npy')\n", (10435, 10462), False, 'import os, glob\n'), ((11070, 11087), 'utils.get_next_day', 'get_next_day', (['day'], {}), '(day)\n', (11082, 11087), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((11737, 11781), 'os.path.join', 'os.path.join', (['self.dataroot', '"""masks/one_day"""'], {}), "(self.dataroot, 'masks/one_day')\n", (11749, 11781), False, 'import os, glob\n'), ((2878, 3001), 'utils.psd2im', 'psd2im', (['self.df.loc[day:day_]'], {'use_xaxis': '(False)', 'use_yaxis': '(False)', 'vmax': 'self.vmax', 'savefp': 'self.savefp', 'show_figure': '(False)'}), '(self.df.loc[day:day_], use_xaxis=False, use_yaxis=False, vmax=self.\n vmax, savefp=self.savefp, show_figure=False)\n', (2884, 3001), False, 'from utils import get_next_day, mkdirs, psd2im\n'), ((4846, 4865), 'PIL.Image.open', 'Image.open', (['im_path'], 
{}), '(im_path)\n', (4856, 4865), False, 'from PIL import Image\n'), ((2786, 2838), 'os.path.join', 'os.path.join', (['self.savefp', "(day + '_' + day_ + '.png')"], {}), "(self.savefp, day + '_' + day_ + '.png')\n", (2798, 2838), False, 'import os, glob\n')] |
#! /usr/bin/env python
import functools
import uuid
from . import session
from . import utils
class Model(object):
    """Base class for persisted objects.

    Attribute assignment is intercepted by __setattr__: setting ``done``
    on a freshly built instance registers it with the shared session, and
    any later attribute change pushes an update to the session.
    """
    # Shared session used by every Model instance for persistence.
    session = session.Session()
    def __init__(self, data=None):
        # Unique identifier for this instance.
        self._id = str(uuid.uuid1())
        # True while a @with_update-decorated method runs; suppresses the
        # per-attribute session update in __setattr__.
        self.in_progress = False
        # True when the instance was rebuilt from existing data and must
        # not be re-added to the session.
        self.read_only = False
        self.update = False
        if data:
            self.read_only = True
            # Populate attributes from `data` according to the
            # subclass-provided JSON_SCHEMA / EXCLUDED_KEYS.
            # NOTE: 'done' is not set yet, so these setattr calls do not
            # trigger session updates in __setattr__.
            for k, v in utils.class_from_dict(
                type(self).__name__, data,
                self.JSON_SCHEMA,
                self.EXCLUDED_KEYS).items():
                setattr(self, k, v)
        # Marks construction complete; triggers session.add in __setattr__.
        self.done = True
    def __setattr__(self, key, value):
        """Store the attribute and mirror the change into the session."""
        # Write straight into __dict__ to avoid recursing into __setattr__.
        self.__dict__[key] = value
        if key == 'done':
            if not self.read_only:
                # First completion of a new instance: register it.
                Model.session.add(self)
        elif hasattr(self, 'done') and not self.in_progress:
            # Attribute changed after construction: push an update,
            # unless a @with_update method is batching changes.
            self.__dict__['update'] = True
            Model.session.update(self)
    def __del__(self):
        # Remove the instance from the session on garbage collection.
        Model.session.delete(self)
    def id(self):
        """Return this instance's unique id string."""
        return self._id
    def to_dict(self):
        """Return the serializable representation of this instance."""
        return {
            '_id': self._id
        }
def with_update(f):
    """Decorator for Model methods that mutate several attributes.

    While the wrapped method runs, ``in_progress`` suppresses the
    per-attribute session updates done by Model.__setattr__; afterwards a
    single consolidated session update is pushed.
    """
    @functools.wraps(f)
    def _wrapper(target, *args, **kwargs):
        # Batch: silence __setattr__'s automatic updates during the call.
        target.__dict__['in_progress'] = True
        outcome = f(target, *args, **kwargs)
        target.__dict__['in_progress'] = False
        # Flush: mark dirty and push exactly one update to the session.
        target.__dict__['update'] = True
        Model.session.update(target)
        return outcome
    return _wrapper
| [
"uuid.uuid1",
"functools.wraps"
] | [((1147, 1165), 'functools.wraps', 'functools.wraps', (['f'], {}), '(f)\n', (1162, 1165), False, 'import functools\n'), ((211, 223), 'uuid.uuid1', 'uuid.uuid1', ([], {}), '()\n', (221, 223), False, 'import uuid\n')] |
"""
Copied from WRF_SPC.py Sep 20, 2019.
Given a model initialization time and a valid time, plot crefuh around hagelslag objects.
"""
import argparse
import datetime
import pdb
import os
import sys
import pandas as pd
import numpy as np
import fieldinfo # levels and color tables - Adapted from /glade/u/home/wrfrt/wwe/python_scripts/fieldinfo.py 20190125.
from wrf import to_np, getvar, get_cartopy, latlon_coords
from metpy.units import units
from netCDF4 import Dataset
import cartopy
import matplotlib
matplotlib.use("Agg") # allows dav slurm jobs
import matplotlib.pyplot as plt
import matplotlib.colors as colors
# =============Arguments===================
parser = argparse.ArgumentParser(description = "Plot WRF and SPC storm reports",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-f", "--fill", type=str, default= 'crefuh', help='netCDF variable name for contour fill field')
parser.add_argument("-b", "--barb", choices=["shr06", "wind10m",""], type=str, default="wind10m", help='wind barbs')
parser.add_argument("-c", "--contour", type=str, default=None, help='contour field')
parser.add_argument("-o", "--outdir", type=str, default='.', help="name of output path")
parser.add_argument("-p", "--padding", type=float, nargs=4, help="padding on west, east, south and north side in km",
        default=[175.,175.,175.,175.])
parser.add_argument("--timeshift", type=int, default=0, help="hours to shift background field")
parser.add_argument("--arrow", action='store_true', help="Add storm motion vector from hagelslag")
# NOTE: argparse maps dashed options to underscored attributes
# (--no-fineprint -> args.no_fineprint, etc.).
parser.add_argument("--no-fineprint", action='store_true', help="Don't write image details at bottom")
parser.add_argument("--force_new", action='store_true', help="overwrite any old outfile, if it exists")
parser.add_argument("--no-counties", action='store_true', help="Don't draw county borders (can be slow)")
parser.add_argument("--no-mask", action='store_true', help="Don't draw object mask")
parser.add_argument('-i', "--idir", type=str, default="/glade/p/mmm/parc/sobash/NSC/3KM_WRF_POST_12sec_ts",
        help="path to WRF output files")
parser.add_argument('-s', "--stride", type=int, default=1, help="plot every stride points. speed things up with stride>1")
parser.add_argument('-t', "--trackdir", type=str, default="/glade/scratch/ahijevyc/track_data_ncarstorm_3km_REFL_1KM_AGL_csv",
        help="path to hagelslag track-step files")
parser.add_argument("--patchdir", type=str, default="/glade/scratch/ahijevyc/track_data_ncarstorm_3km_REFL_1KM_AGL_nc",
        help="path to hagelslag netCDF patches")
# Positional times are parsed straight into datetime objects.
parser.add_argument("initial_time", type=lambda d: datetime.datetime.strptime(d, '%Y%m%d%H'),
        help="model initialization date and hour, yyyymmddhh")
parser.add_argument("valid_time", type=lambda d: datetime.datetime.strptime(d, '%Y%m%d%H'),
        help="model valid date and hour, yyyymmddhh")
parser.add_argument("-d", "--debug", action='store_true')
# Assign arguments to simply-named variables
args = parser.parse_args()
barb = args.barb
contour = args.contour
fill = args.fill
odir = args.outdir
padding = args.padding
timeshift = args.timeshift
arrow = args.arrow
no_fineprint = args.no_fineprint
force_new = args.force_new
no_counties = args.no_counties
no_mask = args.no_mask
idir = args.idir
stride = args.stride
patchdir = args.patchdir
trackdir = args.trackdir
initial_time = args.initial_time
valid_time = args.valid_time
debug = args.debug
if debug:
    print(args)
# Derive lead time and warn if it is not between 7 and 36 hours.
# (The hard exit below is intentionally commented out: out-of-range
# lead times only print a warning and processing continues.)
lead_time = valid_time - initial_time
if lead_time < datetime.timedelta(hours=7) or lead_time > datetime.timedelta(hours=36):
    print("lead_time:",lead_time, "not between 7 and 36 hours")
    #sys.exit(1)
def update_scale_labels(scale_xy):
    """Refresh the two scale annotations with the current axis extents.

    Cartopy axes have no set_xlabel()/set_ylabel() methods, so the axis
    lengths are shown via a pair of annotation objects instead.  Each
    label is the span of the corresponding axis in whole kilometers,
    e.g. "250km".  Reads the module-level ``ax``.

    scale_xy: (x_annotation, y_annotation) pair created at figure setup.
    """
    x_annot, y_annot = scale_xy
    x0, x1 = ax.get_xlim()
    y0, y1 = ax.get_ylim()
    # Axis limits are in meters (WRF projection coordinates); report km.
    x_annot.set_text("%dkm" % round((x1 - x0) / 1000.))
    y_annot.set_text("%dkm" % round((y1 - y0) / 1000.))
# Read hagelslag track_step csv file into pandas DataFrame.
mysterious_suffix = '' # '_13' or '_12'
tracks = trackdir + '/' + initial_time.strftime('track_step_NCARSTORM_d01_%Y%m%d-%H%M')+mysterious_suffix+'.csv'
if debug:
    print("reading csv file",tracks)
df = pd.read_csv(tracks, parse_dates=['Run_Date', 'Valid_Date'])
# Throw out everything except requested valid times.
df = df[df.Valid_Date == valid_time]
if df.empty:
    print("csv track step file", tracks, " has no objects at requested valid time",valid_time,". That is probably fine.")
    sys.exit(0)
# Throw out weak UH objects
# Keep an object if max updraft helicity (or, when present, the magnitude
# of the minimum UH) reaches the good_UH threshold.
good_UH = 25
igood_UH = df['UP_HELI_MAX_max'] >= good_UH
if 'UP_HELI_MIN_min' in df.columns:
    igood_UH = igood_UH | (df['UP_HELI_MIN_min'].abs() >= good_UH)
print("ignoring",(~igood_UH).sum(),"object with abs(UH) <",good_UH)
if debug:
    if 'UP_HELI_MIN_min' in df.columns:
        print(df[~igood_UH][["Step_ID","UP_HELI_MAX_max","UP_HELI_MIN_min"]])
    else:
        print(df[~igood_UH][["Step_ID","UP_HELI_MAX_max"]])
df = df[igood_UH]
if df.empty:
    print("csv track step file", tracks, " has no good UH objects at requested valid time",valid_time,". That is probably fine.")
    sys.exit(0)
# List of all png files that will be created.
pngfiles = odir + '/' + df.Step_ID + "_" + "{:+1.0f}".format(timeshift) + ".png"
if all([os.path.isfile(p) for p in pngfiles]) and not force_new:
    # Exit if pngs all already exist and force_new option was not used.
    print(initial_time, valid_time, "{:+1.0f}".format(timeshift) +"h",fill,"finished. Moving on.")
    sys.exit(0)
if not no_mask:
    # Read netCDF patches
    # These parallel arrays are indexed later by matching track_id/track_step.
    patches = patchdir + '/' + initial_time.strftime('NCARSTORM_%Y%m%d-%H%M_d01_model_patches.nc')
    pnc = Dataset(patches,'r')
    masks = pnc.variables["masks"][:]
    mlons = pnc.variables["lon"][:]
    mlats = pnc.variables["lat"][:]
    mtrack_ids = pnc.variables["track_id"][:]
    mtrack_steps = pnc.variables["track_step"][:]
    mask_centroid_lats = pnc.variables["centroid_lat"][:]
    mask_centroid_lons = pnc.variables["centroid_lon"][:]
    pnc.close()
# Get color map, levels, and netCDF variable name appropriate for requested variable (from fieldinfo module).
info = fieldinfo.nsc[fill]
if debug:
    print("found nsc in fieldinfo.py. Using",info)
cmap = colors.ListedColormap(info['cmap'])
levels = info['levels']
# Rebind `fill` from the user-facing field name to the netCDF variable name.
fill = info['fname'][0]
# Get wrfout filename
history_time = valid_time + datetime.timedelta(hours=timeshift)
wrfout = idir + '/' + initial_time.strftime('%Y%m%d%H') + '/' + history_time.strftime('diags_d01_%Y-%m-%d_%H_%M_%S.nc')
if debug: print("About to open "+wrfout)
wrfnc = Dataset(wrfout,"r")
if fill not in wrfnc.variables:
    print("variable "+ fill + " not found")
    print("choices:", wrfnc.variables.keys())
    sys.exit(1)
# Get a 2D var from wrfout file. It has projection info.
if debug:
    print("getvar...")
cvar = getvar(wrfnc,fill)
wrflat, wrflon = latlon_coords(cvar)
# get cartopy mapping object
if debug: print("get_cartopy...")
WRF_proj = get_cartopy(cvar)
# NOTE(review): fineprint0 captures cvar.units BEFORE the optional unit
# conversion below, so it may show the pre-conversion units — confirm intent.
fineprint0 = 'fill '+fill+" ("+cvar.units+") "
if 'units' in info.keys():
    cvar.metpy.convert_units(info['units'])
if hasattr(cvar, 'long_name'):
    label = cvar.long_name
elif hasattr(cvar, 'description'):
    label = cvar.description
# convert WRF lat/lons to x,y
pts = WRF_proj.transform_points(cartopy.crs.PlateCarree(), to_np(wrflon[::stride,::stride]), to_np(wrflat[::stride,::stride])) # Transform lon/lat to x and y (in meters) in WRF projection.
x, y, z = pts[:,:,0], pts[:,:,1], pts[:,:,2]
fig = plt.figure(figsize=(10,10))
if debug: print("plt.axes()")
ax = plt.axes(projection=WRF_proj)
# State borders for geographic reference.
ax.add_feature(cartopy.feature.STATES.with_scale('10m'), linewidth=0.35, alpha=0.55)
# Set title (month and hour)
ax.set_title(history_time.strftime("%b %HZ"))
# Empty fineprint placeholder in lower left corner of image.
fineprint_obj = plt.annotate(text=fineprint0, xy=(0,5), xycoords=('axes fraction', 'figure pixels'), va="bottom", fontsize=4)
# Abort if the chosen contour levels cannot intersect the data range.
if cvar.min() > levels[-1] or cvar.max() < levels[0]:
    print('levels',levels,'out of range of cvar', cvar.values.min(), cvar.values.max())
    sys.exit(1)
if debug:
    print('levels:',levels, 'cmap:', cmap.colors)
if debug:
    print("plotting filled contour",cvar.name,"...")
cfill = ax.contourf(x, y, to_np(cvar[::stride,::stride]), levels=levels, cmap=cmap)
# Color bar
cb = plt.colorbar(cfill, ax=ax, format='%.0f', shrink=0.52, orientation='horizontal')
if hasattr(cvar,"units"):
    cb.set_label(label+" ("+cvar.units+")", fontsize="small")
if len(levels) < 10:
    # label every level if there is room.
    cb.set_ticks(levels)
cb.ax.tick_params(labelsize='xx-small')
cb.outline.set_linewidth(0.5)
# Create 2 annotation object placeholders for spatial scale. Will be updated with each set_extent().
scale_kw = {"ha":"center","rotation_mode":"anchor","xycoords":"axes fraction","textcoords":"offset points"}
scale_xy = ( ax.annotate("", (0.5, 0), xytext=(0,-5), va='top', rotation='horizontal', **scale_kw),
             ax.annotate("", (0, 0.5), xytext=(-5,0), va='bottom', rotation='vertical', **scale_kw) )
# Special case of composite reflectivity, UH overlay
# Overlay positive (solid) and negative (dashed) updraft-helicity contours.
if args.fill == 'crefuh':
    max_uh = getvar(wrfnc,info['fname'][1])
    min_uh = getvar(wrfnc,info['fname'][2])
    max_uh_threshold = info['max_threshold']
    min_uh_threshold = info['min_threshold']
    print("UH max:", max_uh.max().values)
    print("UH min:", min_uh.min().values)
    if max_uh.max() > max_uh_threshold:
        print("Filled contour UH >",max_uh_threshold)
        # Don't use contourf if the data fall outside the levels range. You will get ValueError: 'bboxes' cannot be empty.
        # See https://github.com/SciTools/cartopy/issues/1290
        cs1 = ax.contourf(x, y, to_np(max_uh), levels=[max_uh_threshold,1000], colors='black',
                alpha=0.3 )
        if debug: print("solid contour UH >",max_uh_threshold)
        # Solid contours at 1x-5x the threshold.
        cs2 = ax.contour(x, y, to_np(max_uh), levels=max_uh_threshold*np.arange(1,6), colors='black',
                linestyles='solid', linewidths=0.4 )
        fineprint0 += "UH>"+str(max_uh_threshold) +" "+ max_uh.units + " "
        # Oddly, the zero contour is plotted if there are no other valid contours
        if 0.0 in cs2.levels:
            print("uh has zero contour for some reason. Hide it")
            if debug:
                pdb.set_trace()
            for i in cs2.collections: i.remove()
    if min_uh.min() < min_uh_threshold:
        print("Filled UH contour <",min_uh_threshold)
        # Don't use contourf if the data fall outside the levels range. You will get ValueError: 'bboxes' cannot be empty.
        # See https://github.com/SciTools/cartopy/issues/1290
        negUH1 = ax.contourf(x, y, to_np(min_uh), levels=[-1000, min_uh_threshold], colors='black',
                alpha=0.3 )
        if debug: print("dashed contour UH <",min_uh_threshold)
        # Dashed contours at 6x-1x the (negative) threshold, most negative first.
        negUH2 = ax.contour(x, y, to_np(min_uh), levels=min_uh_threshold*np.arange(6,0,-1), colors='black',
                linestyles='dashed', linewidths=0.4 )
        fineprint0 += "UH<"+str(-min_uh_threshold) +" "+ min_uh.units + " "
        if 0.0 in negUH2.levels:
            print("neg uh has a zero contour. Hide it")
            if debug:
                pdb.set_trace()
            for i in negUH2.collections: i.remove()
# Read my own county shape file.
if not no_counties:
    if debug:
        print("About to draw counties")
    reader = cartopy.io.shapereader.Reader('/glade/work/ahijevyc/share/shapeFiles/cb_2013_us_county_500k/cb_2013_us_county_500k.shp')
    counties = list(reader.geometries())
    # Create custom cartopy feature that can be added to the axes.
    COUNTIES = cartopy.feature.ShapelyFeature(counties, cartopy.crs.PlateCarree())
    ax.add_feature(COUNTIES, facecolor="none", edgecolor='black', alpha=0.25, linewidth=0.2)
if barb:
    # Get barb netCDF variable name appropriate for requested variable (from fieldinfo module).
    info = fieldinfo.nsc[barb]
    if debug:
        print("found nsc in fieldinfo.py. Using",info)
    if args.barb == 'wind10m': u,v = getvar(wrfnc, 'uvmet10', units='kt')
    if args.barb == 'shr06':
        # NOTE(review): 1.93 looks like an m/s-to-knots factor (exact value
        # is 1.94384) — confirm whether the rounding is intentional.
        u = getvar(wrfnc, 'USHR6')*1.93
        v = getvar(wrfnc, 'VSHR6')*1.93
        u.attrs['units'] = 'kt'
        v.attrs['units'] = 'kt'
    # Density of barbs stays the same, no matter the domain size (padding)
    # larger domain = greater stride
    skip = int(round(np.max([(padding[0]+padding[1]), (padding[2]+padding[3])])/50))
    # Fade barbs slightly when drawn over the crefuh fill.
    if args.fill == 'crefuh': alpha=0.6
    else: alpha=1.0
    if debug: print("plotBarbs: starting barbs")
    # barbs already oriented with map projection. In Basemap, we needed to use m.rotate_vector().
    cs2 = ax.barbs(x[::skip*stride,::skip*stride], y[::skip*stride,::skip*stride],
            to_np(u)[::skip*stride,::skip*stride], to_np(v)[::skip*stride,::skip*stride], color='black',
            alpha=alpha, length=5, linewidth=0.25, sizes={'emptybarb':0.05} )
    fineprint0 += "wind barb (" + u.units + ") "
if contour:
    # Get netCDF variable name appropriate for requested variable from fieldinfo module.
    info = fieldinfo.nsc[contour]
    if debug:
        print("found nsc in fieldinfo.py. Using",info)
    cvar = getvar(wrfnc, info['fname'][0])
    if 'units' in info.keys():
        cvar.metpy.convert_units(info['units'])
    levels = info['levels']
    # could use levels from fieldinfo module, but default is often less cluttered.
    alpha=0.4
    if debug: print("starting "+contour+" contours")
    cr = ax.contour(x[::stride,::stride], y[::stride,::stride],
            cvar[::stride,::stride], levels=levels, colors='black', alpha=alpha,
            linewidths=0.75)
    clab = ax.clabel(cr, inline=False, fmt='%.0f', fontsize=6)
    fineprint0 += "contour "+contour+" (" + cvar.units + ") "
# Loop over every surviving storm object: recenter the map on its centroid,
# optionally draw its mask contour and storm-motion arrow, and save one png.
for lon,lat,stepid,trackid,u,v,pngfile in zip(df.Centroid_Lon, df.Centroid_Lat,df.Step_ID,df.Track_ID,df.Storm_Motion_U,df.Storm_Motion_V,pngfiles):
    # Rebuild the provenance fineprint for this object.
    fineprint = fineprint0 + "\nwrfout " + os.path.realpath(wrfout)
    if not no_mask:
        fineprint += "\npatches "+patches
    fineprint += "\ntracks "+tracks
    fineprint += "\ntrackid "+trackid
    fineprint += "\ncreated "+str(datetime.datetime.now(tz=None)).split('.')[0]
    if not no_fineprint: # show fineprint
        fineprint_obj.set_text(fineprint)
    # NOTE: this rebinds x and y (formerly the 2-D grid coordinate arrays,
    # which are no longer needed) to the scalar centroid location.
    x, y = WRF_proj.transform_point(lon, lat, cartopy.crs.PlateCarree()) # Transform lon/lat to x and y (in meters) in WRF projection.
    ax.set_extent([x-padding[0]*1000., x+padding[1]*1000., y-padding[2]*1000., y+padding[3]*1000.], crs=WRF_proj)
    track_id_int = int(trackid.split('_')[-1])
    step_id_int = int(stepid.split('_')[-1])
    # Contour object mask
    if not no_mask:
        # Find matching mask track id and step. For some reason, steps start with 1 in netCDF patches file
        matches = (mtrack_ids == track_id_int) & (mtrack_steps == step_id_int+1)
        # Bug fix: test for an empty match BEFORE indexing. The original code
        # evaluated np.where(matches)[0][0] first, which raises IndexError
        # when there is no match, so the any() guard could never be reached.
        if not any(matches):
            pdb.set_trace()
        ip = np.where(matches)[0][0]
        tolerance = 0.025 # TODO: figure out why centroid of csv object and nc patch differ at all
        if np.abs(lon-mask_centroid_lons[ip]) > tolerance:
            print(stepid,lon,mask_centroid_lons[ip])
        if np.abs(lat-mask_centroid_lats[ip]) > tolerance:
            print(stepid,lat,mask_centroid_lats[ip])
        mask = masks[ip]
        mlon = mlons[ip]
        mlat = mlats[ip]
        mcntr = ax.contour(mlon, mlat, mask, levels=[0,10], colors='black', alpha=0.6,
                linewidths=2., linestyles="solid", zorder=2, transform=cartopy.crs.PlateCarree())
    # Update axes labels (distance along axes).
    update_scale_labels(scale_xy)
    if arrow:
        # NOTE(review): mcntr only exists when masks are drawn; running with
        # --no-mask and arrows enabled would raise NameError — confirm intent.
        # Storm motion vector points from previous location to present location.
        smv = ax.arrow(x-u, y-v, u, v, color=mcntr.colors, alpha=mcntr.get_alpha(), # Can't get head to show. Tried quiver, plot, head_width, head_length..., annotate...
                linewidth=1, zorder=2, capstyle='round', transform=WRF_proj) # tried length_includes_head=True, but zero-size gives ValueError about shape Nx2 needed.
    # Save image.
    plt.savefig(pngfile, dpi=175)
    print('created ' + os.path.realpath(pngfile))
    if arrow: smv.remove()
    # Remove object mask contour
    if not no_mask:
        for i in mcntr.collections: i.remove()
    if debug: pdb.set_trace()
plt.close(fig)
# Print shell one-liners for post-processing the generated pngs.
# Note: stepid is the loop variable left over from the final iteration above.
print("to sort -2 -1 +0 +1 +2 numerically:")
print("ls d01*png | sort -g -k 1."+str(len(stepid)+2))
print("to trim whitespace:")
print("convert -crop 980x1012+390+173 in.png out.png")
| [
"numpy.abs",
"argparse.ArgumentParser",
"pandas.read_csv",
"matplotlib.pyplot.axes",
"matplotlib.pyplot.figure",
"os.path.isfile",
"numpy.arange",
"matplotlib.colors.ListedColormap",
"netCDF4.Dataset",
"wrf.get_cartopy",
"matplotlib.pyplot.close",
"matplotlib.pyplot.colorbar",
"numpy.max",
... | [((511, 532), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (525, 532), False, 'import matplotlib\n'), ((679, 808), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Plot WRF and SPC storm reports"""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description='Plot WRF and SPC storm reports',\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (702, 808), False, 'import argparse\n'), ((4536, 4595), 'pandas.read_csv', 'pd.read_csv', (['tracks'], {'parse_dates': "['Run_Date', 'Valid_Date']"}), "(tracks, parse_dates=['Run_Date', 'Valid_Date'])\n", (4547, 4595), True, 'import pandas as pd\n'), ((6570, 6605), 'matplotlib.colors.ListedColormap', 'colors.ListedColormap', (["info['cmap']"], {}), "(info['cmap'])\n", (6591, 6605), True, 'import matplotlib.colors as colors\n'), ((6911, 6931), 'netCDF4.Dataset', 'Dataset', (['wrfout', '"""r"""'], {}), "(wrfout, 'r')\n", (6918, 6931), False, 'from netCDF4 import Dataset\n'), ((7167, 7186), 'wrf.getvar', 'getvar', (['wrfnc', 'fill'], {}), '(wrfnc, fill)\n', (7173, 7186), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((7203, 7222), 'wrf.latlon_coords', 'latlon_coords', (['cvar'], {}), '(cvar)\n', (7216, 7222), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((7297, 7314), 'wrf.get_cartopy', 'get_cartopy', (['cvar'], {}), '(cvar)\n', (7308, 7314), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((7830, 7858), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(10, 10)'}), '(figsize=(10, 10))\n', (7840, 7858), True, 'import matplotlib.pyplot as plt\n'), ((7893, 7922), 'matplotlib.pyplot.axes', 'plt.axes', ([], {'projection': 'WRF_proj'}), '(projection=WRF_proj)\n', (7901, 7922), True, 'import matplotlib.pyplot as plt\n'), ((8162, 8276), 'matplotlib.pyplot.annotate', 'plt.annotate', ([], {'text': 'fineprint0', 'xy': '(0, 5)', 'xycoords': "('axes fraction', 'figure 
pixels')", 'va': '"""bottom"""', 'fontsize': '(4)'}), "(text=fineprint0, xy=(0, 5), xycoords=('axes fraction',\n 'figure pixels'), va='bottom', fontsize=4)\n", (8174, 8276), True, 'import matplotlib.pyplot as plt\n'), ((8663, 8748), 'matplotlib.pyplot.colorbar', 'plt.colorbar', (['cfill'], {'ax': 'ax', 'format': '"""%.0f"""', 'shrink': '(0.52)', 'orientation': '"""horizontal"""'}), "(cfill, ax=ax, format='%.0f', shrink=0.52, orientation='horizontal'\n )\n", (8675, 8748), True, 'import matplotlib.pyplot as plt\n'), ((16697, 16711), 'matplotlib.pyplot.close', 'plt.close', (['fig'], {}), '(fig)\n', (16706, 16711), True, 'import matplotlib.pyplot as plt\n'), ((4825, 4836), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (4833, 4836), False, 'import sys\n'), ((5457, 5468), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5465, 5468), False, 'import sys\n'), ((5838, 5849), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5846, 5849), False, 'import sys\n'), ((6002, 6023), 'netCDF4.Dataset', 'Dataset', (['patches', '"""r"""'], {}), "(patches, 'r')\n", (6009, 6023), False, 'from netCDF4 import Dataset\n'), ((6705, 6740), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': 'timeshift'}), '(hours=timeshift)\n', (6723, 6740), False, 'import datetime\n'), ((7057, 7068), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7065, 7068), False, 'import sys\n'), ((7621, 7646), 'cartopy.crs.PlateCarree', 'cartopy.crs.PlateCarree', ([], {}), '()\n', (7644, 7646), False, 'import cartopy\n'), ((7648, 7681), 'wrf.to_np', 'to_np', (['wrflon[::stride, ::stride]'], {}), '(wrflon[::stride, ::stride])\n', (7653, 7681), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((7682, 7715), 'wrf.to_np', 'to_np', (['wrflat[::stride, ::stride]'], {}), '(wrflat[::stride, ::stride])\n', (7687, 7715), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((7938, 7978), 'cartopy.feature.STATES.with_scale', 'cartopy.feature.STATES.with_scale', (['"""10m"""'], {}), 
"('10m')\n", (7971, 7978), False, 'import cartopy\n'), ((8419, 8430), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (8427, 8430), False, 'import sys\n'), ((8587, 8618), 'wrf.to_np', 'to_np', (['cvar[::stride, ::stride]'], {}), '(cvar[::stride, ::stride])\n', (8592, 8618), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((9498, 9529), 'wrf.getvar', 'getvar', (['wrfnc', "info['fname'][1]"], {}), "(wrfnc, info['fname'][1])\n", (9504, 9529), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((9542, 9573), 'wrf.getvar', 'getvar', (['wrfnc', "info['fname'][2]"], {}), "(wrfnc, info['fname'][2])\n", (9548, 9573), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((11755, 11885), 'cartopy.io.shapereader.Reader', 'cartopy.io.shapereader.Reader', (['"""/glade/work/ahijevyc/share/shapeFiles/cb_2013_us_county_500k/cb_2013_us_county_500k.shp"""'], {}), "(\n '/glade/work/ahijevyc/share/shapeFiles/cb_2013_us_county_500k/cb_2013_us_county_500k.shp'\n )\n", (11784, 11885), False, 'import cartopy\n'), ((13554, 13585), 'wrf.getvar', 'getvar', (['wrfnc', "info['fname'][0]"], {}), "(wrfnc, info['fname'][0])\n", (13560, 13585), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((16460, 16489), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pngfile'], {'dpi': '(175)'}), '(pngfile, dpi=175)\n', (16471, 16489), True, 'import matplotlib.pyplot as plt\n'), ((16681, 16696), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (16694, 16696), False, 'import pdb\n'), ((3694, 3721), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(7)'}), '(hours=7)\n', (3712, 3721), False, 'import datetime\n'), ((3737, 3765), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(36)'}), '(hours=36)\n', (3755, 3765), False, 'import datetime\n'), ((12040, 12065), 'cartopy.crs.PlateCarree', 'cartopy.crs.PlateCarree', ([], {}), '()\n', (12063, 12065), False, 'import cartopy\n'), ((12403, 12439), 'wrf.getvar', 
'getvar', (['wrfnc', '"""uvmet10"""'], {'units': '"""kt"""'}), "(wrfnc, 'uvmet10', units='kt')\n", (12409, 12439), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((14339, 14363), 'os.path.realpath', 'os.path.realpath', (['wrfout'], {}), '(wrfout)\n', (14355, 14363), False, 'import os\n'), ((14712, 14737), 'cartopy.crs.PlateCarree', 'cartopy.crs.PlateCarree', ([], {}), '()\n', (14735, 14737), False, 'import cartopy\n'), ((2649, 2690), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['d', '"""%Y%m%d%H"""'], {}), "(d, '%Y%m%d%H')\n", (2675, 2690), False, 'import datetime\n'), ((2805, 2846), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['d', '"""%Y%m%d%H"""'], {}), "(d, '%Y%m%d%H')\n", (2831, 2846), False, 'import datetime\n'), ((5605, 5622), 'os.path.isfile', 'os.path.isfile', (['p'], {}), '(p)\n', (5619, 5622), False, 'import os\n'), ((10058, 10071), 'wrf.to_np', 'to_np', (['max_uh'], {}), '(max_uh)\n', (10063, 10071), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((10244, 10257), 'wrf.to_np', 'to_np', (['max_uh'], {}), '(max_uh)\n', (10249, 10257), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((11041, 11054), 'wrf.to_np', 'to_np', (['min_uh'], {}), '(min_uh)\n', (11046, 11054), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((11233, 11246), 'wrf.to_np', 'to_np', (['min_uh'], {}), '(min_uh)\n', (11238, 11246), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((12481, 12503), 'wrf.getvar', 'getvar', (['wrfnc', '"""USHR6"""'], {}), "(wrfnc, 'USHR6')\n", (12487, 12503), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((12521, 12543), 'wrf.getvar', 'getvar', (['wrfnc', '"""VSHR6"""'], {}), "(wrfnc, 'VSHR6')\n", (12527, 12543), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((13117, 13125), 'wrf.to_np', 'to_np', (['u'], {}), '(u)\n', (13122, 13125), False, 'from wrf import 
to_np, getvar, get_cartopy, latlon_coords\n'), ((13156, 13164), 'wrf.to_np', 'to_np', (['v'], {}), '(v)\n', (13161, 13164), False, 'from wrf import to_np, getvar, get_cartopy, latlon_coords\n'), ((15321, 15336), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (15334, 15336), False, 'import pdb\n'), ((15447, 15483), 'numpy.abs', 'np.abs', (['(lon - mask_centroid_lons[ip])'], {}), '(lon - mask_centroid_lons[ip])\n', (15453, 15483), True, 'import numpy as np\n'), ((15559, 15595), 'numpy.abs', 'np.abs', (['(lat - mask_centroid_lats[ip])'], {}), '(lat - mask_centroid_lats[ip])\n', (15565, 15595), True, 'import numpy as np\n'), ((16513, 16538), 'os.path.realpath', 'os.path.realpath', (['pngfile'], {}), '(pngfile)\n', (16529, 16538), False, 'import os\n'), ((10660, 10675), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (10673, 10675), False, 'import pdb\n'), ((11566, 11581), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (11579, 11581), False, 'import pdb\n'), ((12747, 12805), 'numpy.max', 'np.max', (['[padding[0] + padding[1], padding[2] + padding[3]]'], {}), '([padding[0] + padding[1], padding[2] + padding[3]])\n', (12753, 12805), True, 'import numpy as np\n'), ((15256, 15273), 'numpy.where', 'np.where', (['matches'], {}), '(matches)\n', (15264, 15273), True, 'import numpy as np\n'), ((15894, 15919), 'cartopy.crs.PlateCarree', 'cartopy.crs.PlateCarree', ([], {}), '()\n', (15917, 15919), False, 'import cartopy\n'), ((10283, 10298), 'numpy.arange', 'np.arange', (['(1)', '(6)'], {}), '(1, 6)\n', (10292, 10298), True, 'import numpy as np\n'), ((11272, 11291), 'numpy.arange', 'np.arange', (['(6)', '(0)', '(-1)'], {}), '(6, 0, -1)\n', (11281, 11291), True, 'import numpy as np\n'), ((14534, 14564), 'datetime.datetime.now', 'datetime.datetime.now', ([], {'tz': 'None'}), '(tz=None)\n', (14555, 14564), False, 'import datetime\n')] |
from apex.optimizers import FusedLAMB, FusedAdam
from torch.optim.lr_scheduler import LambdaLR
from torch.optim import AdamW, Adam, SGD
import math
# https://huggingface.co/transformers/_modules/transformers/optimization.html#get_linear_schedule_with_warmup
def get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps, num_training_steps, last_epoch=-1
):
    """Linear warmup followed by linear decay.

    The learning-rate multiplier rises linearly from 0 to 1 over
    ``num_warmup_steps``, then falls linearly back to 0 at
    ``num_training_steps`` (clamped at 0 afterwards).

    Args:
        optimizer: the :class:`torch.optim.Optimizer` to schedule.
        num_warmup_steps: length of the warmup phase, in steps.
        num_training_steps: total number of training steps.
        last_epoch: index of the last epoch when resuming (default -1).

    Returns:
        A :class:`torch.optim.lr_scheduler.LambdaLR` with this schedule.
    """

    def lr_lambda(step: int):
        if step < num_warmup_steps:
            # Warmup: fraction of the warmup window completed so far.
            return step / max(1, num_warmup_steps)
        # Decay: fraction of the post-warmup window remaining, floored at 0.
        remaining = num_training_steps - step
        span = max(1, num_training_steps - num_warmup_steps)
        return max(0.0, remaining / span)

    return LambdaLR(optimizer, lr_lambda, last_epoch)
def get_cosine_schedule_with_warmup(
    optimizer,
    num_warmup_steps: int,
    num_training_steps: int,
    num_cycles: float = 0.5,
    last_epoch: int = -1,
):
    """Linear warmup followed by cosine annealing.

    The multiplier rises linearly from 0 to 1 over ``num_warmup_steps``,
    then follows a cosine curve down to 0 at ``num_training_steps``.

    Args:
        optimizer: the :class:`torch.optim.Optimizer` to schedule.
        num_warmup_steps: length of the warmup phase, in steps.
        num_training_steps: total number of training steps.
        num_cycles: number of cosine waves; the default 0.5 is a single
            half-cosine from the maximum down to 0.
        last_epoch: index of the last epoch when resuming (default -1).

    Returns:
        A :class:`torch.optim.lr_scheduler.LambdaLR` with this schedule.
    """

    def lr_lambda(step):
        if step < num_warmup_steps:
            # Warmup: fraction of the warmup window completed so far.
            return step / max(1, num_warmup_steps)
        # Fraction of the post-warmup training window completed.
        progress = (step - num_warmup_steps) / max(1, num_training_steps - num_warmup_steps)
        cosine = math.cos(math.pi * float(num_cycles) * 2.0 * progress)
        # Floor at 0 so extra steps past the end keep lr at 0.
        return max(0.0, 0.5 * (1.0 + cosine))

    return LambdaLR(optimizer, lr_lambda, last_epoch)
def get_exponent_schedule_with_warmup(
    optimizer,
    num_warmup_steps: int,
    exponent: float = 1 - 2e-3,
    step: int = 10,
    last_epoch: int = -1,
):
    """Linear warmup followed by stepwise exponential decay.

    After ``num_warmup_steps`` the multiplier is multiplied by
    ``exponent`` once every ``step`` optimizer steps (a staircase decay).

    Returns:
        A :class:`torch.optim.lr_scheduler.LambdaLR` with this schedule.
    """

    def lr_lambda(current_step: int):
        if current_step >= num_warmup_steps:
            # Staircase decay: one factor of `exponent` per `step` steps.
            return exponent ** ((current_step - num_warmup_steps) // step)
        # Warmup: fraction of the warmup window completed so far.
        return current_step / max(1, num_warmup_steps)

    return LambdaLR(optimizer, lr_lambda, last_epoch)
def get_optim(cfg, model, dataset_iter_num):
    """Build the optimizer and (optional) LR scheduler described by cfg.OPTIM.

    Args:
        cfg: config object; only its ``OPTIM`` sub-config is used
            (NAME, INIT_LR, ADAM_EPSILON / SGD_MOMENTUM, WARM_UP_EPOCH,
            MAX_EPOCH, USE_LR_SCHEDULER, LR_SCHEDULER_TYPE, ...).
        model: the model whose trainable parameters are optimized.
        dataset_iter_num: iterations per epoch, used to convert the
            warmup/max epoch counts into step counts.

    Returns:
        (optimizer, scheduler) — scheduler is None when USE_LR_SCHEDULER
        is off.
    """
    cfg = cfg.OPTIM
    optim_name = cfg.NAME
    optimizer = None
    assert optim_name in ["FusedLAMB", "AdamW", "Adam", "SGD"], "optimizer not allowed"
    # Only parameters with requires_grad participate in optimization.
    trainable = filter(lambda p: p.requires_grad, model.parameters())
    if optim_name == "FusedLAMB":
        optimizer = FusedLAMB(trainable, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)
    elif optim_name == "AdamW":
        optimizer = AdamW(trainable, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)
    elif optim_name == "Adam":
        optimizer = Adam(trainable, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)
    elif optim_name == "SGD":
        optimizer = SGD(trainable, lr=cfg.INIT_LR, momentum=cfg.SGD_MOMENTUM)

    # Convert epoch-based config into step counts for the schedulers.
    warmup_step = int(cfg.WARM_UP_EPOCH * dataset_iter_num)
    max_step = cfg.MAX_EPOCH * dataset_iter_num
    scheduler = None
    if cfg.USE_LR_SCHEDULER:
        if cfg.LR_SCHEDULER_TYPE == "get_exponent_schedule_with_warmup":
            scheduler = get_exponent_schedule_with_warmup(
                optimizer, warmup_step, exponent=cfg.EXPONENT
            )
        else:
            # Look up the scheduler factory in this module by name.
            scheduler = globals()[cfg.LR_SCHEDULER_TYPE](
                optimizer, warmup_step, max_step
            )
    return optimizer, scheduler
| [
"apex.optimizers.FusedLAMB",
"torch.optim.AdamW",
"torch.optim.Adam",
"torch.optim.lr_scheduler.LambdaLR",
"torch.optim.SGD"
] | [((1471, 1513), 'torch.optim.lr_scheduler.LambdaLR', 'LambdaLR', (['optimizer', 'lr_lambda', 'last_epoch'], {}), '(optimizer, lr_lambda, last_epoch)\n', (1479, 1513), False, 'from torch.optim.lr_scheduler import LambdaLR\n'), ((3126, 3168), 'torch.optim.lr_scheduler.LambdaLR', 'LambdaLR', (['optimizer', 'lr_lambda', 'last_epoch'], {}), '(optimizer, lr_lambda, last_epoch)\n', (3134, 3168), False, 'from torch.optim.lr_scheduler import LambdaLR\n'), ((3589, 3631), 'torch.optim.lr_scheduler.LambdaLR', 'LambdaLR', (['optimizer', 'lr_lambda', 'last_epoch'], {}), '(optimizer, lr_lambda, last_epoch)\n', (3597, 3631), False, 'from torch.optim.lr_scheduler import LambdaLR\n'), ((3960, 4019), 'apex.optimizers.FusedLAMB', 'FusedLAMB', (['parameters'], {'lr': 'cfg.INIT_LR', 'eps': 'cfg.ADAM_EPSILON'}), '(parameters, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)\n', (3969, 4019), False, 'from apex.optimizers import FusedLAMB, FusedAdam\n'), ((4070, 4125), 'torch.optim.AdamW', 'AdamW', (['parameters'], {'lr': 'cfg.INIT_LR', 'eps': 'cfg.ADAM_EPSILON'}), '(parameters, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)\n', (4075, 4125), False, 'from torch.optim import AdamW, Adam, SGD\n'), ((4175, 4229), 'torch.optim.Adam', 'Adam', (['parameters'], {'lr': 'cfg.INIT_LR', 'eps': 'cfg.ADAM_EPSILON'}), '(parameters, lr=cfg.INIT_LR, eps=cfg.ADAM_EPSILON)\n', (4179, 4229), False, 'from torch.optim import AdamW, Adam, SGD\n'), ((4278, 4336), 'torch.optim.SGD', 'SGD', (['parameters'], {'lr': 'cfg.INIT_LR', 'momentum': 'cfg.SGD_MOMENTUM'}), '(parameters, lr=cfg.INIT_LR, momentum=cfg.SGD_MOMENTUM)\n', (4281, 4336), False, 'from torch.optim import AdamW, Adam, SGD\n')] |
#!/usr/bin/env python3
import sys
from build import Builder
# Name of this package, derived from this script's filename by the build system.
package_name = Builder.package_name_from_filename(__file__)
# Packages (name-version) that must be built before this one.
dependencies = ('zlib-1.2.11',
                'bzip2-1.0.8',
                'jpeg-9c',
                'zstd-be3bd70',
                'tiff-4.0.10',
                'icu4c-65_1',
                'boost_1_71_0')
def prepare(builder):
    """Pre-build hook; this package needs no preparation, report success."""
    return True
def build(builder):
    """Build hook; this package needs no build step, report success."""
    return True
def cleanup(builder):
    """Post-build hook; this package needs no cleanup, report success."""
    return True
if __name__ == "__main__":
    # This module is driven by the build framework, never run directly.
    print('You must not call this script directly.')
    sys.exit(1)
| [
"build.Builder.package_name_from_filename",
"sys.exit"
] | [((77, 121), 'build.Builder.package_name_from_filename', 'Builder.package_name_from_filename', (['__file__'], {}), '(__file__)\n', (111, 121), False, 'from build import Builder\n'), ((540, 551), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (548, 551), False, 'import sys\n')] |
"""
MIT License
Copyright (c) 2017 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Documentation
Sub module for UI implementation.
"""
from pymel.core import *
import maya.OpenMayaUI as OpenMayaUI
from Qt import QtCore, QtGui, QtWidgets
from Qt.QtCore import Slot
try:
from shiboken2 import wrapInstance
except:
from shiboken import wrapInstance
from . import core
class Ui_SpaceSwitcherWindow(object):
    """Widget builder for the SpaceSwitcher main window.

    Follows the pyuic/Qt-Designer ``setupUi``/``retranslateUi`` convention:
    ``setupUi`` creates and lays out every child widget (parent field,
    create/delete constraint buttons, per-axis check boxes, bake-range spin
    boxes) on the given QMainWindow and stores them as attributes on *self*.
    """

    def setupUi(self, SpaceSwitcherWindow):
        """Create all widgets and layouts on *SpaceSwitcherWindow* (fixed 246x256)."""
        # --- main window: fixed-size, non-modal ---
        SpaceSwitcherWindow.setObjectName("SpaceSwitcherWindow")
        SpaceSwitcherWindow.setWindowModality(QtCore.Qt.NonModal)
        SpaceSwitcherWindow.resize(246, 256)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(SpaceSwitcherWindow.sizePolicy().hasHeightForWidth())
        SpaceSwitcherWindow.setSizePolicy(sizePolicy)
        SpaceSwitcherWindow.setMinimumSize(QtCore.QSize(246, 256))
        SpaceSwitcherWindow.setMaximumSize(QtCore.QSize(246, 256))
        SpaceSwitcherWindow.setWindowTitle("SpaceSwitcher")
        SpaceSwitcherWindow.setWindowOpacity(1.0)
        SpaceSwitcherWindow.setToolTip("")
        SpaceSwitcherWindow.setTabShape(QtWidgets.QTabWidget.Rounded)
        # --- central widget and its single horizontal layout ---
        self.centralWidget = QtWidgets.QWidget(SpaceSwitcherWindow)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.centralWidget.sizePolicy().hasHeightForWidth())
        self.centralWidget.setSizePolicy(sizePolicy)
        self.centralWidget.setMinimumSize(QtCore.QSize(0, 0))
        self.centralWidget.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self.centralWidget.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.centralWidget.setObjectName("centralWidget")
        self.layout_centralWidget = QtWidgets.QHBoxLayout(self.centralWidget)
        self.layout_centralWidget.setSpacing(2)
        self.layout_centralWidget.setContentsMargins(2, 2, 2, 2)
        self.layout_centralWidget.setObjectName("layout_centralWidget")
        # --- root frame stacking the four sections vertically ---
        self.frame_Root = QtWidgets.QFrame(self.centralWidget)
        self.frame_Root.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_Root.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_Root.setObjectName("frame_Root")
        self.layout_Root = QtWidgets.QVBoxLayout(self.frame_Root)
        self.layout_Root.setSpacing(2)
        self.layout_Root.setContentsMargins(2, 2, 2, 2)
        self.layout_Root.setObjectName("layout_Root")
        # --- section 1: "Parent" frame (label, Set/Clear buttons, name field) ---
        self.frame_Parent = QtWidgets.QFrame(self.frame_Root)
        self.frame_Parent.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_Parent.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_Parent.setObjectName("frame_Parent")
        self.layout_Parent = QtWidgets.QVBoxLayout(self.frame_Parent)
        self.layout_Parent.setSpacing(4)
        self.layout_Parent.setContentsMargins(2, 2, 2, 2)
        self.layout_Parent.setObjectName("layout_Parent")
        self.frame_LabelAndButton = QtWidgets.QFrame(self.frame_Parent)
        self.frame_LabelAndButton.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_LabelAndButton.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_LabelAndButton.setObjectName("frame_LabelAndButton")
        self.layout_LabelAndButton = QtWidgets.QHBoxLayout(self.frame_LabelAndButton)
        self.layout_LabelAndButton.setSpacing(2)
        self.layout_LabelAndButton.setContentsMargins(0, 0, 0, 0)
        self.layout_LabelAndButton.setObjectName("layout_LabelAndButton")
        self.label_Parent = QtWidgets.QLabel(self.frame_LabelAndButton)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_Parent.sizePolicy().hasHeightForWidth())
        self.label_Parent.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label_Parent.setFont(font)
        self.label_Parent.setText("Parent")
        self.label_Parent.setObjectName("label_Parent")
        self.layout_LabelAndButton.addWidget(self.label_Parent)
        self.pushButton_SetParent = QtWidgets.QPushButton(self.frame_LabelAndButton)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushButton_SetParent.sizePolicy().hasHeightForWidth())
        self.pushButton_SetParent.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.pushButton_SetParent.setFont(font)
        self.pushButton_SetParent.setText("Set")
        self.pushButton_SetParent.setObjectName("pushButton_SetParent")
        self.layout_LabelAndButton.addWidget(self.pushButton_SetParent)
        self.pushButton_ClearParent = QtWidgets.QPushButton(self.frame_LabelAndButton)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushButton_ClearParent.sizePolicy().hasHeightForWidth())
        self.pushButton_ClearParent.setSizePolicy(sizePolicy)
        self.pushButton_ClearParent.setMaximumSize(QtCore.QSize(52, 16777215))
        self.pushButton_ClearParent.setText("Clear")
        self.pushButton_ClearParent.setObjectName("pushButton_ClearParent")
        self.layout_LabelAndButton.addWidget(self.pushButton_ClearParent)
        self.layout_Parent.addWidget(self.frame_LabelAndButton)
        self.lineEdit_Parent = QtWidgets.QLineEdit(self.frame_Parent)
        self.lineEdit_Parent.setText("")
        self.lineEdit_Parent.setObjectName("lineEdit_Parent")
        self.layout_Parent.addWidget(self.lineEdit_Parent)
        self.layout_Root.addWidget(self.frame_Parent)
        # --- section 2: "Create Constraint" frame ---
        self.frame_CreateConstraint = QtWidgets.QFrame(self.frame_Root)
        self.frame_CreateConstraint.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_CreateConstraint.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_CreateConstraint.setObjectName("frame_CreateConstraint")
        self.layout_CreateConstraint = QtWidgets.QVBoxLayout(self.frame_CreateConstraint)
        self.layout_CreateConstraint.setSpacing(0)
        self.layout_CreateConstraint.setContentsMargins(2, 2, 2, 2)
        self.layout_CreateConstraint.setObjectName("layout_CreateConstraint")
        self.label_CreateConstraint = QtWidgets.QLabel(self.frame_CreateConstraint)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_CreateConstraint.sizePolicy().hasHeightForWidth())
        self.label_CreateConstraint.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label_CreateConstraint.setFont(font)
        self.label_CreateConstraint.setToolTip("Create constraints: Select nodes to be constrained")
        self.label_CreateConstraint.setText("Create Constraint")
        self.label_CreateConstraint.setObjectName("label_CreateConstraint")
        self.layout_CreateConstraint.addWidget(self.label_CreateConstraint)
        # per-axis Translate check boxes (X/Y/Z, all checked by default)
        self.frame_TranslateCheckBoxes = QtWidgets.QFrame(self.frame_CreateConstraint)
        self.frame_TranslateCheckBoxes.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_TranslateCheckBoxes.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_TranslateCheckBoxes.setObjectName("frame_TranslateCheckBoxes")
        self.layout_TranslateCheckBoxes = QtWidgets.QHBoxLayout(self.frame_TranslateCheckBoxes)
        self.layout_TranslateCheckBoxes.setSpacing(8)
        self.layout_TranslateCheckBoxes.setContentsMargins(0, 6, 0, 0)
        self.layout_TranslateCheckBoxes.setObjectName("layout_TranslateCheckBoxes")
        self.label_Translate = QtWidgets.QLabel(self.frame_TranslateCheckBoxes)
        self.label_Translate.setText("Translate")
        self.label_Translate.setObjectName("label_Translate")
        self.layout_TranslateCheckBoxes.addWidget(self.label_Translate)
        self.checkBox_TranslateX = QtWidgets.QCheckBox(self.frame_TranslateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_TranslateX.sizePolicy().hasHeightForWidth())
        self.checkBox_TranslateX.setSizePolicy(sizePolicy)
        self.checkBox_TranslateX.setText("X")
        self.checkBox_TranslateX.setChecked(True)
        self.checkBox_TranslateX.setObjectName("checkBox_TranslateX")
        self.layout_TranslateCheckBoxes.addWidget(self.checkBox_TranslateX)
        self.checkBox_TranslateY = QtWidgets.QCheckBox(self.frame_TranslateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_TranslateY.sizePolicy().hasHeightForWidth())
        self.checkBox_TranslateY.setSizePolicy(sizePolicy)
        self.checkBox_TranslateY.setText("Y")
        self.checkBox_TranslateY.setChecked(True)
        self.checkBox_TranslateY.setObjectName("checkBox_TranslateY")
        self.layout_TranslateCheckBoxes.addWidget(self.checkBox_TranslateY)
        self.checkBox_TranslateZ = QtWidgets.QCheckBox(self.frame_TranslateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_TranslateZ.sizePolicy().hasHeightForWidth())
        self.checkBox_TranslateZ.setSizePolicy(sizePolicy)
        self.checkBox_TranslateZ.setText("Z")
        self.checkBox_TranslateZ.setChecked(True)
        self.checkBox_TranslateZ.setObjectName("checkBox_TranslateZ")
        self.layout_TranslateCheckBoxes.addWidget(self.checkBox_TranslateZ)
        self.layout_CreateConstraint.addWidget(self.frame_TranslateCheckBoxes)
        # per-axis Rotate check boxes (X/Y/Z, all checked by default)
        self.frame_RotateCheckBoxes = QtWidgets.QFrame(self.frame_CreateConstraint)
        self.frame_RotateCheckBoxes.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_RotateCheckBoxes.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_RotateCheckBoxes.setObjectName("frame_RotateCheckBoxes")
        self.layout_RotateCheckBoxes = QtWidgets.QHBoxLayout(self.frame_RotateCheckBoxes)
        self.layout_RotateCheckBoxes.setSpacing(8)
        self.layout_RotateCheckBoxes.setContentsMargins(0, 0, 0, 0)
        self.layout_RotateCheckBoxes.setObjectName("layout_RotateCheckBoxes")
        self.label_Rotate = QtWidgets.QLabel(self.frame_RotateCheckBoxes)
        self.label_Rotate.setText("Rotate")
        self.label_Rotate.setObjectName("label_Rotate")
        self.layout_RotateCheckBoxes.addWidget(self.label_Rotate)
        self.checkBox_RotateX = QtWidgets.QCheckBox(self.frame_RotateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_RotateX.sizePolicy().hasHeightForWidth())
        self.checkBox_RotateX.setSizePolicy(sizePolicy)
        self.checkBox_RotateX.setText("X")
        self.checkBox_RotateX.setChecked(True)
        self.checkBox_RotateX.setObjectName("checkBox_RotateX")
        self.layout_RotateCheckBoxes.addWidget(self.checkBox_RotateX)
        self.checkBox_RotateY = QtWidgets.QCheckBox(self.frame_RotateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_RotateY.sizePolicy().hasHeightForWidth())
        self.checkBox_RotateY.setSizePolicy(sizePolicy)
        self.checkBox_RotateY.setText("Y")
        self.checkBox_RotateY.setChecked(True)
        self.checkBox_RotateY.setObjectName("checkBox_RotateY")
        self.layout_RotateCheckBoxes.addWidget(self.checkBox_RotateY)
        self.checkBox_RotateZ = QtWidgets.QCheckBox(self.frame_RotateCheckBoxes)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.checkBox_RotateZ.sizePolicy().hasHeightForWidth())
        self.checkBox_RotateZ.setSizePolicy(sizePolicy)
        self.checkBox_RotateZ.setText("Z")
        self.checkBox_RotateZ.setChecked(True)
        self.checkBox_RotateZ.setObjectName("checkBox_RotateZ")
        self.layout_RotateCheckBoxes.addWidget(self.checkBox_RotateZ)
        self.layout_CreateConstraint.addWidget(self.frame_RotateCheckBoxes)
        # Create / Create-and-Bake buttons
        self.frame_CreateConstraintButtons = QtWidgets.QFrame(self.frame_CreateConstraint)
        self.frame_CreateConstraintButtons.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_CreateConstraintButtons.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_CreateConstraintButtons.setObjectName("frame_CreateConstraintButtons")
        self.layout_CreateConstraintButtons = QtWidgets.QHBoxLayout(self.frame_CreateConstraintButtons)
        self.layout_CreateConstraintButtons.setSpacing(2)
        self.layout_CreateConstraintButtons.setContentsMargins(0, 0, 0, 0)
        self.layout_CreateConstraintButtons.setObjectName("layout_CreateConstraintButtons")
        self.pushButton_CreateConstraint = QtWidgets.QPushButton(self.frame_CreateConstraintButtons)
        self.pushButton_CreateConstraint.setToolTip("")
        self.pushButton_CreateConstraint.setText("Create")
        self.pushButton_CreateConstraint.setObjectName("pushButton_CreateConstraint")
        self.layout_CreateConstraintButtons.addWidget(self.pushButton_CreateConstraint)
        self.pushButton_CreateAndBakeConstraint = QtWidgets.QPushButton(self.frame_CreateConstraintButtons)
        self.pushButton_CreateAndBakeConstraint.setText("Create and Bake")
        self.pushButton_CreateAndBakeConstraint.setObjectName("pushButton_CreateAndBakeConstraint")
        self.layout_CreateConstraintButtons.addWidget(self.pushButton_CreateAndBakeConstraint)
        self.layout_CreateConstraint.addWidget(self.frame_CreateConstraintButtons)
        self.layout_Root.addWidget(self.frame_CreateConstraint)
        # --- section 3: "Delete Constraint" frame ---
        self.frame_DeleteConstraint = QtWidgets.QFrame(self.frame_Root)
        self.frame_DeleteConstraint.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_DeleteConstraint.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_DeleteConstraint.setObjectName("frame_DeleteConstraint")
        self.layout_DeleteConstraint = QtWidgets.QVBoxLayout(self.frame_DeleteConstraint)
        self.layout_DeleteConstraint.setSpacing(0)
        self.layout_DeleteConstraint.setContentsMargins(2, 2, 2, 2)
        self.layout_DeleteConstraint.setObjectName("layout_DeleteConstraint")
        self.label_DeleteConstraint = QtWidgets.QLabel(self.frame_DeleteConstraint)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_DeleteConstraint.sizePolicy().hasHeightForWidth())
        self.label_DeleteConstraint.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label_DeleteConstraint.setFont(font)
        self.label_DeleteConstraint.setToolTip("Delete constraints: Select constraining locators")
        self.label_DeleteConstraint.setText("Delete Constraint")
        self.label_DeleteConstraint.setObjectName("label_DeleteConstraint")
        self.layout_DeleteConstraint.addWidget(self.label_DeleteConstraint)
        self.frame_DeleteConstraintButtons = QtWidgets.QFrame(self.frame_DeleteConstraint)
        self.frame_DeleteConstraintButtons.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_DeleteConstraintButtons.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_DeleteConstraintButtons.setObjectName("frame_DeleteConstraintButtons")
        self.layout_DeleteConstraintButtons = QtWidgets.QHBoxLayout(self.frame_DeleteConstraintButtons)
        self.layout_DeleteConstraintButtons.setSpacing(2)
        self.layout_DeleteConstraintButtons.setContentsMargins(0, 4, 0, 0)
        self.layout_DeleteConstraintButtons.setObjectName("layout_DeleteConstraintButtons")
        self.pushButton_DeleteConstraint = QtWidgets.QPushButton(self.frame_DeleteConstraintButtons)
        self.pushButton_DeleteConstraint.setToolTip("")
        self.pushButton_DeleteConstraint.setText("Delete")
        self.pushButton_DeleteConstraint.setObjectName("pushButton_DeleteConstraint")
        self.layout_DeleteConstraintButtons.addWidget(self.pushButton_DeleteConstraint)
        self.pushButton_BakeAndDeleteConstraint = QtWidgets.QPushButton(self.frame_DeleteConstraintButtons)
        self.pushButton_BakeAndDeleteConstraint.setText("Bake and Delete")
        self.pushButton_BakeAndDeleteConstraint.setObjectName("pushButton_BakeAndDeleteConstraint")
        self.layout_DeleteConstraintButtons.addWidget(self.pushButton_BakeAndDeleteConstraint)
        self.layout_DeleteConstraint.addWidget(self.frame_DeleteConstraintButtons)
        self.layout_Root.addWidget(self.frame_DeleteConstraint)
        # --- section 4: "Bake Range" frame (label, set-from-timeline, spin boxes) ---
        self.frame_BakeRange = QtWidgets.QFrame(self.frame_Root)
        self.frame_BakeRange.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.frame_BakeRange.setFrameShadow(QtWidgets.QFrame.Raised)
        self.frame_BakeRange.setObjectName("frame_BakeRange")
        self.layout_BakeRange = QtWidgets.QVBoxLayout(self.frame_BakeRange)
        self.layout_BakeRange.setSpacing(0)
        self.layout_BakeRange.setContentsMargins(2, 2, 2, 2)
        self.layout_BakeRange.setObjectName("layout_BakeRange")
        self.frame_BakeRangeTop = QtWidgets.QFrame(self.frame_BakeRange)
        self.frame_BakeRangeTop.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_BakeRangeTop.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_BakeRangeTop.setObjectName("frame_BakeRangeTop")
        self.layout_BakeRangeTop = QtWidgets.QHBoxLayout(self.frame_BakeRangeTop)
        self.layout_BakeRangeTop.setSpacing(0)
        self.layout_BakeRangeTop.setContentsMargins(0, 0, 0, 0)
        self.layout_BakeRangeTop.setObjectName("layout_BakeRangeTop")
        self.label_BakeRange = QtWidgets.QLabel(self.frame_BakeRangeTop)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Maximum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_BakeRange.sizePolicy().hasHeightForWidth())
        self.label_BakeRange.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setWeight(75)
        font.setBold(True)
        self.label_BakeRange.setFont(font)
        self.label_BakeRange.setText("Bake Range")
        self.label_BakeRange.setObjectName("label_BakeRange")
        self.layout_BakeRangeTop.addWidget(self.label_BakeRange)
        self.pushButton_SetFromTimeline = QtWidgets.QPushButton(self.frame_BakeRangeTop)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.pushButton_SetFromTimeline.sizePolicy().hasHeightForWidth())
        self.pushButton_SetFromTimeline.setSizePolicy(sizePolicy)
        self.pushButton_SetFromTimeline.setMaximumSize(QtCore.QSize(16777215, 16777215))
        self.pushButton_SetFromTimeline.setText("Set from timeline")
        self.pushButton_SetFromTimeline.setObjectName("pushButton_SetFromTimeline")
        self.layout_BakeRangeTop.addWidget(self.pushButton_SetFromTimeline)
        self.layout_BakeRange.addWidget(self.frame_BakeRangeTop)
        # start/end frame spin boxes (defaults 1..24, accelerated stepping)
        self.frame_BakeRangeSpinBoxes = QtWidgets.QFrame(self.frame_BakeRange)
        self.frame_BakeRangeSpinBoxes.setFrameShape(QtWidgets.QFrame.NoFrame)
        self.frame_BakeRangeSpinBoxes.setFrameShadow(QtWidgets.QFrame.Plain)
        self.frame_BakeRangeSpinBoxes.setObjectName("frame_BakeRangeSpinBoxes")
        self.layout_BakeRangeSpinBoxes = QtWidgets.QHBoxLayout(self.frame_BakeRangeSpinBoxes)
        self.layout_BakeRangeSpinBoxes.setSpacing(2)
        self.layout_BakeRangeSpinBoxes.setContentsMargins(0, 4, 0, 0)
        self.layout_BakeRangeSpinBoxes.setObjectName("layout_BakeRangeSpinBoxes")
        self.spinBox_BakeStart = QtWidgets.QSpinBox(self.frame_BakeRangeSpinBoxes)
        self.spinBox_BakeStart.setAccelerated(True)
        self.spinBox_BakeStart.setMinimum(-16777215)
        self.spinBox_BakeStart.setMaximum(16777215)
        self.spinBox_BakeStart.setProperty("value", 1)
        self.spinBox_BakeStart.setObjectName("spinBox_BakeStart")
        self.layout_BakeRangeSpinBoxes.addWidget(self.spinBox_BakeStart)
        self.spinBox_BakeEnd = QtWidgets.QSpinBox(self.frame_BakeRangeSpinBoxes)
        self.spinBox_BakeEnd.setAccelerated(True)
        self.spinBox_BakeEnd.setMinimum(-16777215)
        self.spinBox_BakeEnd.setMaximum(16777215)
        self.spinBox_BakeEnd.setProperty("value", 24)
        self.spinBox_BakeEnd.setObjectName("spinBox_BakeEnd")
        self.layout_BakeRangeSpinBoxes.addWidget(self.spinBox_BakeEnd)
        self.layout_BakeRange.addWidget(self.frame_BakeRangeSpinBoxes)
        self.layout_Root.addWidget(self.frame_BakeRange)
        # --- final assembly ---
        self.layout_centralWidget.addWidget(self.frame_Root)
        SpaceSwitcherWindow.setCentralWidget(self.centralWidget)
        self.retranslateUi(SpaceSwitcherWindow)
        QtCore.QMetaObject.connectSlotsByName(SpaceSwitcherWindow)

    def retranslateUi(self, SpaceSwitcherWindow):
        """Apply translated UI strings (no-op: all strings are set in setupUi)."""
        pass
class ControlMainWindow(QtWidgets.QMainWindow):
    """Main window of the SpaceSwitcher tool.

    Wires the widgets built by :class:`Ui_SpaceSwitcherWindow` to slot
    callbacks that drive the ``core`` module (constraint creation and
    deletion) and keep the bake frame range in sync with the timeline.
    """

    def __init__(self, window_title, parent=None):
        """Build the UI and connect every button to its slot callback.

        Args:
            window_title (str): Title used by ``launch_ui`` to find and
                replace an already-open instance of this window.
            parent: Optional parent widget (usually the Maya main window).
        """
        super(ControlMainWindow, self).__init__(parent)
        self.window_title = window_title
        self.ui = Ui_SpaceSwitcherWindow()
        self.ui.setupUi(self)
        # signal - slot connections
        self.ui.pushButton_SetParent.clicked.connect(self.setParent_clicked)
        self.ui.pushButton_ClearParent.clicked.connect(self.clearParent_clicked)
        self.ui.pushButton_CreateConstraint.clicked.connect(self.createConstraint_clicked)
        self.ui.pushButton_CreateAndBakeConstraint.clicked.connect(self.createAndBakeConstraint_clicked)
        self.ui.pushButton_DeleteConstraint.clicked.connect(self.deleteConstraint_clicked)
        self.ui.pushButton_BakeAndDeleteConstraint.clicked.connect(self.bakeAndDeleteConstraint_clicked)
        self.ui.pushButton_SetFromTimeline.clicked.connect(self.setBakeRange_clicked)

    #
    # UI query methods
    #
    def get_parentname(self):
        """Return the node name currently entered in the Parent line edit."""
        return self.ui.lineEdit_Parent.text()

    def get_translate_switches(self):
        """Return the (X, Y, Z) checked states of the Translate check boxes."""
        return (self.ui.checkBox_TranslateX.isChecked(),
                self.ui.checkBox_TranslateY.isChecked(),
                self.ui.checkBox_TranslateZ.isChecked())

    def get_rotate_switches(self):
        """Return the (X, Y, Z) checked states of the Rotate check boxes."""
        return (self.ui.checkBox_RotateX.isChecked(),
                self.ui.checkBox_RotateY.isChecked(),
                self.ui.checkBox_RotateZ.isChecked())

    def get_bakestart(self):
        """Return the bake range start frame from the spin box."""
        return self.ui.spinBox_BakeStart.value()

    def get_bakeend(self):
        """Return the bake range end frame from the spin box."""
        return self.ui.spinBox_BakeEnd.value()

    #
    # UI edit methods
    #
    def set_parentname(self, name=None):
        """Set the Parent line edit text.

        When *name* is None the name of the first currently selected node
        is used; with no selection the field is left unchanged.
        """
        _name = name
        if name is None:
            selections = ls(selection=True)
            if selections:
                _name = selections[0].name()
        if _name is not None:
            self.ui.lineEdit_Parent.setText(_name)

    def set_bakestart(self, value):
        """Set the bake range start spin box to *value*."""
        self.ui.spinBox_BakeStart.setValue(value)

    def set_bakeend(self, value):
        """Set the bake range end spin box to *value*."""
        self.ui.spinBox_BakeEnd.setValue(value)

    #
    # UI update methods
    #
    def update_bakerange(self):
        """Sync the bake range spin boxes with the playback timeline."""
        self.set_bakestart(playbackOptions(q=1, minTime=True))
        self.set_bakeend(playbackOptions(q=1, maxTime=True))

    def update_all(self):
        """Refresh every UI element that mirrors scene state."""
        self.update_bakerange()

    #
    # slot callback functions
    #
    @Slot()
    def setParent_clicked(self):
        """Fill the Parent field from the current selection."""
        self.set_parentname()

    # Backward-compatible alias for the original (typo'd) slot name so any
    # external caller using the old spelling keeps working.
    setParent_cliciked = setParent_clicked

    @Slot()
    def clearParent_clicked(self):
        """Clear the Parent field."""
        self.set_parentname(name='')

    @Slot()
    def createConstraint_clicked(self):
        """Create space-switch constraints for the selected nodes."""
        undoInfo(openChunk=True)
        parent = None
        # Narrowed from a bare ``except:``: an empty/invalid parent name
        # simply means "constrain in world space", but KeyboardInterrupt
        # and SystemExit should not be swallowed.
        try:
            parent = PyNode(self.get_parentname())
        except Exception:
            pass
        try:
            core.switch_space(None, parent,
                              translate_switches=self.get_translate_switches(),
                              rotate_switches=self.get_rotate_switches())
        except Exception as err:
            print(str(err))
        finally:
            undoInfo(closeChunk=True)

    @Slot()
    def createAndBakeConstraint_clicked(self):
        """Create space-switch constraints and bake them over the bake range."""
        undoInfo(openChunk=True)
        parent = None
        # See createConstraint_clicked: invalid name -> world space.
        try:
            parent = PyNode(self.get_parentname())
        except Exception:
            pass
        try:
            core.switch_space(None, parent, self.get_translate_switches(), self.get_rotate_switches(),
                              bake=True, start=self.get_bakestart(), end=self.get_bakeend())
        except Exception as err:
            print(str(err))
        finally:
            undoInfo(closeChunk=True)

    @Slot()
    def deleteConstraint_clicked(self):
        """Delete the selected space-switch constraints."""
        undoInfo(openChunk=True)
        try:
            core.delete_switch_space_constraints()
        except Exception as err:
            print(str(err))
        finally:
            undoInfo(closeChunk=True)

    @Slot()
    def bakeAndDeleteConstraint_clicked(self):
        """Bake constrained animation over the bake range, then delete constraints."""
        undoInfo(openChunk=True)
        try:
            core.delete_switch_space_constraints(bake=True, start=self.get_bakestart(), end=self.get_bakeend())
        except Exception as err:
            print(str(err))
        finally:
            undoInfo(closeChunk=True)

    @Slot()
    def setBakeRange_clicked(self):
        """Copy the playback timeline range into the bake range spin boxes."""
        self.update_bakerange()
def launch_ui(window_title='SpaceSwitcher'):
    """Show the SpaceSwitcher window, replacing any existing one.

    Closes an already-open SpaceSwitcher window with the same title, then
    creates a new :class:`ControlMainWindow` parented to the Maya main
    window and shows it.

    Args:
        window_title (str): Title for the new window and key used to find
            an existing instance.
    """
    existing_win_ptr = OpenMayaUI.MQtUtil.findWindow('SpaceSwitcherWindow')
    if existing_win_ptr:
        # Bug fix: the original used ``long(...)``, which is a NameError on
        # Python 3 (the interpreter used with shiboken2/PySide2).  ``int``
        # accepts the pointer on both Python 2 and 3.
        existing_win = wrapInstance(int(existing_win_ptr), QtWidgets.QMainWindow)
        if existing_win:
            if existing_win.windowTitle() == window_title:
                existing_win.close()
    main_win = ControlMainWindow(window_title,
                                 parent=wrapInstance(int(OpenMayaUI.MQtUtil.mainWindow()), QtWidgets.QWidget))
    main_win.setAttribute(QtCore.Qt.WA_DeleteOnClose)
    main_win.setWindowTitle(window_title)
    main_win.update_all()
    main_win.show()
| [
"Qt.QtCore.QMetaObject.connectSlotsByName",
"Qt.QtCore.Slot",
"Qt.QtWidgets.QLabel",
"Qt.QtGui.QFont",
"Qt.QtWidgets.QFrame",
"Qt.QtWidgets.QHBoxLayout",
"maya.OpenMayaUI.MQtUtil.mainWindow",
"Qt.QtWidgets.QLineEdit",
"Qt.QtWidgets.QVBoxLayout",
"Qt.QtWidgets.QSizePolicy",
"Qt.QtWidgets.QPushBut... | [((26133, 26139), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (26137, 26139), False, 'from Qt.QtCore import Slot\n'), ((26210, 26216), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (26214, 26216), False, 'from Qt.QtCore import Slot\n'), ((26297, 26303), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (26301, 26303), False, 'from Qt.QtCore import Slot\n'), ((26831, 26837), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (26835, 26837), False, 'from Qt.QtCore import Slot\n'), ((27370, 27376), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (27374, 27376), False, 'from Qt.QtCore import Slot\n'), ((27636, 27642), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (27640, 27642), False, 'from Qt.QtCore import Slot\n'), ((27970, 27976), 'Qt.QtCore.Slot', 'Slot', ([], {}), '()\n', (27974, 27976), False, 'from Qt.QtCore import Slot\n'), ((28115, 28167), 'maya.OpenMayaUI.MQtUtil.findWindow', 'OpenMayaUI.MQtUtil.findWindow', (['"""SpaceSwitcherWindow"""'], {}), "('SpaceSwitcherWindow')\n", (28144, 28167), True, 'import maya.OpenMayaUI as OpenMayaUI\n'), ((1647, 1726), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Fixed', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)\n', (1668, 1726), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((2342, 2380), 'Qt.QtWidgets.QWidget', 'QtWidgets.QWidget', (['SpaceSwitcherWindow'], {}), '(SpaceSwitcherWindow)\n', (2359, 2380), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((2402, 2490), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Maximum'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Maximum)\n', (2423, 2490), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((3014, 3055), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.centralWidget'], {}), '(self.centralWidget)\n', (3035, 3055), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((3267, 
3303), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.centralWidget'], {}), '(self.centralWidget)\n', (3283, 3303), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((3511, 3549), 'Qt.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.frame_Root'], {}), '(self.frame_Root)\n', (3532, 3549), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((3727, 3760), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_Root'], {}), '(self.frame_Root)\n', (3743, 3760), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((3982, 4022), 'Qt.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.frame_Parent'], {}), '(self.frame_Parent)\n', (4003, 4022), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((4216, 4251), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_Parent'], {}), '(self.frame_Parent)\n', (4232, 4251), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((4508, 4556), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_LabelAndButton'], {}), '(self.frame_LabelAndButton)\n', (4529, 4556), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((4774, 4817), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_LabelAndButton'], {}), '(self.frame_LabelAndButton)\n', (4790, 4817), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((4839, 4929), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Maximum'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Maximum)\n', (4860, 4929), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((5165, 5178), 'Qt.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5176, 5178), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((5473, 5521), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_LabelAndButton'], {}), '(self.frame_LabelAndButton)\n', (5494, 5521), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((5543, 5631), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', 
(['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Fixed)\n', (5564, 5631), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((5883, 5896), 'Qt.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (5894, 5896), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((6230, 6278), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_LabelAndButton'], {}), '(self.frame_LabelAndButton)\n', (6251, 6278), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((6300, 6388), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Fixed)\n', (6321, 6388), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((7006, 7044), 'Qt.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.frame_Parent'], {}), '(self.frame_Parent)\n', (7025, 7044), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((7299, 7332), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_Root'], {}), '(self.frame_Root)\n', (7315, 7332), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((7604, 7654), 'Qt.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.frame_CreateConstraint'], {}), '(self.frame_CreateConstraint)\n', (7625, 7654), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((7890, 7935), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_CreateConstraint'], {}), '(self.frame_CreateConstraint)\n', (7906, 7935), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((7957, 8047), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Maximum'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Maximum)\n', (7978, 8047), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((8303, 8316), 'Qt.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (8314, 8316), False, 'from Qt import QtCore, QtGui, 
QtWidgets\n'), ((8780, 8825), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_CreateConstraint'], {}), '(self.frame_CreateConstraint)\n', (8796, 8825), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((9107, 9160), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_TranslateCheckBoxes'], {}), '(self.frame_TranslateCheckBoxes)\n', (9128, 9160), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((9401, 9449), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_TranslateCheckBoxes'], {}), '(self.frame_TranslateCheckBoxes)\n', (9417, 9449), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((9669, 9720), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_TranslateCheckBoxes'], {}), '(self.frame_TranslateCheckBoxes)\n', (9688, 9720), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((9742, 9828), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (9763, 9828), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((10340, 10391), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_TranslateCheckBoxes'], {}), '(self.frame_TranslateCheckBoxes)\n', (10359, 10391), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((10413, 10499), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (10434, 10499), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((11011, 11062), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_TranslateCheckBoxes'], {}), '(self.frame_TranslateCheckBoxes)\n', (11030, 11062), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((11084, 11170), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), 
'(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (11105, 11170), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((11764, 11809), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_CreateConstraint'], {}), '(self.frame_CreateConstraint)\n', (11780, 11809), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((12076, 12126), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_RotateCheckBoxes'], {}), '(self.frame_RotateCheckBoxes)\n', (12097, 12126), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((12352, 12397), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_RotateCheckBoxes'], {}), '(self.frame_RotateCheckBoxes)\n', (12368, 12397), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((12596, 12644), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_RotateCheckBoxes'], {}), '(self.frame_RotateCheckBoxes)\n', (12615, 12644), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((12666, 12752), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (12687, 12752), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((13237, 13285), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_RotateCheckBoxes'], {}), '(self.frame_RotateCheckBoxes)\n', (13256, 13285), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((13307, 13393), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (13328, 13393), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((13878, 13926), 'Qt.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.frame_RotateCheckBoxes'], {}), '(self.frame_RotateCheckBoxes)\n', (13897, 13926), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((13948, 14034), 'Qt.QtWidgets.QSizePolicy', 
'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Maximum', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.\n Fixed)\n', (13969, 14034), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((14608, 14653), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_CreateConstraint'], {}), '(self.frame_CreateConstraint)\n', (14624, 14653), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((14955, 15012), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_CreateConstraintButtons'], {}), '(self.frame_CreateConstraintButtons)\n', (14976, 15012), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((15281, 15338), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_CreateConstraintButtons'], {}), '(self.frame_CreateConstraintButtons)\n', (15302, 15338), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((15678, 15735), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_CreateConstraintButtons'], {}), '(self.frame_CreateConstraintButtons)\n', (15699, 15735), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((16191, 16224), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_Root'], {}), '(self.frame_Root)\n', (16207, 16224), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((16496, 16546), 'Qt.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.frame_DeleteConstraint'], {}), '(self.frame_DeleteConstraint)\n', (16517, 16546), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((16782, 16827), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_DeleteConstraint'], {}), '(self.frame_DeleteConstraint)\n', (16798, 16827), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((16849, 16939), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Maximum'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Maximum)\n', (16870, 16939), False, 'from Qt import QtCore, QtGui, 
QtWidgets\n'), ((17195, 17208), 'Qt.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (17206, 17208), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((17674, 17719), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_DeleteConstraint'], {}), '(self.frame_DeleteConstraint)\n', (17690, 17719), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((18021, 18078), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_DeleteConstraintButtons'], {}), '(self.frame_DeleteConstraintButtons)\n', (18042, 18078), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((18347, 18404), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_DeleteConstraintButtons'], {}), '(self.frame_DeleteConstraintButtons)\n', (18368, 18404), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((18744, 18801), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_DeleteConstraintButtons'], {}), '(self.frame_DeleteConstraintButtons)\n', (18765, 18801), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((19250, 19283), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_Root'], {}), '(self.frame_Root)\n', (19266, 19283), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((19520, 19563), 'Qt.QtWidgets.QVBoxLayout', 'QtWidgets.QVBoxLayout', (['self.frame_BakeRange'], {}), '(self.frame_BakeRange)\n', (19541, 19563), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((19767, 19805), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_BakeRange'], {}), '(self.frame_BakeRange)\n', (19783, 19805), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((20052, 20098), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_BakeRangeTop'], {}), '(self.frame_BakeRangeTop)\n', (20073, 20098), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((20311, 20352), 'Qt.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.frame_BakeRangeTop'], {}), '(self.frame_BakeRangeTop)\n', (20327, 20352), False, 'from Qt import QtCore, QtGui, 
QtWidgets\n'), ((20374, 20464), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Maximum'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Maximum)\n', (20395, 20464), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((20706, 20719), 'Qt.QtGui.QFont', 'QtGui.QFont', ([], {}), '()\n', (20717, 20719), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((21037, 21083), 'Qt.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.frame_BakeRangeTop'], {}), '(self.frame_BakeRangeTop)\n', (21058, 21083), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((21105, 21193), 'Qt.QtWidgets.QSizePolicy', 'QtWidgets.QSizePolicy', (['QtWidgets.QSizePolicy.Preferred', 'QtWidgets.QSizePolicy.Fixed'], {}), '(QtWidgets.QSizePolicy.Preferred, QtWidgets.\n QSizePolicy.Fixed)\n', (21126, 21193), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((21865, 21903), 'Qt.QtWidgets.QFrame', 'QtWidgets.QFrame', (['self.frame_BakeRange'], {}), '(self.frame_BakeRange)\n', (21881, 21903), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((22180, 22232), 'Qt.QtWidgets.QHBoxLayout', 'QtWidgets.QHBoxLayout', (['self.frame_BakeRangeSpinBoxes'], {}), '(self.frame_BakeRangeSpinBoxes)\n', (22201, 22232), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((22471, 22520), 'Qt.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.frame_BakeRangeSpinBoxes'], {}), '(self.frame_BakeRangeSpinBoxes)\n', (22489, 22520), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((22903, 22952), 'Qt.QtWidgets.QSpinBox', 'QtWidgets.QSpinBox', (['self.frame_BakeRangeSpinBoxes'], {}), '(self.frame_BakeRangeSpinBoxes)\n', (22921, 22952), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((23602, 23660), 'Qt.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['SpaceSwitcherWindow'], {}), '(SpaceSwitcherWindow)\n', (23639, 23660), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((1999, 
2021), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(246)', '(256)'], {}), '(246, 256)\n', (2011, 2021), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((2066, 2088), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(246)', '(256)'], {}), '(246, 256)\n', (2078, 2088), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((2755, 2773), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(0)', '(0)'], {}), '(0, 0)\n', (2767, 2773), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((2817, 2849), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(16777215)'], {}), '(16777215, 16777215)\n', (2829, 2849), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((6680, 6706), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(52)', '(16777215)'], {}), '(52, 16777215)\n', (6692, 6706), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((21497, 21529), 'Qt.QtCore.QSize', 'QtCore.QSize', (['(16777215)', '(16777215)'], {}), '(16777215, 16777215)\n', (21509, 21529), False, 'from Qt import QtCore, QtGui, QtWidgets\n'), ((28503, 28534), 'maya.OpenMayaUI.MQtUtil.mainWindow', 'OpenMayaUI.MQtUtil.mainWindow', ([], {}), '()\n', (28532, 28534), True, 'import maya.OpenMayaUI as OpenMayaUI\n')] |
from google_translate_py import AsyncTranslator
from ciri import HelpStr
from ciri.utils import ciri_cmd, eor
@ciri_cmd(pattern="at(?: |$)(.*)")
async def _at(e):
    """Translate the text that follows the command.

    Usage: ``.at <lang> <text>`` or ``.at <text>`` (target defaults to
    English). The source language is auto-detected by the translator.
    """
    # Split into at most 3 parts: command, language code, remaining text.
    # maxsplit=2 keeps multi-word text intact in payload[2].
    payload = e.text.split(maxsplit=2)
    if len(payload) < 2:
        # Nothing to translate at all.
        await eor(e, "Usage: .at <lang> <text>")
        return
    if len(payload) == 3:
        lang = payload[1]
        text = payload[2]
    else:
        # Only text was given; default the target language to English.
        lang = "en"
        text = payload[1]
    # Empty source-language string lets the translator auto-detect it.
    tr = await AsyncTranslator().translate(text, "", lang)
    await eor(e, tr)
# Register this plugin's command in the global help registry.
_AT_HELP = {
    "autotranslate": {
        "at": {
            "description": "Auto Translate text while typing",
            "usage": ".at <lang> <text>",
        }
    }
}
HelpStr.update(_AT_HELP)
| [
"ciri.utils.eor",
"google_translate_py.AsyncTranslator",
"ciri.HelpStr.update",
"ciri.utils.ciri_cmd"
] | [((114, 147), 'ciri.utils.ciri_cmd', 'ciri_cmd', ([], {'pattern': '"""at(?: |$)(.*)"""'}), "(pattern='at(?: |$)(.*)')\n", (122, 147), False, 'from ciri.utils import ciri_cmd, eor\n'), ((435, 563), 'ciri.HelpStr.update', 'HelpStr.update', (["{'autotranslate': {'at': {'description': 'Auto Translate text while typing',\n 'usage': '.at <lang> <text>'}}}"], {}), "({'autotranslate': {'at': {'description':\n 'Auto Translate text while typing', 'usage': '.at <lang> <text>'}}})\n", (449, 563), False, 'from ciri import HelpStr\n'), ((408, 418), 'ciri.utils.eor', 'eor', (['e', 'tr'], {}), '(e, tr)\n', (411, 418), False, 'from ciri.utils import ciri_cmd, eor\n'), ((354, 371), 'google_translate_py.AsyncTranslator', 'AsyncTranslator', ([], {}), '()\n', (369, 371), False, 'from google_translate_py import AsyncTranslator\n')] |
from rest_framework import serializers
from rest_framework.serializers import Serializer, ModelSerializer
from core.models import KeyUrlMap
class RedirectionSerializer(Serializer):
    # Placeholder serializer: the redirection endpoint carries no request
    # body, so no fields are declared here.
    pass
class CreateMappingSerializer(ModelSerializer):
    """Serializer for creating a KeyUrlMap entry.

    Accepts the long ``url`` on input and exposes the generated
    ``potato_url`` as a read-only, computed field on output.
    """

    # Value is computed by get_potato_url() below (DRF method-field hook).
    potato_url = serializers.SerializerMethodField(read_only=True)

    class Meta:
        model = KeyUrlMap
        fields = ('url', 'potato_url')

    @staticmethod
    def get_potato_url(obj):
        # Delegates to the model's own URL builder.
        return obj.get_potato_url()
| [
"rest_framework.serializers.SerializerMethodField"
] | [((260, 309), 'rest_framework.serializers.SerializerMethodField', 'serializers.SerializerMethodField', ([], {'read_only': '(True)'}), '(read_only=True)\n', (293, 309), False, 'from rest_framework import serializers\n')] |
# Copyright 2019 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ros2cli.node.strategy import NodeStrategy
from ros2service.api import get_service_names_and_types
from ros2service.verb import VerbExtension
from ros2srv.api import service_type_completer
class FindVerb(VerbExtension):
    """Output a list of available services of a given type."""

    def add_arguments(self, parser, cli_name):
        """Register this verb's command-line options on *parser*."""
        type_arg = parser.add_argument(
            'service_type',
            help="Name of the ROS service type to filter for "
                 "(e.g. 'rcl_interfaces/srv/ListParameters')")
        type_arg.completer = service_type_completer
        parser.add_argument(
            '-c', '--count-services', action='store_true',
            help='Only display the number of services discovered')
        # duplicate the following argument from the command for visibility
        parser.add_argument(
            '--include-hidden-services', action='store_true',
            help='Consider hidden services as well')

    def main(self, *, args):
        """Query the ROS graph and print matching services (or their count)."""
        with NodeStrategy(args) as node:
            names_and_types = get_service_names_and_types(
                node=node,
                include_hidden_services=args.include_hidden_services)

        # Keep every service whose advertised type list contains the filter.
        matches = [
            name for name, types in names_and_types
            if args.service_type in types
        ]

        if args.count_services:
            print(len(matches))
        else:
            for name in matches:
                print(name)
| [
"ros2cli.node.strategy.NodeStrategy",
"ros2service.api.get_service_names_and_types"
] | [((1569, 1587), 'ros2cli.node.strategy.NodeStrategy', 'NodeStrategy', (['args'], {}), '(args)\n', (1581, 1587), False, 'from ros2cli.node.strategy import NodeStrategy\n'), ((1635, 1732), 'ros2service.api.get_service_names_and_types', 'get_service_names_and_types', ([], {'node': 'node', 'include_hidden_services': 'args.include_hidden_services'}), '(node=node, include_hidden_services=args.\n include_hidden_services)\n', (1662, 1732), False, 'from ros2service.api import get_service_names_and_types\n')] |
# Runs on each webserver and funnels /var/log/auth.log contents to the Kafka
# broker under topic 'auth.log'.  This is a Kafka producer.
from kafka import KafkaProducer
import time
import json

# Seconds to wait before polling the log file again when no new line arrived.
POLL_INTERVAL = 10

# JSON-encode every payload so consumers receive structured records.
producer = KafkaProducer(
    bootstrap_servers='broker:9092',
    value_serializer=lambda v: json.dumps(v).encode('ascii'))

# Context manager releases the handle should the follow loop ever exit.
with open('/var/log/auth.log', 'r') as logfile:
    # First ship everything already present in the file...
    for l in logfile:
        producer.send('auth.log', {'ws1': l})
    # ...then follow the file tail-style, forwarding new lines as they appear.
    logfile.seek(0, 2)
    while True:
        line = logfile.readline()
        if not line:
            time.sleep(POLL_INTERVAL)
            continue
        producer.send('auth.log', {'ws1': line})
| [
"json.dumps",
"time.sleep"
] | [((513, 527), 'time.sleep', 'time.sleep', (['(10)'], {}), '(10)\n', (523, 527), False, 'import time\n'), ((267, 280), 'json.dumps', 'json.dumps', (['v'], {}), '(v)\n', (277, 280), False, 'import json\n')] |
import random
import matplotlib
import numpy
from matplotlib import pyplot as plt
from matplotlib.ticker import PercentFormatter
from Resources.data_loader import load_data
from Utils.CrossEntropy import CrossEntropySolver
from Algorithms.CrossEntropy.Solver.ACCADE_cross_entropy_solver import ACCADECrossEntropySolver
from Algorithms.CrossEntropy.Solver.DANE_cross_entropy_solver import DANECrossEntropySolver
from Algorithms.CrossEntropy.Solver.GIANT_cross_entropy_solver import GIANTCrossEntropySolver
from Algorithms.CrossEntropy.Solver.FedGD_cross_entropy_solver import FedGDCrossEntropySolver
from Algorithms.CrossEntropy.Solver.Fedsplit_cross_entropy_solver import FedSplitCrossEntropySolver
from keras.datasets import fashion_mnist
import sys
from constants import color_list, marker_list, GS_DCA, DCA_ONLY, GS_SDR, SDR_ONLY, PERFECT_AGGREGATION, \
first_order_list, second_order_list, GBMA, THRESHOLD, DC_FRAMEWORK
# Make the project root importable when this demo runs from its subfolder.
home_dir = '../'
sys.path.append(home_dir)
class CrossEntropyDemo(object):
    """Benchmark several distributed cross-entropy solvers over a simulated
    wireless multiple-access channel.

    For every (repeat, antenna count, tau) combination the demo draws one
    random fading channel, runs each solver for ``max_iter`` communication
    rounds, saves the loss/accuracy curves under
    ``Outputs/cross_entropy_demo/`` and can plot the stored results.
    """

    def __init__(self, data_name, max_iter, repeat, gamma, sigma, p, m, distance_list, data_size_list):
        self.data_name = data_name
        self.max_iter = max_iter            # communication rounds per run
        self.repeat = repeat                # independent Monte-Carlo repeats
        self.gamma = gamma                  # solver regularization/step parameter
        self.sigma = sigma                  # channel noise level
        self.p = p                          # transmit power budget
        self.m = m                          # number of edge devices
        self.distance_list = distance_list  # per-device distances (drive path loss)
        self.data_size_list = data_size_list  # per-device local dataset sizes
        # Populated by fit():
        self.n = None
        self.d = None
        self.x_train = None
        self.x_test = None
        self.y_train = None
        self.y_test = None
        self.w_opt = None
        self.cond_num = None
        self.num_class = None
        self.shards = None

    def fit(self, x_train, y_train, shards, x_test, y_test):
        """Attach the dataset/shard assignment and load the precomputed
        optimal model ``w_opt`` from ``Resources/<data_name>_optimal.npz``."""
        self.x_train = x_train
        self.y_train = y_train
        self.shards = shards
        self.x_test = x_test
        self.y_test = y_test
        self.n, self.d = self.x_train.shape
        print(self.x_train.shape)
        print(self.y_train.shape)
        # Labels are assumed to be 0..C-1, so the class count is max label + 1.
        self.num_class = numpy.max(self.y_train) + 1
        file_name = home_dir + 'Resources/' + self.data_name + '_optimal.npz'
        npz_file = numpy.load(file_name)
        self.w_opt = npz_file['w_opt']
        print(self.w_opt)
        print(self.w_opt.shape)

    def _sample_channel(self, k):
        """Draw an i.i.d. complex Gaussian channel of shape (max_iter, k, m)
        and scale each device's coefficients by its path-loss gain."""
        h_mat = numpy.random.randn(self.max_iter, k, self.m) / numpy.sqrt(
            2) + 1j * numpy.random.randn(self.max_iter, k, self.m) / numpy.sqrt(2)
        for device in range(self.m):
            # Path-loss model: PL = 10^2 * (d / 1 m)^(-3.76)
            PL = (10 ** 2) * ((self.distance_list[device] / 1) ** (-3.76))
            h_mat[:, :, device] = numpy.sqrt(PL) * h_mat[:, :, device]
        return h_mat

    def _run_solver(self, solver_cls, solver_name, opt_mode, file_tag,
                    h_mat, tau, k, r, is_search, newton_iter):
        """Train one solver on the shared channel realization and persist its
        error/accuracy curves under the standard file-naming scheme."""
        solver = solver_cls(m=self.m, h_mat=h_mat, tau=tau, p=self.p,
                            x_test=self.x_test, y_test=self.y_test,
                            opt_mode=opt_mode, num_class=self.num_class)
        solver.fit(self.x_train, self.y_train, self.data_size_list, self.shards)
        err, acc = solver.train(self.gamma, self.w_opt, max_iter=self.max_iter,
                                is_search=is_search, newton_max_iter=newton_iter)
        out_file_name = (home_dir + 'Outputs/cross_entropy_demo/cross_entropy_demo_'
                         + solver_name + '_' + self.data_name + '_antenna_' + str(k)
                         + '_tau_' + str(tau) + '_repeat_' + str(r)
                         + '_' + file_tag + '.npz')
        numpy.savez(out_file_name, err=err, acc=acc, data_name=self.data_name)

    def perform_training(self, tau_list, k_list, modes, is_search=True, newton_iter=100):
        """Run every (repeat, antenna count, tau) combination through the full
        solver line-up; all solvers in one combination share one channel."""
        # (solver class, name used in the file name, aggregation mode, file tag)
        solver_specs = [
            (ACCADECrossEntropySolver, 'ACCADE', DCA_ONLY, 'GS-DCA'),
            (FedGDCrossEntropySolver, 'FedGD', DC_FRAMEWORK, 'DC_FRAMEWORK'),
            (FedSplitCrossEntropySolver, 'FedSplit', THRESHOLD, 'THRESHOLD'),
            (DANECrossEntropySolver, 'DANE', DCA_ONLY, 'DCA only'),
            (GIANTCrossEntropySolver, 'GIANT', DCA_ONLY, 'DCA only'),
        ]
        for r in range(self.repeat):
            for i in range(len(k_list)):
                for j in range(len(tau_list)):
                    print('repeat ' + str(r) + ' : k = ' + str(k_list[i]) + ' , tau = ' + str(tau_list[j]))
                    h_mat = self._sample_channel(k_list[i])
                    for solver_cls, name, mode, tag in solver_specs:
                        self._run_solver(solver_cls, name, mode, tag, h_mat,
                                         tau_list[j], k_list[i], r,
                                         is_search, newton_iter)

    def plot_results_versus_iteration(self, data_name, k, tau, modes, solvers, repeat, max_iter, legends):
        """Load the stored curves (centralized baseline + each solver) and plot
        the median training loss and test accuracy versus round number."""
        err_mat = numpy.zeros((len(modes) + 1, repeat, max_iter))
        acc_mat = numpy.zeros((len(modes) + 1, repeat, max_iter))
        # Row 0 holds the centralized-training baseline.
        for r in range(repeat):
            file_name = (home_dir + 'Outputs/centralized_training_demo/centralized_training_demo_'
                         + data_name + '_repeat_' + str(r) + '.npz')
            npz_file = numpy.load(file_name)
            err_mat[0][r] = npz_file['err']
            acc_mat[0][r] = npz_file['acc']
        # Rows 1.. hold one solver/mode pair each.
        for j in range(len(solvers)):
            for r in range(repeat):
                file_name = (home_dir + 'Outputs/cross_entropy_demo/cross_entropy_demo_'
                             + solvers[j] + '_' + data_name + '_antenna_' + str(k)
                             + '_tau_' + str(tau) + '_repeat_' + str(r)
                             + '_' + modes[j] + '.npz')
                npz_file = numpy.load(file_name)
                err_mat[j + 1][r] = npz_file['err']
                acc_mat[j + 1][r] = npz_file['acc']
        # --- training loss (log scale) ---
        fig = plt.figure(figsize=(9, 8))
        matplotlib.rcParams['mathtext.fontset'] = 'stix'
        matplotlib.rcParams['font.family'] = 'STIXGeneral'
        line_list = []
        for i in range(len(modes) + 1):
            line, = plt.semilogy(numpy.median(err_mat[i], axis=0), color=color_list[i], linestyle='-',
                                marker=marker_list[i],
                                markerfacecolor='none', ms=7, markeredgewidth=2.5, linewidth=2.5)
            line_list.append(line)
        plt.legend(line_list, legends, fontsize=20)
        plt.xlabel('Communication Rounds', fontsize=20)
        plt.ylabel('Training Loss', fontsize=20)
        plt.xlim(0, max_iter - 1)
        plt.ylim(0.25, 2.2)
        plt.tight_layout()
        plt.grid()
        image_name = (home_dir + 'Outputs/cross_entropy_demo/cross_entropy_demo_err_'
                      + data_name + '_antenna_' + str(k) + '_tau_' + str(tau) + '.pdf')
        fig.savefig(image_name, format='pdf', dpi=1200)
        plt.show()
        # --- test accuracy ---
        fig = plt.figure(figsize=(9, 8))
        line_list = []
        for i in range(len(modes) + 1):
            line, = plt.plot(numpy.median(acc_mat[i], axis=0), color=color_list[i], linestyle='-',
                            marker=marker_list[i],
                            markerfacecolor='none', ms=7, markeredgewidth=2.5, linewidth=2.5, clip_on=False)
            line_list.append(line)
        plt.legend(line_list, legends, fontsize=20)
        plt.xlabel('Communication Rounds', fontsize=20)
        plt.ylabel('Test Accuracy', fontsize=20)
        plt.xlim(0, max_iter - 1)
        plt.gca().yaxis.set_major_formatter(PercentFormatter(1))
        plt.tight_layout()
        plt.grid()
        image_name = (home_dir + 'Outputs/cross_entropy_demo/cross_entropy_demo_acc_'
                      + data_name + '_antenna_' + str(k) + '_tau_' + str(tau) + '.pdf')
        fig.savefig(image_name, format='pdf', dpi=1200)
        plt.show()
def normalization(x_train, x_test):
    """Standardize each array to zero mean and unit variance.

    Note that train and test are normalized with their *own* statistics,
    each independently of the other.
    """
    def _standardize(arr):
        # (x - mean) / std, with the population std (ddof=0) as numpy.var uses.
        return (arr - numpy.mean(arr)) / numpy.sqrt(numpy.var(arr))

    return _standardize(x_train), _standardize(x_test)
if __name__ == '__main__':
    # --- experiment hyper-parameters ---
    max_iter = 25
    repeat = 5
    gamma = 1e-8
    sigma = 1
    tau = numpy.sqrt(10)
    k = 5
    p = 1
    m = 10
    is_search = True
    newton_iter = 50
    datasets = ['fashion_mnist']
    tau_list = [1e-9]
    k_list = [5]
    # modes = [GS_DCA, PERFECT_AGGREGATION, DC_FRAMEWORK, THRESHOLD, DCA_ONLY, DCA_ONLY]
    # solvers = ['ACCADE', 'ACCADE', 'FedGD', 'FedSplit', 'GIANT', 'DANE']
    # legends = ['Proposed Algorithm', 'Baseline 0', 'Baseline 1', 'Baseline 2', 'Baseline 3', 'Baseline 4']
    # Aggregation mode, solver name and plot legend for each compared method
    # (index-aligned lists; legends has one extra entry for the centralized run).
    modes = [GS_DCA, DC_FRAMEWORK, THRESHOLD, DCA_ONLY, DCA_ONLY]
    solvers = ['ACCADE', 'FedGD', 'FedSplit', 'GIANT', 'DANE']
    legends = ['Baseline 0', 'Proposed Algorithm', 'Baseline 1', 'Baseline 2', 'Baseline 3', 'Baseline 4']
    for data_name in datasets:
        # Load Fashion-MNIST and flatten the 28x28 images into vectors.
        (x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
        train_n = x_train.shape[0]
        test_n = x_test.shape[0]
        print(x_train.shape)
        print(y_test.shape)
        x_train = x_train.reshape(train_n, 28 * 28)
        # Sort by label so consecutive shards are label-homogeneous (non-IID).
        idx = numpy.argsort(y_train)
        # idx = numpy.random.permutation(train_n)
        y_train = numpy.array(y_train).reshape(train_n, 1)
        x_test = x_test.reshape(test_n, 28 * 28)
        y_test = numpy.array(y_test).reshape(test_n, 1)
        x_train, x_test = normalization(x_train, x_test)

        # non-iid data distribution construction
        # print(idx)
        x_train = x_train[idx]
        y_train = y_train[idx]
        # Split the label-sorted data into 6*m shards and deal each device
        # a random hand of 6 shards.
        shard_size = train_n // (6 * m)
        sub_shards = [range(i, i + shard_size) for i in range(0, 6 * shard_size * m, shard_size)]
        shard_ls = random.sample(range(6 * m), k=6 * m)
        # first_shards = [sub_shards[shard_ls[i]] for i in range(0, 2 * m, 2)]
        # second_shards = [sub_shards[shard_ls[i + 1]] for i in range(0, 2 * m, 2)]
        # shards = [list(sub_shards[shard_ls[i]]) + list(sub_shards[shard_ls[i+1]]) for i in range(0, 2 * m, 2)]
        shards = [list(sub_shards[shard_ls[i]]) + list(sub_shards[shard_ls[i + 1]]) + list(
            sub_shards[shard_ls[i + 2]]) + list(sub_shards[shard_ls[i + 3]]) + list(sub_shards[shard_ls[i + 4]]) + list(
            sub_shards[shard_ls[i + 5]]) for i
                  in range(0, 6 * m, 6)]
        # print(shards[0])

        # heterogeneity construction for data size and distance
        distance_list = numpy.random.randint(100, 120, size=m)
        # distance_list[0: int(m / 10)] = numpy.random.randint(5, 10, size=int(m / 10))
        # distance_list[int(m / 10):] = numpy.random.randint(100, 120, size=9 * int(m / 10))
        perm = numpy.random.permutation(m)
        distance_list = distance_list[perm]
        # print(distance_list)
        data_size_list = numpy.zeros(m, dtype=int)
        data_size_list[0:m] = 6 * shard_size
        # data_size_list[0: int(m / 10)] = numpy.random.randint(int(0.08 * s), int(0.1 * s + 1), size=int(m / 10))
        # data_size_list[int(m / 10):] = numpy.random.randint(int(1 * s), int(1.1 * s + 1), size=9 * int(m / 10))
        perm = numpy.random.permutation(m)
        data_size_list = data_size_list[perm]

        # Train all solvers, then plot the stored curves for every (k, tau).
        demo = CrossEntropyDemo(data_name, max_iter, repeat, gamma, sigma, p, m, distance_list, data_size_list)
        demo.fit(x_train, y_train, shards, x_test, y_test)
        demo.perform_training(tau_list, k_list, modes, is_search=is_search, newton_iter=newton_iter)
        for k in k_list:
            for tau in tau_list:
                demo.plot_results_versus_iteration(data_name, k, tau, modes, solvers, repeat, max_iter + 1, legends)
| [
"numpy.load",
"Algorithms.CrossEntropy.Solver.GIANT_cross_entropy_solver.GIANTCrossEntropySolver",
"numpy.argsort",
"matplotlib.pyplot.figure",
"numpy.mean",
"numpy.random.randint",
"matplotlib.pyplot.gca",
"numpy.sqrt",
"matplotlib.pyplot.tight_layout",
"Algorithms.CrossEntropy.Solver.FedGD_cross... | [((950, 975), 'sys.path.append', 'sys.path.append', (['home_dir'], {}), '(home_dir)\n', (965, 975), False, 'import sys\n'), ((10791, 10810), 'numpy.mean', 'numpy.mean', (['x_train'], {}), '(x_train)\n', (10801, 10810), False, 'import numpy\n'), ((10943, 10961), 'numpy.mean', 'numpy.mean', (['x_test'], {}), '(x_test)\n', (10953, 10961), False, 'import numpy\n'), ((11232, 11246), 'numpy.sqrt', 'numpy.sqrt', (['(10)'], {}), '(10)\n', (11242, 11246), False, 'import numpy\n'), ((2131, 2152), 'numpy.load', 'numpy.load', (['file_name'], {}), '(file_name)\n', (2141, 2152), False, 'import numpy\n'), ((8800, 8826), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 8)'}), '(figsize=(9, 8))\n', (8810, 8826), True, 'from matplotlib import pyplot as plt\n'), ((9306, 9349), 'matplotlib.pyplot.legend', 'plt.legend', (['line_list', 'legends'], {'fontsize': '(20)'}), '(line_list, legends, fontsize=20)\n', (9316, 9349), True, 'from matplotlib import pyplot as plt\n'), ((9358, 9405), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Communication Rounds"""'], {'fontsize': '(20)'}), "('Communication Rounds', fontsize=20)\n", (9368, 9405), True, 'from matplotlib import pyplot as plt\n'), ((9414, 9454), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Training Loss"""'], {'fontsize': '(20)'}), "('Training Loss', fontsize=20)\n", (9424, 9454), True, 'from matplotlib import pyplot as plt\n'), ((9463, 9488), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(max_iter - 1)'], {}), '(0, max_iter - 1)\n', (9471, 9488), True, 'from matplotlib import pyplot as plt\n'), ((9497, 9516), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(0.25)', '(2.2)'], {}), '(0.25, 2.2)\n', (9505, 9516), True, 'from matplotlib import pyplot as plt\n'), ((9525, 9543), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (9541, 9543), True, 'from matplotlib import pyplot as plt\n'), ((9552, 9562), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), 
'()\n', (9560, 9562), True, 'from matplotlib import pyplot as plt\n'), ((9791, 9801), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9799, 9801), True, 'from matplotlib import pyplot as plt\n'), ((9817, 9843), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 8)'}), '(figsize=(9, 8))\n', (9827, 9843), True, 'from matplotlib import pyplot as plt\n'), ((10209, 10252), 'matplotlib.pyplot.legend', 'plt.legend', (['line_list', 'legends'], {'fontsize': '(20)'}), '(line_list, legends, fontsize=20)\n', (10219, 10252), True, 'from matplotlib import pyplot as plt\n'), ((10261, 10308), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Communication Rounds"""'], {'fontsize': '(20)'}), "('Communication Rounds', fontsize=20)\n", (10271, 10308), True, 'from matplotlib import pyplot as plt\n'), ((10317, 10357), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Test Accuracy"""'], {'fontsize': '(20)'}), "('Test Accuracy', fontsize=20)\n", (10327, 10357), True, 'from matplotlib import pyplot as plt\n'), ((10366, 10391), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', '(max_iter - 1)'], {}), '(0, max_iter - 1)\n', (10374, 10391), True, 'from matplotlib import pyplot as plt\n'), ((10465, 10483), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (10481, 10483), True, 'from matplotlib import pyplot as plt\n'), ((10492, 10502), 'matplotlib.pyplot.grid', 'plt.grid', ([], {}), '()\n', (10500, 10502), True, 'from matplotlib import pyplot as plt\n'), ((10731, 10741), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10739, 10741), True, 'from matplotlib import pyplot as plt\n'), ((10835, 10853), 'numpy.var', 'numpy.var', (['x_train'], {}), '(x_train)\n', (10844, 10853), False, 'import numpy\n'), ((10893, 10922), 'numpy.subtract', 'numpy.subtract', (['x_train', 'mean'], {}), '(x_train, mean)\n', (10907, 10922), False, 'import numpy\n'), ((10986, 11003), 'numpy.var', 'numpy.var', (['x_test'], {}), '(x_test)\n', (10995, 11003), False, 'import 
numpy\n'), ((11042, 11070), 'numpy.subtract', 'numpy.subtract', (['x_test', 'mean'], {}), '(x_test, mean)\n', (11056, 11070), False, 'import numpy\n'), ((11981, 12006), 'keras.datasets.fashion_mnist.load_data', 'fashion_mnist.load_data', ([], {}), '()\n', (12004, 12006), False, 'from keras.datasets import fashion_mnist\n'), ((12199, 12221), 'numpy.argsort', 'numpy.argsort', (['y_train'], {}), '(y_train)\n', (12212, 12221), False, 'import numpy\n'), ((13514, 13552), 'numpy.random.randint', 'numpy.random.randint', (['(100)', '(120)'], {'size': 'm'}), '(100, 120, size=m)\n', (13534, 13552), False, 'import numpy\n'), ((13749, 13776), 'numpy.random.permutation', 'numpy.random.permutation', (['m'], {}), '(m)\n', (13773, 13776), False, 'import numpy\n'), ((13877, 13902), 'numpy.zeros', 'numpy.zeros', (['m'], {'dtype': 'int'}), '(m, dtype=int)\n', (13888, 13902), False, 'import numpy\n'), ((14192, 14219), 'numpy.random.permutation', 'numpy.random.permutation', (['m'], {}), '(m)\n', (14216, 14219), False, 'import numpy\n'), ((2006, 2029), 'numpy.max', 'numpy.max', (['self.y_train'], {}), '(self.y_train)\n', (2015, 2029), False, 'import numpy\n'), ((8117, 8138), 'numpy.load', 'numpy.load', (['file_name'], {}), '(file_name)\n', (8127, 8138), False, 'import numpy\n'), ((10436, 10455), 'matplotlib.ticker.PercentFormatter', 'PercentFormatter', (['(1)'], {}), '(1)\n', (10452, 10455), False, 'from matplotlib.ticker import PercentFormatter\n'), ((8581, 8602), 'numpy.load', 'numpy.load', (['file_name'], {}), '(file_name)\n', (8591, 8602), False, 'import numpy\n'), ((9038, 9070), 'numpy.median', 'numpy.median', (['err_mat[i]'], {'axis': '(0)'}), '(err_mat[i], axis=0)\n', (9050, 9070), False, 'import numpy\n'), ((9934, 9966), 'numpy.median', 'numpy.median', (['acc_mat[i]'], {'axis': '(0)'}), '(acc_mat[i], axis=0)\n', (9946, 9966), False, 'import numpy\n'), ((12290, 12310), 'numpy.array', 'numpy.array', (['y_train'], {}), '(y_train)\n', (12301, 12310), False, 'import numpy\n'), 
((12397, 12416), 'numpy.array', 'numpy.array', (['y_test'], {}), '(y_test)\n', (12408, 12416), False, 'import numpy\n'), ((3022, 3190), 'Algorithms.CrossEntropy.Solver.ACCADE_cross_entropy_solver.ACCADECrossEntropySolver', 'ACCADECrossEntropySolver', ([], {'m': 'self.m', 'h_mat': 'h_mat', 'tau': 'tau_list[j]', 'p': 'self.p', 'x_test': 'self.x_test', 'y_test': 'self.y_test', 'opt_mode': 'DCA_ONLY', 'num_class': 'self.num_class'}), '(m=self.m, h_mat=h_mat, tau=tau_list[j], p=self.p,\n x_test=self.x_test, y_test=self.y_test, opt_mode=DCA_ONLY, num_class=\n self.num_class)\n', (3046, 3190), False, 'from Algorithms.CrossEntropy.Solver.ACCADE_cross_entropy_solver import ACCADECrossEntropySolver\n'), ((3886, 3956), 'numpy.savez', 'numpy.savez', (['out_file_name'], {'err': 'err', 'acc': 'acc', 'data_name': 'self.data_name'}), '(out_file_name, err=err, acc=acc, data_name=self.data_name)\n', (3897, 3956), False, 'import numpy\n'), ((3987, 4157), 'Algorithms.CrossEntropy.Solver.FedGD_cross_entropy_solver.FedGDCrossEntropySolver', 'FedGDCrossEntropySolver', ([], {'m': 'self.m', 'h_mat': 'h_mat', 'tau': 'tau_list[j]', 'p': 'self.p', 'x_test': 'self.x_test', 'y_test': 'self.y_test', 'opt_mode': 'DC_FRAMEWORK', 'num_class': 'self.num_class'}), '(m=self.m, h_mat=h_mat, tau=tau_list[j], p=self.p,\n x_test=self.x_test, y_test=self.y_test, opt_mode=DC_FRAMEWORK,\n num_class=self.num_class)\n', (4010, 4157), False, 'from Algorithms.CrossEntropy.Solver.FedGD_cross_entropy_solver import FedGDCrossEntropySolver\n'), ((4803, 4873), 'numpy.savez', 'numpy.savez', (['out_file_name'], {'err': 'err', 'acc': 'acc', 'data_name': 'self.data_name'}), '(out_file_name, err=err, acc=acc, data_name=self.data_name)\n', (4814, 4873), False, 'import numpy\n'), ((4904, 5075), 'Algorithms.CrossEntropy.Solver.Fedsplit_cross_entropy_solver.FedSplitCrossEntropySolver', 'FedSplitCrossEntropySolver', ([], {'m': 'self.m', 'h_mat': 'h_mat', 'tau': 'tau_list[j]', 'p': 'self.p', 'x_test': 'self.x_test', 'y_test': 
'self.y_test', 'opt_mode': 'THRESHOLD', 'num_class': 'self.num_class'}), '(m=self.m, h_mat=h_mat, tau=tau_list[j], p=self.p,\n x_test=self.x_test, y_test=self.y_test, opt_mode=THRESHOLD, num_class=\n self.num_class)\n', (4930, 5075), False, 'from Algorithms.CrossEntropy.Solver.Fedsplit_cross_entropy_solver import FedSplitCrossEntropySolver\n'), ((5726, 5796), 'numpy.savez', 'numpy.savez', (['out_file_name'], {'err': 'err', 'acc': 'acc', 'data_name': 'self.data_name'}), '(out_file_name, err=err, acc=acc, data_name=self.data_name)\n', (5737, 5796), False, 'import numpy\n'), ((5827, 5993), 'Algorithms.CrossEntropy.Solver.DANE_cross_entropy_solver.DANECrossEntropySolver', 'DANECrossEntropySolver', ([], {'m': 'self.m', 'h_mat': 'h_mat', 'tau': 'tau_list[j]', 'p': 'self.p', 'x_test': 'self.x_test', 'y_test': 'self.y_test', 'opt_mode': 'DCA_ONLY', 'num_class': 'self.num_class'}), '(m=self.m, h_mat=h_mat, tau=tau_list[j], p=self.p,\n x_test=self.x_test, y_test=self.y_test, opt_mode=DCA_ONLY, num_class=\n self.num_class)\n', (5849, 5993), False, 'from Algorithms.CrossEntropy.Solver.DANE_cross_entropy_solver import DANECrossEntropySolver\n'), ((6631, 6701), 'numpy.savez', 'numpy.savez', (['out_file_name'], {'err': 'err', 'acc': 'acc', 'data_name': 'self.data_name'}), '(out_file_name, err=err, acc=acc, data_name=self.data_name)\n', (6642, 6701), False, 'import numpy\n'), ((6732, 6899), 'Algorithms.CrossEntropy.Solver.GIANT_cross_entropy_solver.GIANTCrossEntropySolver', 'GIANTCrossEntropySolver', ([], {'m': 'self.m', 'h_mat': 'h_mat', 'tau': 'tau_list[j]', 'p': 'self.p', 'x_test': 'self.x_test', 'y_test': 'self.y_test', 'opt_mode': 'DCA_ONLY', 'num_class': 'self.num_class'}), '(m=self.m, h_mat=h_mat, tau=tau_list[j], p=self.p,\n x_test=self.x_test, y_test=self.y_test, opt_mode=DCA_ONLY, num_class=\n self.num_class)\n', (6755, 6899), False, 'from Algorithms.CrossEntropy.Solver.GIANT_cross_entropy_solver import GIANTCrossEntropySolver\n'), ((7540, 7610), 'numpy.savez', 
'numpy.savez', (['out_file_name'], {'err': 'err', 'acc': 'acc', 'data_name': 'self.data_name'}), '(out_file_name, err=err, acc=acc, data_name=self.data_name)\n', (7551, 7610), False, 'import numpy\n'), ((10400, 10409), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (10407, 10409), True, 'from matplotlib import pyplot as plt\n'), ((2603, 2655), 'numpy.random.randn', 'numpy.random.randn', (['self.max_iter', 'k_list[i]', 'self.m'], {}), '(self.max_iter, k_list[i], self.m)\n', (2621, 2655), False, 'import numpy\n'), ((2658, 2671), 'numpy.sqrt', 'numpy.sqrt', (['(2)'], {}), '(2)\n', (2668, 2671), False, 'import numpy\n'), ((2759, 2772), 'numpy.sqrt', 'numpy.sqrt', (['(2)'], {}), '(2)\n', (2769, 2772), False, 'import numpy\n'), ((2955, 2969), 'numpy.sqrt', 'numpy.sqrt', (['PL'], {}), '(PL)\n', (2965, 2969), False, 'import numpy\n'), ((2704, 2756), 'numpy.random.randn', 'numpy.random.randn', (['self.max_iter', 'k_list[i]', 'self.m'], {}), '(self.max_iter, k_list[i], self.m)\n', (2722, 2756), False, 'import numpy\n')] |
# -*- coding: utf-8 -*-
__author__ = '<NAME>'
import threading
#import logging
#logger = logging.getLogger(__name__)
import idle_queue
from weak_ref import weak_ref
class Signal:
    """A named signal that dispatches to weakly-referenced callbacks.

    ``emit`` schedules the callbacks on the idle queue (thread-safe, holds
    the lock); ``emitNow`` invokes them synchronously on the calling thread.
    Dead weak references are purged lazily during emission.
    """
    def __init__(self, name):
        self.name = name        # human-readable signal name (debugging aid)
        self.callbacks = []     # weak references to connected callbacks
        self.lock = threading.Lock()
    def __call__(self, *arg, **kwargs):
        """Calling the signal object is shorthand for emit()."""
        self.emit(*arg, **kwargs)
    def connect(self, callback):
        """Register *callback*; only a weak reference is kept."""
        with self.lock:
            self.callbacks.append(weak_ref(callback))
    def disconnect(self, callback):
        """Remove the first registered entry whose referent is *callback*."""
        with self.lock:
            for index, weakref_callback in enumerate(self.callbacks):
                if callback == weakref_callback():
                    del self.callbacks[index]
                    break
    def emitNow(self, *args, **kwargs):
        """Invoke all live callbacks synchronously.

        BUG FIX: iterate over a snapshot of the list. Removing an element
        from a list while iterating it skips the element that follows, so
        consecutive dead references were not all purged.
        NOTE(review): unlike emit(), this method does not take self.lock —
        presumably intentional (allows re-entrant emission); confirm.
        """
        for weakref_callback in list(self.callbacks):
            callback = weakref_callback()
            if callback is not None:
                callback(*args, **kwargs)
            else:  # lost reference
                self.callbacks.remove(weakref_callback)
    def emit(self, *args, **kwargs):
        """Schedule all live callbacks on the idle queue.

        Iterates over a snapshot (see emitNow) so that purging dead
        references cannot skip entries.
        """
        with self.lock:
            for weakref_callback in list(self.callbacks):
                callback = weakref_callback()
                if callback is not None:
                    idle_queue.idle_add(callback, *args, **kwargs)
                else:  # lost reference
                    self.callbacks.remove(weakref_callback)
#if not self.callbacks:
#logger.debug("No signals assosiated to: {}".format(self.name)) | [
"threading.Lock",
"weak_ref.weak_ref",
"idle_queue.idle_add"
] | [((286, 302), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (300, 302), False, 'import threading\n'), ((459, 477), 'weak_ref.weak_ref', 'weak_ref', (['callback'], {}), '(callback)\n', (467, 477), False, 'from weak_ref import weak_ref\n'), ((1445, 1491), 'idle_queue.idle_add', 'idle_queue.idle_add', (['callback', '*args'], {}), '(callback, *args, **kwargs)\n', (1464, 1491), False, 'import idle_queue\n')] |
# %%
import os
import unittest
from pathlib import Path
import jsonlines
from eaas import Client, Config
curr_dir = Path(__file__).parent
def read_jsonlines_to_list(file_name):
    """Read every JSON object from *file_name* and return them as a list."""
    with jsonlines.open(file_name, "r") as reader:
        return [obj for obj in reader]
class TestMetrics(unittest.TestCase):
    """Integration tests for the EaaS scoring client.

    NOTE(review): these tests call the external EaaS service and print the
    responses rather than asserting on them — they verify that the calls
    complete, not the returned scores.
    """

    def test_api(self):
        """Score a multi-reference input file with bleu and rouge2."""
        config = Config()
        client = Client(config)
        input_file = os.path.join(curr_dir, "inputs", "multi_references.jsonl")
        inputs = read_jsonlines_to_list(input_file)
        # res = client.score(inputs)
        res = client.score(inputs, metrics=["bleu", "rouge2"])
        print(res)

    def test_multilingual(self):
        """Score single- and multi-reference files for en, fr and zh."""
        config = Config()
        client = Client(config)
        for lang in ["en", "fr", "zh"]:
            # Single ref
            print(f"****** LANG: {lang} ******")
            print("For single reference")
            input_file = os.path.join(
                curr_dir, "inputs", f"{lang}_single_ref_tiny.jsonl"
            )
            inputs = read_jsonlines_to_list(input_file)
            # res = client.score(inputs, task="sum", metrics=None, lang=lang)
            res = client.score(inputs, metrics=["bleu", "rouge2"])
            print(res)

            # Multi ref
            if lang != "en":
                # Moverscore does not support languages other than English
                metrics = [
                    # "bart_score_cnn_hypo_ref",
                    # "bart_score_summ",
                    # "bart_score_mt",
                    # "bert_score_p",
                    # "bert_score_r",
                    # "bert_score_f",
                    "bleu",
                    # "chrf",
                    # "comet",
                    # "comet_qe",
                    # "prism",
                    # "prism_qe",
                    # "rouge1",
                    "rouge2",
                    # "rougeL"
                ]
            else:
                metrics = [
                    "bleu",
                    "rouge2",
                ]
            print("For multiple references")
            input_file = os.path.join(
                curr_dir, "inputs", f"{lang}_multi_ref_tiny.jsonl"
            )
            inputs = read_jsonlines_to_list(input_file)
            res = client.score(inputs, metrics=metrics)
            print(res)

    def test_main_example(self):
        """Score one inline example (mirrors the documented usage)."""
        client = Client(Config())
        inputs = [
            {
                "source": "Hello, my world",
                "references": ["Hello, world", "Hello my world"],
                "hypothesis": "Hi, my world",
            }
        ]
        metrics = ["rouge1", "bleu", "chrf"]
        score_list = client.score(inputs, metrics=metrics)
        print(score_list)
| [
"eaas.Client",
"eaas.Config",
"pathlib.Path",
"jsonlines.open",
"os.path.join"
] | [((119, 133), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (123, 133), False, 'from pathlib import Path\n'), ((206, 236), 'jsonlines.open', 'jsonlines.open', (['file_name', '"""r"""'], {}), "(file_name, 'r')\n", (220, 236), False, 'import jsonlines\n'), ((403, 411), 'eaas.Config', 'Config', ([], {}), '()\n', (409, 411), False, 'from eaas import Client, Config\n'), ((429, 443), 'eaas.Client', 'Client', (['config'], {}), '(config)\n', (435, 443), False, 'from eaas import Client, Config\n'), ((466, 524), 'os.path.join', 'os.path.join', (['curr_dir', '"""inputs"""', '"""multi_references.jsonl"""'], {}), "(curr_dir, 'inputs', 'multi_references.jsonl')\n", (478, 524), False, 'import os\n'), ((747, 755), 'eaas.Config', 'Config', ([], {}), '()\n', (753, 755), False, 'from eaas import Client, Config\n'), ((773, 787), 'eaas.Client', 'Client', (['config'], {}), '(config)\n', (779, 787), False, 'from eaas import Client, Config\n'), ((970, 1035), 'os.path.join', 'os.path.join', (['curr_dir', '"""inputs"""', 'f"""{lang}_single_ref_tiny.jsonl"""'], {}), "(curr_dir, 'inputs', f'{lang}_single_ref_tiny.jsonl')\n", (982, 1035), False, 'import os\n'), ((2182, 2246), 'os.path.join', 'os.path.join', (['curr_dir', '"""inputs"""', 'f"""{lang}_multi_ref_tiny.jsonl"""'], {}), "(curr_dir, 'inputs', f'{lang}_multi_ref_tiny.jsonl')\n", (2194, 2246), False, 'import os\n'), ((2470, 2478), 'eaas.Config', 'Config', ([], {}), '()\n', (2476, 2478), False, 'from eaas import Client, Config\n')] |
from FEM.Mesh.Geometry import Geometry
from FEM.Mesh.Delaunay import Delaunay
from FEM.PlaneStrain import PlaneStrain
from FEM.Utils.polygonal import roundCorner, giveCoordsCircle
import matplotlib.pyplot as plt
import numpy as np
# Problem parameters. E and v are passed to PlaneStrain below — presumably
# Young's modulus and Poisson's ratio; confirm units against the FEM docs.
E = 30*10**(5)
v = 0.25
b = 10   # half-width of the domain (outer boundary spans [0, 2*b])
h = 20   # total height of the domain
he = h/4  # quarter height; 4*he == h marks the top edge
# Inner tunnel widths at heights 10 and 20, used to fit the parabola below.
ancho_en_h10_in = 18
ancho_en_h20_in = 10
p0 = 200   # magnitude of the distributed load applied on segment -1
# Point-load components (3-4-5 split). NOTE(review): ppx/ppy are not used
# in this script — confirm whether they were meant to be applied.
pp = 1000
ppx = pp*3/5
ppy = -pp*4/5
def darPolinomio(X, Y):
    """Return the interpolating polynomial f with f(X[i]) == Y[i].

    Solves the Vandermonde system for the coefficients (lowest degree
    first) and returns a closure that evaluates the polynomial at x.
    """
    grado = len(X)
    vander = np.vander(np.asarray(X, dtype=float), grado, increasing=True)
    coef = np.linalg.solve(vander, np.asarray(Y, dtype=float))

    def f(x):
        # Evaluate sum_k coef[k] * x**k.
        return sum(c * x**k for k, c in enumerate(coef))
    return f
# Number of sampled points per curved side of the tunnel boundary.
n = 20
# Parabola through (0,0), (10, b - w10/2), (20, b - w20/2): horizontal
# offset of the tunnel wall from the outer boundary as a function of height.
parabola = darPolinomio(np.array([0, 10, 20]), np.array(
    [0, b-ancho_en_h10_in/2, b-ancho_en_h20_in/2]))
# Boundary polygon, built counter-clockwise starting at the bottom edge.
c = [
    [0, 0],
    [2*b, 0]]
# Right wall, bottom to top, offset inward by the parabola.
for i in range(1, n):
    x = 2*b-parabola(h/n*i)
    y = h/n*i
    c += [[x, y]]
# Top edge at height 4*he (== h).
c += [[2*b-parabola(4*he), 4*he],
      [parabola(4*he), 4*he]]
# Left wall, top to bottom (reversed so the polygon stays ordered).
for i in reversed(range(1, n)):
    x = parabola(h/n*i)
    y = h/n*i
    c += [[x, y]]
# Circular hole (the tunnel bore) centred mid-domain.
holes = []
radi = 2
cent = [b, h/2]
vert, seg = giveCoordsCircle(cent, radi, n=50)
hole = {'center': cent, 'segments': seg, 'vertices': vert}
holes += [hole]
# Constrained Delaunay triangulation, max element area 0.1, order-2 elements.
params = Delaunay._strdelaunay(constrained=True, delaunay=True, a='0.1', o=2)
geometria = Delaunay(c, params, nvn=2, holes_dict=holes)
# Named segments: bottom edge and top edge (used for BCs / loads below).
geometria.generateSegmentsFromCoords([0, 0], [2*b, 0])
geometria.generateSegmentsFromCoords(
    [2*b-parabola(4*he), 4*he], [parabola(4*he), 4*he])
# Fix both DOFs (1 and 2) along segment -2 (the bottom edge added above).
geometria.cbe = geometria.cbFromSegment(-2, 0, 1)
geometria.cbe += geometria.cbFromSegment(-2, 0, 2)
geometria.saveMesh('Mesh_tests/tunel')
geometria.show()
plt.show()
# Downward distributed load p0 on segment -1 (the top edge added above).
geometria.loadOnSegment(-1, fy=lambda s: -p0)
geometria.mask = None
# Assemble and solve the plane-strain problem, then plot results.
O = PlaneStrain(geometria, E, v)
O.elementMatrices()
O.ensembling()
O.borderConditions()
O.solveES()
O.postProcess()
plt.show()
| [
"matplotlib.pyplot.show",
"FEM.PlaneStrain.PlaneStrain",
"numpy.zeros",
"FEM.Mesh.Delaunay.Delaunay",
"FEM.Utils.polygonal.giveCoordsCircle",
"FEM.Mesh.Delaunay.Delaunay._strdelaunay",
"numpy.array",
"numpy.linalg.solve"
] | [((1155, 1189), 'FEM.Utils.polygonal.giveCoordsCircle', 'giveCoordsCircle', (['cent', 'radi'], {'n': '(50)'}), '(cent, radi, n=50)\n', (1171, 1189), False, 'from FEM.Utils.polygonal import roundCorner, giveCoordsCircle\n'), ((1274, 1342), 'FEM.Mesh.Delaunay.Delaunay._strdelaunay', 'Delaunay._strdelaunay', ([], {'constrained': '(True)', 'delaunay': '(True)', 'a': '"""0.1"""', 'o': '(2)'}), "(constrained=True, delaunay=True, a='0.1', o=2)\n", (1295, 1342), False, 'from FEM.Mesh.Delaunay import Delaunay\n'), ((1355, 1399), 'FEM.Mesh.Delaunay.Delaunay', 'Delaunay', (['c', 'params'], {'nvn': '(2)', 'holes_dict': 'holes'}), '(c, params, nvn=2, holes_dict=holes)\n', (1363, 1399), False, 'from FEM.Mesh.Delaunay import Delaunay\n'), ((1706, 1716), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1714, 1716), True, 'import matplotlib.pyplot as plt\n'), ((1789, 1817), 'FEM.PlaneStrain.PlaneStrain', 'PlaneStrain', (['geometria', 'E', 'v'], {}), '(geometria, E, v)\n', (1800, 1817), False, 'from FEM.PlaneStrain import PlaneStrain\n'), ((1903, 1913), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1911, 1913), True, 'import matplotlib.pyplot as plt\n'), ((417, 433), 'numpy.zeros', 'np.zeros', (['[n, n]'], {}), '([n, n])\n', (425, 433), True, 'import numpy as np\n'), ((442, 458), 'numpy.zeros', 'np.zeros', (['[n, 1]'], {}), '([n, 1])\n', (450, 458), True, 'import numpy as np\n'), ((571, 592), 'numpy.linalg.solve', 'np.linalg.solve', (['A', 'B'], {}), '(A, B)\n', (586, 592), True, 'import numpy as np\n'), ((752, 773), 'numpy.array', 'np.array', (['[0, 10, 20]'], {}), '([0, 10, 20])\n', (760, 773), True, 'import numpy as np\n'), ((775, 838), 'numpy.array', 'np.array', (['[0, b - ancho_en_h10_in / 2, b - ancho_en_h20_in / 2]'], {}), '([0, b - ancho_en_h10_in / 2, b - ancho_en_h20_in / 2])\n', (783, 838), True, 'import numpy as np\n')] |
# Generated by Django 4.0 on 2022-04-16 00:55
import sudan_art.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Follows the migration that last altered the artwork 'tags' field.
    dependencies = [
        ("sudan_art", "0003_alter_artwork_tags"),
    ]

    operations = [
        # Re-declare 'tags' with the custom validate_tags validator on top
        # of the 250-character limit.
        migrations.AlterField(
            model_name="artwork",
            name="tags",
            field=models.CharField(
                max_length=250, validators=[sudan_art.models.validate_tags]
            ),
        ),
    ]
| [
"django.db.models.CharField"
] | [((359, 436), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(250)', 'validators': '[sudan_art.models.validate_tags]'}), '(max_length=250, validators=[sudan_art.models.validate_tags])\n', (375, 436), False, 'from django.db import migrations, models\n')] |
import Rhino # type: ignore
import rhinoscriptsyntax as rs
import scriptcontext
import re
from integral_timber_joints.rhino.load import get_process, get_process_artist, process_is_none
from integral_timber_joints.geometry import JointHalfLap
from integral_timber_joints.rhino.assembly_artist import AssemblyNurbsArtist
from integral_timber_joints.rhino.process_artist import ProcessArtist, RobotClampAssemblyProcess, Assembly
from integral_timber_joints.process import RoboticMovement, ObjectState
from compas_rhino.utilities import clear_layer, delete_objects, draw_mesh
from compas_rhino.utilities import draw_polylines
from compas.geometry import Frame, Transformation, Cylinder, Point, transform_points, transpose_matrix, multiply_matrices
from compas.datastructures import Mesh, mesh_weld
try:
from typing import Any, Dict, List, Optional, Tuple, Type
except:
pass
if __name__ == '__main__':
    # Entry point when run as a Rhino script: load the shared assembly
    # process document and its artist. NOTE(review): both bindings are
    # unused in this file — presumably kept for interactive inspection
    # inside Rhino; confirm before removing.
    process = get_process()
    artist = get_process_artist()
| [
"integral_timber_joints.rhino.load.get_process",
"integral_timber_joints.rhino.load.get_process_artist"
] | [((925, 938), 'integral_timber_joints.rhino.load.get_process', 'get_process', ([], {}), '()\n', (936, 938), False, 'from integral_timber_joints.rhino.load import get_process, get_process_artist, process_is_none\n'), ((952, 972), 'integral_timber_joints.rhino.load.get_process_artist', 'get_process_artist', ([], {}), '()\n', (970, 972), False, 'from integral_timber_joints.rhino.load import get_process, get_process_artist, process_is_none\n')] |
"""Contains FileView."""
from tornado.web import HTTPError
from rubberband.models import TestSet, Result
from rubberband.constants import EXPORT_DATA_FORMATS, EXPORT_FILE_TYPES
from .base import BaseHandler
class FileView(BaseHandler):
    """Request handler handling the view or download of log files or log file contents."""

    def get(self, file_id):
        """Answer GET: render or download (part of) a log file.

        Query-string parameters select the instance, the export format and
        file type, and whether the content is written raw (download) or
        rendered inside the `file.html` template.

        Parameters
        ----------
        file_id : str
            file to be viewed.
        """
        instance_id = self.get_argument("instance", default=None)
        for_download = self.get_argument("download", default=False)
        fformat = self.get_argument("format", default="raw")
        ftype = self.get_argument("ftype", default=".out")

        # Reject unknown formats/types before touching the data store.
        if fformat not in EXPORT_DATA_FORMATS:
            raise HTTPError(404)
        if ftype not in EXPORT_FILE_TYPES:
            raise HTTPError(404)

        # A single result instance is routed through its parent TestSet.
        obj = (Result.get(id=instance_id, routing=file_id)
               if instance_id else TestSet.get(id=file_id))

        # Dynamic dispatch, e.g. `result.json(ftype=".set")`.
        file_contents = getattr(obj, fformat)(ftype=ftype)
        if file_contents is None:
            raise HTTPError(404)

        if for_download:
            self.write(file_contents)
        else:
            self.render("file.html", contents=file_contents)
| [
"rubberband.models.Result.get",
"tornado.web.HTTPError",
"rubberband.models.TestSet.get"
] | [((1067, 1081), 'tornado.web.HTTPError', 'HTTPError', (['(404)'], {}), '(404)\n', (1076, 1081), False, 'from tornado.web import HTTPError\n'), ((1144, 1158), 'tornado.web.HTTPError', 'HTTPError', (['(404)'], {}), '(404)\n', (1153, 1158), False, 'from tornado.web import HTTPError\n'), ((1240, 1283), 'rubberband.models.Result.get', 'Result.get', ([], {'id': 'instance_id', 'routing': 'file_id'}), '(id=instance_id, routing=file_id)\n', (1250, 1283), False, 'from rubberband.models import TestSet, Result\n'), ((1316, 1339), 'rubberband.models.TestSet.get', 'TestSet.get', ([], {'id': 'file_id'}), '(id=file_id)\n', (1327, 1339), False, 'from rubberband.models import TestSet, Result\n'), ((1496, 1510), 'tornado.web.HTTPError', 'HTTPError', (['(404)'], {}), '(404)\n', (1505, 1510), False, 'from tornado.web import HTTPError\n')] |
from abc import abstractmethod
from collections import defaultdict, deque
from typing import Deque, Dict
from adventofcode2020.utils.abstract import FileReaderSolution
class Day11:
    """Shared seating-simulation logic for both parts of day 11."""

    # Occupied-neighbour threshold at which a seat empties; set by subclasses.
    count_seats: int

    @abstractmethod
    def count_next_to_it(self, grid, row, col) -> int:
        """Count the number of seats next to it/ Depends on the part"""

    def str_to_map(self, input_data) -> Dict[int, Dict]:
        """Parse the puzzle text into ``{row: {col: char}}``."""
        return {
            r: dict(enumerate(line))
            for r, line in enumerate(input_data.splitlines())
        }

    def generation(self, grid) -> Dict[int, Dict]:
        """Compute one simulation step and return the new grid.

        Counts are taken against the old grid and written to a fresh one,
        so all cells update simultaneously. Not particularly fast.
        """
        nxt: Dict[int, Dict] = defaultdict(dict)
        for r in range(0, len(grid)):
            for col in range(0, len(grid[r])):
                cell = grid[r][col]
                if cell == ".":
                    # Floor never changes.
                    nxt[r][col] = "."
                    continue
                occupied = self.count_next_to_it(grid, r, col)
                if occupied == 0:
                    # A seat with no occupied neighbours becomes occupied.
                    nxt[r][col] = "#"
                elif cell == "#" and occupied >= self.count_seats:
                    # If a seat is occupied (#) and four or more seats adjacent
                    # to it are also occupied, the seat becomes empty.
                    nxt[r][col] = "L"
                else:
                    # Otherwise the cell keeps its previous state.
                    nxt[r][col] = cell
        return nxt

    def count_filled(self, grid, char="#") -> int:
        """Count cells equal to *char* across the whole grid."""
        return sum(
            1
            for cells in grid.values()
            for value in cells.values()
            if value == char
        )

    def print_grid(self, grid):
        """Debug helper: clear the terminal and draw the grid."""
        from time import sleep

        sleep(0.1)
        print(chr(27) + "[2J")  # ANSI clear-screen escape
        print("\n")
        for cells in grid.values():
            print("".join(cells.values()))
        print("\n")

    def run_day(self, input_data):
        """Iterate generations until the occupied-seat count stabilises.

        Keeps a sliding window of recent counts; once the count from five
        generations ago equals the current one, the simulation is treated
        as stable and that count is returned.
        """
        recent: Deque[int] = deque()
        grid = self.str_to_map(input_data)
        while True:
            grid = self.generation(grid)
            occupied = self.count_filled(grid)
            recent.append(occupied)
            # self.print_grid(grid)
            if len(recent) >= 5:
                if recent.popleft() == occupied:
                    return occupied
class Day11PartA(Day11, FileReaderSolution):
    count_seats = 4

    def count_next_to_it(self, grid, row, col) -> int:
        """Returns how many seats next to this one are filled"""
        # Scan the eight immediate neighbours; cells off the grid read as 0.
        neighbours = (
            grid.get(row + dr, {}).get(col + dc, 0)
            for dr in (-1, 0, 1)
            for dc in (-1, 0, 1)
            if (dr, dc) != (0, 0)
        )
        return sum(1 for cell in neighbours if cell == "#")

    def solve(self, input_data: str) -> int:
        return self.run_day(input_data)
class Day11PartB(Day11, FileReaderSolution):
    count_seats = 5

    # The eight ray directions as (drow, dcol) pairs.
    _DIRECTIONS = (
        (0, -1), (0, 1), (-1, 0), (1, 0),
        (-1, -1), (-1, 1), (1, -1), (1, 1),
    )

    def find_recursive(self, grid, row, col, drow, dcol, offset=1) -> bool:
        """Walk along (drow, dcol) past floor tiles until a seat or the edge.

        Returns True when the first visible seat is occupied; False when it
        is empty or the ray leaves the grid.
        """
        spot = grid.get(row + drow * offset, {}).get(col + dcol * offset, None)
        if spot == "#":
            return True
        if spot == "L":
            return False
        if spot is None:
            # Off the grid.
            return False
        if spot == ".":
            # Floor: keep looking one step further in the same direction.
            return self.find_recursive(grid, row, col, drow, dcol, offset + 1)
        raise ValueError(f"Unknown char at {spot}")

    def count_next_to_it(self, grid, row, col) -> int:
        """Count directions in which the first visible seat is occupied."""
        return sum(
            self.find_recursive(grid, row, col, drow, dcol)
            for drow, dcol in self._DIRECTIONS
        )

    def solve(self, input_data: str) -> int:
        return self.run_day(input_data)
| [
"collections.defaultdict",
"collections.deque",
"time.sleep"
] | [((797, 814), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (808, 814), False, 'from collections import defaultdict, deque\n'), ((1978, 1988), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (1983, 1988), False, 'from time import sleep\n'), ((2209, 2216), 'collections.deque', 'deque', ([], {}), '()\n', (2214, 2216), False, 'from collections import defaultdict, deque\n')] |
# -*- coding: utf-8 -*-
import logging
import os
import requests.exceptions
import six
from configparser import ConfigParser
from ..exc import EmptyResourceError, InvalidResourceError, MissingResourceError
from ..utils import download, is_url
__all__ = [
'parse_bel_resource',
'get_lines',
'get_bel_resource',
]
log = logging.getLogger(__name__)
def _get_bel_resource_kvp(line, delimiter):
"""
:param str line:
:param str delimiter:
:rtype: tuple[str,str]
"""
split_line = line.rsplit(delimiter, 1)
key = split_line[0].strip()
value = split_line[1].strip() if 2 == len(split_line) else None
return key, value
def parse_bel_resource(lines):
    """Parses a BEL config (BELNS, BELANNO, or BELEQ) file from the given line iterator over the file

    :param iter[str] lines: An iterable over the lines in a BEL config file
    :return: A config-style dictionary representing the BEL config file
    :rtype: dict
    """
    all_lines = list(lines)

    # Everything up to and including the last '[Values]' header is
    # INI-style metadata; the remainder is delimiter-separated values.
    split_at = 1 + max(
        index for index, line in enumerate(all_lines) if line.strip() == '[Values]'
    )

    config = ConfigParser(strict=False)
    config.optionxform = lambda option: option  # preserve key case
    config.read_file(all_lines[:split_at])

    delimiter = config['Processing']['DelimiterString']

    result = {section: dict(options) for section, options in config.items()}
    result['Values'] = dict(
        _get_bel_resource_kvp(line, delimiter) for line in all_lines[split_at:]
    )
    return result
def get_lines(location):
    """Gets the lines from a location

    :param str location: The URL location to download or a file path to open. File path expands user.
    :return: list[str]
    :raises: requests.exceptions.HTTPError
    """
    if not is_url(location):
        # Local file: expand '~' and return the raw lines.
        with open(os.path.expanduser(location)) as f:
            return list(f)
    res = download(location)
    return [line.decode('utf-8', errors='ignore').strip() for line in res.iter_lines()]
def get_bel_resource(location):
    """Loads/downloads and parses a config file from the given url or file path

    :param str location: The URL or file path to a BELNS, BELANNO, or BELEQ file to download and parse
    :return: A config-style dictionary representing the BEL config file
    :rtype: dict
    :raises: pybel.resources.exc.ResourceError
    """
    log.debug('getting resource: %s', location)

    # Fetch failures become MissingResourceError, chained via six for
    # Python 2/3 compatible exception chaining.
    try:
        resource_lines = get_lines(location)
    except requests.exceptions.HTTPError as err:
        six.raise_from(MissingResourceError(location), err)

    # Malformed files surface as ValueError from the parser.
    try:
        resource = parse_bel_resource(resource_lines)
    except ValueError as err:
        six.raise_from(InvalidResourceError(location), err)

    if not resource['Values']:
        raise EmptyResourceError(location)

    return resource
| [
"configparser.ConfigParser",
"os.path.expanduser",
"logging.getLogger"
] | [((334, 361), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (351, 361), False, 'import logging\n'), ((1149, 1175), 'configparser.ConfigParser', 'ConfigParser', ([], {'strict': '(False)'}), '(strict=False)\n', (1161, 1175), False, 'from configparser import ConfigParser\n'), ((2014, 2042), 'os.path.expanduser', 'os.path.expanduser', (['location'], {}), '(location)\n', (2032, 2042), False, 'import os\n')] |
# Generated by Django 2.2.3 on 2020-03-09 08:08
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('usersapp', '0001_initial'),
    ]

    operations = [
        # Order CustomUser listings alphabetically by username.
        migrations.AlterModelOptions(
            name='customuser',
            options={'ordering': ['username']},
        ),
        # Index 'balance' for fast lookups; stored as a 12-digit decimal
        # with 2 decimal places, defaulting to 0.
        migrations.AlterField(
            model_name='customuser',
            name='balance',
            field=models.DecimalField(db_index=True, decimal_places=2, default=0, max_digits=12),
        ),
        # Index 'inn' (up to 12 characters). NOTE(review): presumably a
        # taxpayer identification number — confirm against the model docs.
        migrations.AlterField(
            model_name='customuser',
            name='inn',
            field=models.CharField(db_index=True, max_length=12),
        ),
    ]
| [
"django.db.models.CharField",
"django.db.models.DecimalField",
"django.db.migrations.AlterModelOptions"
] | [((225, 313), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""customuser"""', 'options': "{'ordering': ['username']}"}), "(name='customuser', options={'ordering': [\n 'username']})\n", (253, 313), False, 'from django.db import migrations, models\n'), ((459, 537), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'db_index': '(True)', 'decimal_places': '(2)', 'default': '(0)', 'max_digits': '(12)'}), '(db_index=True, decimal_places=2, default=0, max_digits=12)\n', (478, 537), False, 'from django.db import migrations, models\n'), ((660, 706), 'django.db.models.CharField', 'models.CharField', ([], {'db_index': '(True)', 'max_length': '(12)'}), '(db_index=True, max_length=12)\n', (676, 706), False, 'from django.db import migrations, models\n')] |
import numpy as np
import os
from astropy.io import ascii
class mesa:
    """Reader for MESA stellar-evolution profile files (profileXX.data)."""

    def init(self):
        """Set default parameters; __init__ then overrides them with kwargs."""
        ###### parameters ####################
        # NOTE(review): self.path is set but never used — read_mesa_profile
        # opens self.name directly; confirm whether path should be joined.
        self.path = '/Users/Jason/code/mesa_progenitors/' # file path
#        self.type = 'solar' # file type
        self.name = 'profileXX.data' # file name

    def __init__(self,**kwargs):
        """Apply defaults, then overwrite any attribute passed as a kwarg."""
        self.init()
        self.__dict__.update(kwargs)

# sample lab 1 solution
    def read_mesa_profile(self):
        """Read one MESA profile file.

        Returns (header, data): the header table is parsed from rows 1-3
        and the data table from row 4 onward (row indices as passed to
        astropy.io.ascii).
        """
        h = ascii.read(self.name,header_start=1,data_start=2,data_end=3)
        f = ascii.read(self.name,header_start=4,data_start=5)
        return h,f

    def read_mesa_star(self):
        """Read the profile and store the tables on the instance
        (.header, .data)."""
        h,f = self.read_mesa_profile()
        self.data = f
        self.header = h
| [
"astropy.io.ascii.read"
] | [((505, 568), 'astropy.io.ascii.read', 'ascii.read', (['self.name'], {'header_start': '(1)', 'data_start': '(2)', 'data_end': '(3)'}), '(self.name, header_start=1, data_start=2, data_end=3)\n', (515, 568), False, 'from astropy.io import ascii\n'), ((578, 629), 'astropy.io.ascii.read', 'ascii.read', (['self.name'], {'header_start': '(4)', 'data_start': '(5)'}), '(self.name, header_start=4, data_start=5)\n', (588, 629), False, 'from astropy.io import ascii\n')] |
import cv2
import math
import numpy as np
def get_density_map_gaussian(im, points):
    """Build a crowd-density map by stamping a 2-D Gaussian at each point.

    im : 2-D array whose shape defines the output map size.
    points : array of (x, y) positions (column 0 = x, column 1 = y), or None.
    Returns a float64 array of the same shape as *im*.
    """
    im_density = np.zeros_like(im, dtype=np.float64)
    h, w = im_density.shape

    if points is None:
        return im_density

    if points.shape[0] == 1:
        # Single annotation: mark one pixel instead of stamping a Gaussian.
        x1 = max(0, min(w-1, round(points[0, 0])))
        y1 = max(0, min(h-1, round(points[0, 1])))
        im_density[y1, x1] = 255
        return im_density

    for j in range(points.shape[0]):
        f_sz = 15      # Gaussian kernel size in pixels
        sigma = 4.0    # Gaussian standard deviation
        # 2-D Gaussian as the outer product of two 1-D kernels.
        H = np.multiply(cv2.getGaussianKernel(f_sz, sigma), (cv2.getGaussianKernel(f_sz, sigma)).T)

        # Clamp the annotated point into the image bounds.
        x = min(w-1, max(0, abs(int(math.floor(points[j, 0])))))
        y = min(h-1, max(0, abs(int(math.floor(points[j, 1])))))
        if x >= w or y >= h:
            continue

        # Kernel footprint in image coordinates (half-open on the right).
        x1 = x - f_sz//2 + 0
        y1 = y - f_sz//2 + 0
        x2 = x + f_sz//2 + 1
        y2 = y + f_sz//2 + 1

        # Amounts by which the footprint spills over each image edge.
        dfx1, dfy1, dfx2, dfy2 = 0, 0, 0, 0
        change_H = False

        if x1 < 0:
            dfx1 = abs(x1) + 0
            x1 = 0
            change_H = True
        if y1 < 0:
            dfy1 = abs(y1) + 0
            y1 = 0
            change_H = True
        if x2 > w:
            dfx2 = x2 - w
            x2 = w
            change_H = True
        if y2 > h:
            dfy2 = y2 - h
            y2 = h
            change_H = True

        # If the footprint was clipped, rebuild a smaller Gaussian matching
        # the clipped region so the stamped patch shapes agree.
        x1h, y1h, x2h, y2h = 1 + dfx1, 1 + dfy1, f_sz - dfx2, f_sz - dfy2
        if change_H is True:
            H = np.multiply(cv2.getGaussianKernel(y2h-y1h+1, sigma), (cv2.getGaussianKernel(x2h-x1h+1, sigma)).T)
        im_density[y1:y2, x1:x2] += H
    return im_density
| [
"numpy.zeros_like",
"math.floor",
"cv2.getGaussianKernel"
] | [((104, 139), 'numpy.zeros_like', 'np.zeros_like', (['im'], {'dtype': 'np.float64'}), '(im, dtype=np.float64)\n', (117, 139), True, 'import numpy as np\n'), ((506, 540), 'cv2.getGaussianKernel', 'cv2.getGaussianKernel', (['f_sz', 'sigma'], {}), '(f_sz, sigma)\n', (527, 540), False, 'import cv2\n'), ((543, 577), 'cv2.getGaussianKernel', 'cv2.getGaussianKernel', (['f_sz', 'sigma'], {}), '(f_sz, sigma)\n', (564, 577), False, 'import cv2\n'), ((1456, 1499), 'cv2.getGaussianKernel', 'cv2.getGaussianKernel', (['(y2h - y1h + 1)', 'sigma'], {}), '(y2h - y1h + 1, sigma)\n', (1477, 1499), False, 'import cv2\n'), ((1498, 1541), 'cv2.getGaussianKernel', 'cv2.getGaussianKernel', (['(x2h - x1h + 1)', 'sigma'], {}), '(x2h - x1h + 1, sigma)\n', (1519, 1541), False, 'import cv2\n'), ((618, 642), 'math.floor', 'math.floor', (['points[j, 0]'], {}), '(points[j, 0])\n', (628, 642), False, 'import math\n'), ((683, 707), 'math.floor', 'math.floor', (['points[j, 1]'], {}), '(points[j, 1])\n', (693, 707), False, 'import math\n')] |
""" test of base functions"""
import os
import sys
# pylint: disable=import-error disable=wrong-import-position
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from pydit import setup_logging, clean_string
# import numpy as np
# from datetime import datetime, date, timedelta
# from pandas import Timestamp
logger = setup_logging()
def test_clean_string():
    """test the clean string function"""
    # Default behaviour: trims, collapses spaces to underscores and drops
    # the trailing punctuation.
    assert clean_string("  <NAME> 123 456 .  ") == "john_smith_123_456"
    # With space_to_underscore=False the spaces are preserved.
    assert (
        clean_string("  <NAME> 123 456 .  ", space_to_underscore=False)
        == "<NAME> 123 456"
    )
if __name__ == "__main__":
test_clean_string
| [
"pydit.clean_string",
"os.path.abspath",
"pydit.setup_logging"
] | [((349, 364), 'pydit.setup_logging', 'setup_logging', ([], {}), '()\n', (362, 364), False, 'from pydit import setup_logging, clean_string\n'), ((445, 481), 'pydit.clean_string', 'clean_string', (['""" <NAME> 123 456 . """'], {}), "(' <NAME> 123 456 . ')\n", (457, 481), False, 'from pydit import setup_logging, clean_string\n'), ((528, 591), 'pydit.clean_string', 'clean_string', (['""" <NAME> 123 456 . """'], {'space_to_underscore': '(False)'}), "(' <NAME> 123 456 . ', space_to_underscore=False)\n", (540, 591), False, 'from pydit import setup_logging, clean_string\n'), ((161, 186), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (176, 186), False, 'import os\n')] |
from django.contrib import admin
from django.utils.html import mark_safe, format_html
from . import models
class CategoryAdmin(admin.ModelAdmin):
    # 'image_tag' is a read-only rendered preview shown above the
    # editable category fields.
    fields = ('image_tag', 'title', 'description', 'image',)
    readonly_fields = ('image_tag',)
class PostAdmin(admin.ModelAdmin):
    # 'image_tag', 'n_views' and 'author' are shown but not editable;
    # the author is filled in automatically on save (below).
    fields = ('title', 'content', 'author', 'image_tag', 'image', 'category',
            'n_views', 'tags')
    readonly_fields = ('image_tag', 'n_views', 'author', )

    def save_model(self, request, obj, form, change):
        # Default the author to the logged-in user on first save.
        # getattr with a default is used instead of obj.author —
        # presumably to tolerate an unset relation; confirm.
        if getattr(obj, 'author', None) is None:
            obj.author = request.user
        obj.save()
class ProfileAdmin(admin.ModelAdmin):
    """Admin for user profiles with a read-only image preview.

    FIX: 'image_tag' is added to ``fields`` — a name listed only in
    ``readonly_fields`` is never rendered when an explicit ``fields``
    tuple is given, so the preview was silently missing. This matches
    CategoryAdmin's configuration.
    """
    fields = ('image_tag', 'user', 'image', 'description',)
    readonly_fields = ('image_tag',)
# Register your models here.
admin.site.register(models.Post, PostAdmin)
admin.site.register(models.Category, CategoryAdmin)
admin.site.register(models.Profile, ProfileAdmin)
# Subscriber uses the default ModelAdmin options.
admin.site.register(models.Subscriber)
| [
"django.contrib.admin.site.register"
] | [((770, 813), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Post', 'PostAdmin'], {}), '(models.Post, PostAdmin)\n', (789, 813), False, 'from django.contrib import admin\n'), ((814, 865), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Category', 'CategoryAdmin'], {}), '(models.Category, CategoryAdmin)\n', (833, 865), False, 'from django.contrib import admin\n'), ((866, 915), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Profile', 'ProfileAdmin'], {}), '(models.Profile, ProfileAdmin)\n', (885, 915), False, 'from django.contrib import admin\n'), ((916, 954), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Subscriber'], {}), '(models.Subscriber)\n', (935, 954), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
"""Simple networks of caches modeled as single caches."""
import random
import numpy as np
from icarus.util import inheritdoc
from icarus.tools import DiscreteDist
from icarus.registry import register_cache_policy, CACHE_POLICY
from .policies import Cache
__all__ = [
'PathCache',
'TreeCache',
'ArrayCache',
'ShardedCache',
]
"""
So let me get this straight, these "systems" do not implement ANY delay or
any kind of change between fetching or adding data to or from specific nodes?!
- as it says at the end of the document, modeled as single caches -
"""
@register_cache_policy('PATH')
class PathCache(object):
    """Path of caches

    Not a single-node cache: models a chain of caching nodes in which
    requests are fed to the first node and, on a miss, propagate down the
    remaining nodes. The overall lookup misses only when no node on the
    path holds the requested content.
    """

    def __init__(self, caches, **kwargs):
        """Constructor

        Parameters
        ----------
        caches : array-like
            An array of caching nodes instances on the path
        """
        self._caches = caches
        self._len = len(caches)

    # TODO: Implement all the below methods, with the appropriate "cache"
    # (implement EDRs as "caches")

    def __len__(self):
        return self._len

    @property
    def maxlen(self):
        return self._len

    def has(self, k):
        # True as soon as any node on the path holds k.
        return any(cache.has(k) for cache in self._caches)

    def get(self, k):
        hit_index = None
        for index, cache in enumerate(self._caches):
            if cache.get(k):
                hit_index = index
                break
        if hit_index is None:
            return False
        # Replicate the retrieved content on every cache traversed before
        # the node that produced the hit.
        for cache in self._caches[:hit_index]:
            cache.put(k)
        return True

    def put(self, k):
        """Insert an item in the cache if not already inserted.

        If the element is already present in the cache, it will pushed to the
        top of the cache.

        Parameters
        ----------
        k : any hashable type
            The item to be inserted

        Returns
        -------
        evicted : any hashable type
            The evicted object or *None* if no contents were evicted.
        """
        for cache in self._caches:
            cache.put(k)

    def remove(self, k):
        raise NotImplementedError('This method is not implemented')

    def position(self, k):
        raise NotImplementedError('This method is not implemented')

    def dump(self, serialized=True):
        per_node = [cache.dump() for cache in self._caches]
        return sum(per_node, []) if serialized else per_node

    def clear(self):
        for cache in self._caches:
            cache.clear()
@register_cache_policy('TREE')
class TreeCache(object):
    """Tree of caches

    Not a single-node cache: models a two-level tree of caching nodes in
    which each request is fed to a randomly chosen leaf node and, on a
    miss, forwarded to the root node. A miss occurs when neither the
    selected leaf nor the root has the requested content.

    Notes
    -----
    This cache can only be operated in a read-through manner and not in write
    through or read/write aside. In other words, before issuing a put, you
    must issue a get for the same item. The reason for this limitation is
    to ensure that matching get/put requests go through the same randomly
    selected node.
    """

    def __init__(self, leaf_caches, root_cache, **kwargs):
        """Constructor

        Parameters
        ----------
        leaf_caches : array-like
            An array of leaf caching node instances
        root_cache : cache
            The root caching node instance
        """
        self._leaf_caches = leaf_caches
        self._root_cache = root_cache
        # Total capacity is the sum over all leaves plus the root.
        self._len = sum(len(c) for c in leaf_caches) + len(root_cache)
        self._n_leaves = len(leaf_caches)
        # Leaf selected by the most recent get(); put() routes through it.
        self._leaf = None

    def __len__(self):
        return self._len

    @property
    def maxlen(self):
        return self._len

    def has(self, k):
        raise NotImplementedError('This method is not implemented')

    def get(self, k):
        self._leaf = random.choice(self._leaf_caches)
        if self._leaf.get(k):
            return True
        if self._root_cache.get(k):
            # Hit at the root: replicate into the selected leaf.
            self._leaf.put(k)
            return True
        return False

    def put(self, k):
        """Insert an item in the cache if not already inserted.

        If the element is already present in the cache, it will pushed to the
        top of the cache.

        Parameters
        ----------
        k : any hashable type
            The item to be inserted

        Returns
        -------
        evicted : any hashable type
            The evicted object or *None* if no contents were evicted.
        """
        if self._leaf is None:
            raise ValueError("You are trying to insert an item not requested before. "
                             "Tree cache can be used in read-through mode only")
        self._leaf.put(k)
        self._root_cache.put(k)

    def remove(self, k):
        raise NotImplementedError('This method is not implemented')

    def position(self, k):
        raise NotImplementedError('This method is not implemented')

    def dump(self, serialized=True):
        dump = [c.dump() for c in self._leaf_caches]
        dump.append(self._root_cache.dump())
        return sum(dump, []) if serialized else dump

    def clear(self):
        # BUG FIX: this previously iterated ``self._caches``, an attribute
        # this class never defines, so clear() always raised
        # AttributeError. Clear every leaf cache and the root instead.
        for c in self._leaf_caches:
            c.clear()
        self._root_cache.clear()
@register_cache_policy('ARRAY')
class ArrayCache(object):
    """Array of caches
    This is not a single-node cache implementation but rather it implements
    an array of caching nodes in which requests are fed to a random node of
    a set.
    Notes
    -----
    This cache can only be operated in a read-through manner and not in write
    through or read/write aside. In other words, before issuing a put, you
    must issue a get for the same item. The reason for this limitation is
    to ensure that matching get/put requests go through the same randomly
    selected node.
    """
    def __init__(self, caches, weights=None, **kwargs):
        """Constructor
        Parameters
        ----------
        caches : array-like
            An array of caching nodes instances on the array
        weights : array-like
            Random weights according to which a cache of the array should be
            selected to process a given request
        """
        self._caches = caches
        # Aggregate capacity of the whole array.
        self._len = sum(len(c) for c in caches)
        self._n_caches = len(caches)
        # Cache picked by the last get(); put() must follow a get().
        self._selected_cache = None
        if weights is not None:
            # Tolerate small floating-point drift in the weight sum.
            if np.abs(np.sum(weights) - 1) > 0.0001:
                raise ValueError("weights must sum up to 1")
            if len(weights) != self._n_caches:
                raise ValueError("weights must have as many elements as nr of caches")
            randvar = DiscreteDist(weights)
            # NOTE(review): assumes DiscreteDist.rv() is 1-indexed, hence
            # the -1 -- confirm against the DiscreteDist implementation.
            self.select_cache = lambda : self._caches[randvar.rv() - 1]
        else:
            # No weights: pick a cache uniformly at random.
            self.select_cache = lambda : random.choice(self._caches)
    def __len__(self):
        return self._len
    @property
    def maxlen(self):
        return self._len
    def has(self, k):
        raise NotImplementedError('This method is not implemented')
    def get(self, k):
        # Remember the selected cache so the matching put() hits it too.
        self._selected_cache = self.select_cache()
        return self._selected_cache.get(k)
    def put(self, k):
        """Insert an item in the cache if not already inserted.
        If the element is already present in the cache, it will pushed to the
        top of the cache.
        Parameters
        ----------
        k : any hashable type
            The item to be inserted
        Returns
        -------
        evicted : any hashable type
            The evicted object or *None* if no contents were evicted.
        """
        if self._selected_cache is None:
            raise ValueError("You are trying to insert an item not requested before. "
                             "Array cache can be used in read-through mode only")
        self._selected_cache.put(k)
    def remove(self, k):
        raise NotImplementedError('This method is not implemented')
    def position(self, k):
        raise NotImplementedError('This method is not implemented')
    def dump(self, serialized=True):
        dump = [c.dump() for c in self._caches]
        return sum(dump, []) if serialized else dump
    def clear(self):
        for c in self._caches:
            c.clear()
@register_cache_policy('SHARD')
class ShardedCache(Cache):
    """Set of sharded caches.
    Set of caches coordinately storing items. When a request reaches the
    caches, the request is forwarded to the specific cache (shard) based on the
    outcome of a hash function. So, an item can be stored only by a single
    node of the system.
    """
    def __init__(self, maxlen, policy='LRU', nodes=4, f_map=None,
                 policy_attr=None, **kwargs):
        """Constructor
        Parameters
        ----------
        maxlen : int
            The maximum number of items the cache can store.
        policy : str, optional
            The eviction policy of each node (e.g., LRU, LFU, FIFO...).
            Default is LRU.
        nodes : int, optional
            The number of nodes, default is 4.
        f_map : callable, optional
            A callable governing the mapping between items and caching nodes.
            It receives as argument a value of an item :math:`k` and returns an
            integer between :math:`0` and :math:`nodes - 1` identifying the
            target node.
            If not specified, the mapping is done by computing the hash of the
            given item modulo the number of nodes.
        policy_attr : dict, optional
            A set of parameters for initializing the underlying caching policy.
        Notes
        -----
        The maxlen parameter refers to the cumulative size of the caches in the
        set. The size of each shard is derived dividing maxlen by the number
        of nodes.
        """
        # FIX: policy_attr previously defaulted to a mutable {} (shared
        # across all instantiations); use the None-sentinel idiom instead.
        if policy_attr is None:
            policy_attr = {}
        maxlen = int(maxlen)
        if maxlen <= 0:
            raise ValueError('maxlen must be positive')
        if not isinstance(nodes, int) or nodes <= 0 or nodes > maxlen:
            raise ValueError('nodes must be an integer and 0 < nodes <= maxlen')
        # If maxlen is not a multiple of nodes, then some nodes have one slot
        # more than others
        self._node_maxlen = [maxlen // nodes for _ in range(nodes)]
        for i in range(maxlen % nodes):
            self._node_maxlen[i] += 1
        self._maxlen = maxlen
        self._node = [CACHE_POLICY[policy](self._node_maxlen[i], **policy_attr)
                      for i in range(nodes)]
        # Default mapping: hash of the item modulo the number of shards.
        self.f_map = f_map if f_map is not None else lambda k: hash(k) % nodes
    @inheritdoc(Cache)
    def __len__(self):
        return sum(len(s) for s in self._node)
    @property
    def maxlen(self):
        return self._maxlen
    @inheritdoc(Cache)
    def has(self, k):
        return self._node[self.f_map(k)].has(k)
    @inheritdoc(Cache)
    def get(self, k):
        return self._node[self.f_map(k)].get(k)
    @inheritdoc(Cache)
    def put(self, k):
        return self._node[self.f_map(k)].put(k)
    @inheritdoc(Cache)
    def dump(self, serialized=True):
        dump = list(s.dump() for s in self._node)
        return sum(dump, []) if serialized else dump
    @inheritdoc(Cache)
    def remove(self, k):
        return self._node[self.f_map(k)].remove(k)
    @inheritdoc(Cache)
    def clear(self):
        for s in self._node:
            s.clear()
| [
"numpy.sum",
"icarus.registry.register_cache_policy",
"random.choice",
"icarus.tools.DiscreteDist",
"icarus.util.inheritdoc"
] | [((622, 651), 'icarus.registry.register_cache_policy', 'register_cache_policy', (['"""PATH"""'], {}), "('PATH')\n", (643, 651), False, 'from icarus.registry import register_cache_policy, CACHE_POLICY\n'), ((2904, 2933), 'icarus.registry.register_cache_policy', 'register_cache_policy', (['"""TREE"""'], {}), "('TREE')\n", (2925, 2933), False, 'from icarus.registry import register_cache_policy, CACHE_POLICY\n'), ((5804, 5834), 'icarus.registry.register_cache_policy', 'register_cache_policy', (['"""ARRAY"""'], {}), "('ARRAY')\n", (5825, 5834), False, 'from icarus.registry import register_cache_policy, CACHE_POLICY\n'), ((8821, 8851), 'icarus.registry.register_cache_policy', 'register_cache_policy', (['"""SHARD"""'], {}), "('SHARD')\n", (8842, 8851), False, 'from icarus.registry import register_cache_policy, CACHE_POLICY\n'), ((11142, 11159), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11152, 11159), False, 'from icarus.util import inheritdoc\n'), ((11301, 11318), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11311, 11318), False, 'from icarus.util import inheritdoc\n'), ((11395, 11412), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11405, 11412), False, 'from icarus.util import inheritdoc\n'), ((11489, 11506), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11499, 11506), False, 'from icarus.util import inheritdoc\n'), ((11583, 11600), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11593, 11600), False, 'from icarus.util import inheritdoc\n'), ((11747, 11764), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11757, 11764), False, 'from icarus.util import inheritdoc\n'), ((11847, 11864), 'icarus.util.inheritdoc', 'inheritdoc', (['Cache'], {}), '(Cache)\n', (11857, 11864), False, 'from icarus.util import inheritdoc\n'), ((4392, 4424), 'random.choice', 'random.choice', (['self._leaf_caches'], {}), '(self._leaf_caches)\n', 
(4405, 4424), False, 'import random\n'), ((7224, 7245), 'icarus.tools.DiscreteDist', 'DiscreteDist', (['weights'], {}), '(weights)\n', (7236, 7245), False, 'from icarus.tools import DiscreteDist\n'), ((7373, 7400), 'random.choice', 'random.choice', (['self._caches'], {}), '(self._caches)\n', (7386, 7400), False, 'import random\n'), ((6976, 6991), 'numpy.sum', 'np.sum', (['weights'], {}), '(weights)\n', (6982, 6991), True, 'import numpy as np\n')] |
import pandas as pd
import pickle
from narrowing_ai_research.utils.list_utils import flatten_list
import narrowing_ai_research
import datetime
import logging
import os
import json
project_dir = narrowing_ai_research.project_dir
def make_ai_ids():
    """Return the set of AI article ids: arXiv papers filed under the core
    AI categories, unioned with the expanded paper list produced by the
    find-AI pipeline.
    """
    with open(f"{project_dir}/data/interim/find_ai_outputs.p", "rb") as infile:
        find_ai_outputs = pickle.load(infile)
    categories = pd.read_csv(
        f"{project_dir}/data/raw/arxiv_article_categories.csv"
    )
    core_categories = {"cs.AI", "cs.NE", "stat.ML", "cs.LG"}
    in_core = categories["category_id"].isin(core_categories)
    core_ids = set(categories.loc[in_core]["article_id"])
    expanded_ids = set(flatten_list(list(find_ai_outputs[0].values())))
    return core_ids | expanded_ids
def process_paper_data():
    """Some final data processing
    * Add AI dates to relevant datasets (papers and Grid)
    * Add dates to the papers df
    * Create long topic df
    * Add DeepMind and OpenAI papers to the paper_grid file
    """
    # Add dates
    # This reads the first line of the papers to check if year is there.
    # papers = pd.read_csv(
    #     f"{project_dir}/data/raw/arxiv_articles.csv", dtype={"article_id": str}
    # )
    # Step 1: enrich the raw papers file with date/year/is_ai columns
    # (skipped entirely if the processed file already exists).
    if os.path.exists(f"{project_dir}/data/processed/arxiv_articles.csv") is True:
        logging.info("Already processed paper data")
        logging.info("Already added AI ids to data")
    else:
        papers = pd.read_csv(
            f"{project_dir}/data/raw/arxiv_articles.csv", dtype={"article_id": str}
        )
        ai_ids = make_ai_ids()
        logging.info("Adding dates to paper_df")
        # "created" is a YYYY-MM-... string; truncate to the first of the month.
        papers["date"] = papers["created"].apply(
            lambda x: datetime.datetime(int(x.split("-")[0]), int(x.split("-")[1]), 1)
        )
        papers["year"] = papers["date"].apply(lambda x: x.year)
        logging.info("Add AI dummy")
        papers["is_ai"] = papers["article_id"].isin(ai_ids)
        papers.to_csv(f"{project_dir}/data/processed/arxiv_articles.csv", index=False)
        # NOTE(review): papers_year_dict is only bound in this else-branch.
        # If papers were already processed but the topics file is missing,
        # the branch below raises NameError -- confirm this cannot occur.
        papers_year_dict = papers.set_index("article_id").to_dict()
    # Step 2: build the long (article_id, topic, weight) dataframe with dates.
    if os.path.exists(f"{project_dir}/data/processed/arxiv_topics_years.csv") is True:
        logging.info("Already created topic year df")
    else:
        logging.info("making topic year df")
        topic_mix = pd.read_csv(
            f"{project_dir}/data/processed/ai_topic_mix.csv", dtype={"article_id": str}
        )
        topic_long = topic_mix.melt(id_vars="article_id")
        # NOTE(review): the "year" column is filled from the "created"
        # strings rather than the computed "year" column -- confirm this
        # mismatch with downstream consumers.
        topic_long["year"], topic_long["date"] = [
            [papers_year_dict[var][_id] for _id in topic_long["article_id"]]
            for var in ["created", "date"]
        ]
        topic_long.to_csv(
            f"{project_dir}/data/processed/arxiv_topics_years.csv", index=False
        )
    # Step 3: clean the GRID affiliation data and append scraped
    # DeepMind / OpenAI affiliations.
    if os.path.exists(f"{project_dir}/data/processed/arxiv_grid.csv") is True:
        logging.info("Already processed GRID data")
    else:
        logging.info("Processing GRID data")
        logging.info("Fixing UCL bug")
        # Silence SettingWithCopyWarning for the slice assignments below.
        pd.options.mode.chained_assignment = None
        g = pd.read_csv(
            f"{project_dir}/data/raw/arxiv_grid_short.csv", dtype={"article_id": str}
        )
        # Relabel the mis-assigned "UCL Australia" rows as UCL (London).
        ucl_aus = g.loc[g["institute_name"] == "UCL Australia"]
        ucl_aus["institute_name"] = "UCL"
        ucl_aus["institute_country"] = "United Kingdom"
        # NOTE(review): UCL is at lat 51.5246, lon -0.1340 -- these values
        # look swapped and sign-dropped; confirm against GRID.
        ucl_aus["institute_lat"] = 0.1340
        ucl_aus["institute_lon"] = 51.5246
        ucl_aus["org_type"] = "Education"
        g_no_aus = g.loc[g["institute_name"] != "UCL Australia"]
        g_fixed = pd.concat([g_no_aus, ucl_aus], axis=0)
        # g_fixed.to_csv("arxiv_grid_proc.csv",index=False)
        logging.info("Adding DeepMind and OpenAI")
        with open(f"{project_dir}/data/raw/scraped_arxiv.json", "r") as infile:
            scraped = json.load(infile)
        with open(f"{project_dir}/data/interim/scraped_meta.json", "r") as infile:
            scraped_meta = json.load(infile)
        # Keys are arXiv URLs; keep only the trailing id segment.
        scraped_c = {k.split("/")[-1]: v for k, v in scraped.items()}
        new_results = []
        # Create a df with the information for deepmind / openai ids
        scr_no_dupes = g.loc[
            [x in set(scraped_c.keys()) for x in g["article_id"]]
        ].drop_duplicates("article_id")
        # For each id there we create a new series with org metadata
        for _id, r in scr_no_dupes.iterrows():
            if r["article_id"] in scraped_c.keys():
                paper_vector = {}
                n = scraped_c[r["article_id"]]
                paper_vector["institute_name"] = n
                paper_vector["article_id"] = r["article_id"]
                paper_vector["mag_id"] = r["mag_id"]
                paper_vector["mag_authors"] = r["mag_authors"]
                paper_vector["is_multinational"] = 0
                paper_vector["institute_id"] = f"extra_{n}"
                paper_vector["institute_country"] = scraped_meta[n]["institute_country"]
                paper_vector["institute_lat"] = scraped_meta[n]["lat"]
                paper_vector["institute_lon"] = scraped_meta[n]["lon"]
                paper_vector["org_type"] = scraped_meta[n]["org_type"]
                paper_series = pd.Series(paper_vector)
                new_results.append(paper_series)
        grid_out = pd.concat([g_fixed, pd.DataFrame(new_results)], axis=0)
        logging.info("Adding AI labels")
        ai_ids = make_ai_ids()
        grid_out["is_ai"] = grid_out["article_id"].isin(ai_ids)
        logging.info("Saving grid file")
        grid_out.to_csv(f"{project_dir}/data/processed/arxiv_grid.csv", index=False)
if __name__ == "__main__":
    process_paper_data()
    # NOTE(review): the raw inputs are deleted after processing, which makes
    # the script destructive and non-rerunnable without re-fetching the data.
    os.remove(f"{project_dir}/data/raw/arxiv_articles.csv")
    os.remove(f"{project_dir}/data/raw/arxiv_grid_short.csv")
| [
"pandas.DataFrame",
"os.remove",
"json.load",
"pandas.read_csv",
"os.path.exists",
"logging.info",
"pickle.load",
"pandas.Series",
"pandas.concat"
] | [((482, 549), 'pandas.read_csv', 'pd.read_csv', (['f"""{project_dir}/data/raw/arxiv_article_categories.csv"""'], {}), "(f'{project_dir}/data/raw/arxiv_article_categories.csv')\n", (493, 549), True, 'import pandas as pd\n'), ((5817, 5872), 'os.remove', 'os.remove', (['f"""{project_dir}/data/raw/arxiv_articles.csv"""'], {}), "(f'{project_dir}/data/raw/arxiv_articles.csv')\n", (5826, 5872), False, 'import os\n'), ((5878, 5935), 'os.remove', 'os.remove', (['f"""{project_dir}/data/raw/arxiv_grid_short.csv"""'], {}), "(f'{project_dir}/data/raw/arxiv_grid_short.csv')\n", (5887, 5935), False, 'import os\n'), ((438, 457), 'pickle.load', 'pickle.load', (['infile'], {}), '(infile)\n', (449, 457), False, 'import pickle\n'), ((1385, 1451), 'os.path.exists', 'os.path.exists', (['f"""{project_dir}/data/processed/arxiv_articles.csv"""'], {}), "(f'{project_dir}/data/processed/arxiv_articles.csv')\n", (1399, 1451), False, 'import os\n'), ((1469, 1513), 'logging.info', 'logging.info', (['"""Already processed paper data"""'], {}), "('Already processed paper data')\n", (1481, 1513), False, 'import logging\n'), ((1522, 1566), 'logging.info', 'logging.info', (['"""Already added AI ids to data"""'], {}), "('Already added AI ids to data')\n", (1534, 1566), False, 'import logging\n'), ((1594, 1683), 'pandas.read_csv', 'pd.read_csv', (['f"""{project_dir}/data/raw/arxiv_articles.csv"""'], {'dtype': "{'article_id': str}"}), "(f'{project_dir}/data/raw/arxiv_articles.csv', dtype={\n 'article_id': str})\n", (1605, 1683), True, 'import pandas as pd\n'), ((1742, 1782), 'logging.info', 'logging.info', (['"""Adding dates to paper_df"""'], {}), "('Adding dates to paper_df')\n", (1754, 1782), False, 'import logging\n'), ((2004, 2032), 'logging.info', 'logging.info', (['"""Add AI dummy"""'], {}), "('Add AI dummy')\n", (2016, 2032), False, 'import logging\n'), ((2258, 2328), 'os.path.exists', 'os.path.exists', (['f"""{project_dir}/data/processed/arxiv_topics_years.csv"""'], {}), 
"(f'{project_dir}/data/processed/arxiv_topics_years.csv')\n", (2272, 2328), False, 'import os\n'), ((2346, 2391), 'logging.info', 'logging.info', (['"""Already created topic year df"""'], {}), "('Already created topic year df')\n", (2358, 2391), False, 'import logging\n'), ((2411, 2447), 'logging.info', 'logging.info', (['"""making topic year df"""'], {}), "('making topic year df')\n", (2423, 2447), False, 'import logging\n'), ((2468, 2561), 'pandas.read_csv', 'pd.read_csv', (['f"""{project_dir}/data/processed/ai_topic_mix.csv"""'], {'dtype': "{'article_id': str}"}), "(f'{project_dir}/data/processed/ai_topic_mix.csv', dtype={\n 'article_id': str})\n", (2479, 2561), True, 'import pandas as pd\n'), ((2946, 3008), 'os.path.exists', 'os.path.exists', (['f"""{project_dir}/data/processed/arxiv_grid.csv"""'], {}), "(f'{project_dir}/data/processed/arxiv_grid.csv')\n", (2960, 3008), False, 'import os\n'), ((3026, 3069), 'logging.info', 'logging.info', (['"""Already processed GRID data"""'], {}), "('Already processed GRID data')\n", (3038, 3069), False, 'import logging\n'), ((3089, 3125), 'logging.info', 'logging.info', (['"""Processing GRID data"""'], {}), "('Processing GRID data')\n", (3101, 3125), False, 'import logging\n'), ((3134, 3164), 'logging.info', 'logging.info', (['"""Fixing UCL bug"""'], {}), "('Fixing UCL bug')\n", (3146, 3164), False, 'import logging\n'), ((3228, 3319), 'pandas.read_csv', 'pd.read_csv', (['f"""{project_dir}/data/raw/arxiv_grid_short.csv"""'], {'dtype': "{'article_id': str}"}), "(f'{project_dir}/data/raw/arxiv_grid_short.csv', dtype={\n 'article_id': str})\n", (3239, 3319), True, 'import pandas as pd\n'), ((3713, 3751), 'pandas.concat', 'pd.concat', (['[g_no_aus, ucl_aus]'], {'axis': '(0)'}), '([g_no_aus, ucl_aus], axis=0)\n', (3722, 3751), True, 'import pandas as pd\n'), ((3821, 3863), 'logging.info', 'logging.info', (['"""Adding DeepMind and OpenAI"""'], {}), "('Adding DeepMind and OpenAI')\n", (3833, 3863), False, 'import logging\n'), 
((5503, 5535), 'logging.info', 'logging.info', (['"""Adding AI labels"""'], {}), "('Adding AI labels')\n", (5515, 5535), False, 'import logging\n'), ((5640, 5672), 'logging.info', 'logging.info', (['"""Saving grid file"""'], {}), "('Saving grid file')\n", (5652, 5672), False, 'import logging\n'), ((3966, 3983), 'json.load', 'json.load', (['infile'], {}), '(infile)\n', (3975, 3983), False, 'import json\n'), ((4095, 4112), 'json.load', 'json.load', (['infile'], {}), '(infile)\n', (4104, 4112), False, 'import json\n'), ((5345, 5368), 'pandas.Series', 'pd.Series', (['paper_vector'], {}), '(paper_vector)\n', (5354, 5368), True, 'import pandas as pd\n'), ((5458, 5483), 'pandas.DataFrame', 'pd.DataFrame', (['new_results'], {}), '(new_results)\n', (5470, 5483), True, 'import pandas as pd\n')] |
#!/usr/bin/env python
# coding: utf-8
from tensorflow.keras.models import Model
from tensorflow.keras.layers import GlobalAveragePooling2D, Conv2D, Dense, Concatenate, Flatten, Input
from .abe_models import ABE
from ..layers import L2Normalisation
from ..layers.horde_layers import CKOP, PKOB
from .extractors import get_extractor, get_preprocess_method
def KOrderModel(extractor_name,
                embedding_sizes,
                high_order_dims,
                ho_trainable=False,
                end_layer=None):
    """Build a K-order model: a backbone extractor whose feature map is
    complemented with compact k-th order (CKOP) branches, each globally
    pooled, optionally projected, and L2-normalised.

    Returns the Keras model together with the matching preprocessing
    function for the chosen extractor.
    """
    backbone = get_extractor(extractor_name, end_layer=end_layer)
    feature_map = backbone.output
    branches = [feature_map]
    # One CKOP branch per requested order; orders start at 2.
    for order, order_dim in enumerate(high_order_dims, start=2):
        branches.append(
            CKOP(output_dim=order_dim, name='CKOP_' + str(order),
                 ho_trainable=ho_trainable)([feature_map] * order))
    # Pool each branch, project it when an embedding size is given,
    # then L2-normalise.
    for idx in range(len(branches)):
        head = GlobalAveragePooling2D(
            name='GAP_' + extractor_name + '_O' + str(idx + 1))(branches[idx])
        if embedding_sizes[idx] > 0:
            head = Dense(embedding_sizes[idx], use_bias=False)(head)
        branches[idx] = L2Normalisation(
            name='L2_' + extractor_name + '_O' + str(idx + 1))(head)
    model = Model(inputs=backbone.input,
                  outputs=branches,
                  name=extractor_name + '_O' + str(len(high_order_dims)))
    return model, get_preprocess_method(extractor_name)
def CascadedKOrder(extractor_name,
                   embedding_sizes,
                   high_order_dims,
                   ho_trainable=True,
                   end_layer=None):
    """Build a cascaded K-order model: each order-k PKOB branch is computed
    from the previous branch and the first-order feature map, so higher
    orders reuse lower-order computation.

    Returns the Keras model together with the matching preprocessing
    function for the chosen extractor.
    """
    model = get_extractor(extractor_name, end_layer=end_layer)
    inputs = model.input
    x = model.output
    max_order = len(high_order_dims)
    output_list = [x]
    # Add all high-order approximation layers (orders start at 2). Only the
    # first PKOB projects both inputs; later ones project the second only.
    for k, order_dim in enumerate(high_order_dims, start=2):
        only_project_second = k != 2
        x_ho = PKOB(order_dim,
                    only_project_second=only_project_second,
                    ho_trainable=ho_trainable)([output_list[-1], x])
        output_list.append(x_ho)
    # Add pooling and embedding layers.
    for k in range(len(output_list)):
        output_list[k] = GlobalAveragePooling2D(name='GAP_' + extractor_name + '_O' + str(k + 1))(output_list[k])
        # The original had two byte-identical Dense branches
        # (`if ho_trainable` / `elif k == 0`); merged into one condition.
        if ho_trainable or k == 0:
            output_list[k] = Dense(embedding_sizes[k],
                                   use_bias=False,
                                   name='Proj_' + extractor_name + '_O' + str(k + 1))(output_list[k])
        output_list[k] = L2Normalisation(name='L2_' + extractor_name + '_O' + str(k + 1))(output_list[k])
    return Model(inputs=inputs, outputs=output_list, name=extractor_name + '_O' + str(max_order)), get_preprocess_method(extractor_name)
def CascadedABE(embedding_size,
                high_order_dims,
                features_reduction=256,
                ho_trainable=True,
                n_head=8):
    """Build a cascaded high-order model on top of an attention-based
    ensemble (ABE) backbone: the n_head attended feature maps are
    concatenated, optionally reduced with a 1x1 conv, and fed to a cascade
    of PKOB branches; the first output remains the original ABE embedding.

    Returns the Keras model together with the matching preprocessing
    function.
    """
    # BUG FIX: the ABE backbone was built with a hard-coded n_head=8,
    # ignoring the n_head parameter used everywhere else in this function.
    model, preprocess_method = ABE(embedding_size[0], n_head=n_head)
    inp = model.input
    multi_head_out = [model.get_layer(name='inception_5b/output').get_output_at(k) for k in range(n_head)]
    concat = Concatenate()(multi_head_out)  # Nx H x W x n_ensemble*1024
    if features_reduction is not None:
        # 1x1 conv to reduce the concatenated channel dimension.
        concat = Conv2D(filters=features_reduction,
                        kernel_size=(1, 1),
                        use_bias=False)(concat)
    output_list = [concat]
    # Add all high-order approximation layers (orders start at 2).
    for k, order_dim in enumerate(high_order_dims, start=2):
        only_project_second = False if k == 2 else True
        x_ho = PKOB(order_dim,
                    only_project_second=only_project_second,
                    ho_trainable=ho_trainable)([output_list[-1], concat])
        output_list.append(x_ho)
    # Add pooling and embedding layers (skip index 0: replaced below).
    for k in range(1, len(output_list)):
        output_list[k] = GlobalAveragePooling2D(name='GAP_O' + str(k + 1))(output_list[k])
        if ho_trainable:
            output_list[k] = Dense(embedding_size[k],
                                   use_bias=False,
                                   name='Proj_O' + str(k + 1))(output_list[k])
        output_list[k] = L2Normalisation(name='L2_O' + str(k + 1))(output_list[k])
    # Finally we replace the first order by the true model:
    output_list[0] = model.get_layer(name='ABE'+str(n_head)).output
    return Model(inp, output_list, name='ABE'+str(n_head)+'_O'+str(len(embedding_size))), preprocess_method
| [
"tensorflow.keras.layers.Conv2D",
"tensorflow.keras.layers.Concatenate",
"tensorflow.keras.layers.Dense"
] | [((3482, 3495), 'tensorflow.keras.layers.Concatenate', 'Concatenate', ([], {}), '()\n', (3493, 3495), False, 'from tensorflow.keras.layers import GlobalAveragePooling2D, Conv2D, Dense, Concatenate, Flatten, Input\n'), ((3599, 3669), 'tensorflow.keras.layers.Conv2D', 'Conv2D', ([], {'filters': 'features_reduction', 'kernel_size': '(1, 1)', 'use_bias': '(False)'}), '(filters=features_reduction, kernel_size=(1, 1), use_bias=False)\n', (3605, 3669), False, 'from tensorflow.keras.layers import GlobalAveragePooling2D, Conv2D, Dense, Concatenate, Flatten, Input\n'), ((1193, 1234), 'tensorflow.keras.layers.Dense', 'Dense', (['embedding_sizes[k]'], {'use_bias': '(False)'}), '(embedding_sizes[k], use_bias=False)\n', (1198, 1234), False, 'from tensorflow.keras.layers import GlobalAveragePooling2D, Conv2D, Dense, Concatenate, Flatten, Input\n')] |
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test UN energy data processing
"""
from un.energy import process
import filecmp
import os
import sys
import unittest
from absl import app
# Allows the following module imports to work when running as a script
sys.path.append(
os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
# module_dir_ is the path to where this test is running from.
module_dir_ = os.path.dirname(__file__)
class TestUNEnergyProcess(unittest.TestCase):
    """End-to-end test for the UN energy data processing pipeline."""

    def test_un_energy_process(self):
        """Test the process() function for UN energy data set.
        Generates output files for the test_data input and compares it to the
        expected output files.
        """
        data_input = os.path.join(module_dir_, 'test_data/un_energy_input.csv')
        # create a tmp output directory
        # NOTE(review): tmp_dir is never removed after the test -- confirm
        # whether leftover output can make reruns stale.
        tmp_dir = os.path.join(module_dir_, 'tmp')
        if not os.path.exists(tmp_dir):
            os.mkdir(tmp_dir)
        test_output = os.path.join(tmp_dir, 'un_energy_test_output')
        expected_output = os.path.join(module_dir_,
                                       'test_data/un_energy_output')
        print(f'test file path: {data_input}, output: {test_output}')
        test_counters = process.process([data_input], test_output, 10000)
        # Sanity checks: the pipeline must have consumed input and produced
        # rows and stat-vars.
        self.assertTrue(test_counters['input_files'] > 0)
        self.assertTrue(test_counters['inputs_processed'] > 0)
        self.assertTrue(test_counters['output_csv_rows'] > 0)
        self.assertTrue(test_counters['output_stat_vars'] > 0)
        # Verify there are no error counters
        errors = 0
        for c in test_counters:
            if 'error' in c:
                errors += test_counters[c]
        self.assertEqual(errors, 0)
        # Compare file outputs
        # NOTE(review): filecmp.cmp defaults to shallow=True (stat-based);
        # consider shallow=False to force a content comparison.
        for output in ['.csv', '.mcf', '.tmcf']:
            self.assertTrue(
                filecmp.cmp(test_output + output, expected_output + output))
if __name__ == '__main__':
    # BUG FIX: the original called app.run() before unittest.main().
    # absl's app.run() with no argument falls back to
    # sys.modules['__main__'].main, which this module does not define, and
    # app.run() never returns anyway (it calls sys.exit), so unittest.main()
    # was unreachable. Run the unittest entry point directly.
    unittest.main()
| [
"unittest.main",
"os.mkdir",
"os.path.abspath",
"os.path.dirname",
"os.path.exists",
"un.energy.process.process",
"absl.app.run",
"filecmp.cmp",
"os.path.join"
] | [((976, 1001), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (991, 1001), False, 'import os\n'), ((2520, 2529), 'absl.app.run', 'app.run', ([], {}), '()\n', (2527, 2529), False, 'from absl import app\n'), ((2534, 2549), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2547, 2549), False, 'import unittest\n'), ((1294, 1352), 'os.path.join', 'os.path.join', (['module_dir_', '"""test_data/un_energy_input.csv"""'], {}), "(module_dir_, 'test_data/un_energy_input.csv')\n", (1306, 1352), False, 'import os\n'), ((1411, 1443), 'os.path.join', 'os.path.join', (['module_dir_', '"""tmp"""'], {}), "(module_dir_, 'tmp')\n", (1423, 1443), False, 'import os\n'), ((1536, 1582), 'os.path.join', 'os.path.join', (['tmp_dir', '"""un_energy_test_output"""'], {}), "(tmp_dir, 'un_energy_test_output')\n", (1548, 1582), False, 'import os\n'), ((1609, 1664), 'os.path.join', 'os.path.join', (['module_dir_', '"""test_data/un_energy_output"""'], {}), "(module_dir_, 'test_data/un_energy_output')\n", (1621, 1664), False, 'import os\n'), ((1799, 1848), 'un.energy.process.process', 'process.process', (['[data_input]', 'test_output', '(10000)'], {}), '([data_input], test_output, 10000)\n', (1814, 1848), False, 'from un.energy import process\n'), ((1459, 1482), 'os.path.exists', 'os.path.exists', (['tmp_dir'], {}), '(tmp_dir)\n', (1473, 1482), False, 'import os\n'), ((1496, 1513), 'os.mkdir', 'os.mkdir', (['tmp_dir'], {}), '(tmp_dir)\n', (1504, 1513), False, 'import os\n'), ((869, 894), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (884, 894), False, 'import os\n'), ((2426, 2485), 'filecmp.cmp', 'filecmp.cmp', (['(test_output + output)', '(expected_output + output)'], {}), '(test_output + output, expected_output + output)\n', (2437, 2485), False, 'import filecmp\n')] |
#! /usr/bin/env python
import wx
import sys
import os
import uivar
import uidef
sys.path.append(os.path.abspath(".."))
from win import bootDeviceWin_FlexspiNand
class secBootUiFlexspiNand(bootDeviceWin_FlexspiNand.bootDeviceWin_FlexspiNand):
    def __init__(self, parent):
        # Initialize the generated wx frame, then load the persisted
        # FlexSPI-NAND boot-device configuration words from uivar.
        bootDeviceWin_FlexspiNand.bootDeviceWin_FlexspiNand.__init__(self, parent)
        flexspiNandOpt, flexspiNandFcbOpt, flexspiNandImageInfo, flexspiNandKeyBlob = uivar.getBootDeviceConfiguration(uidef.kBootDevice_FlexspiNand)
        # Cached 32-bit option words; bitfields are edited in place by the
        # _get* handlers below.
        self.flexspiNandOpt = flexspiNandOpt
        self.flexspiNandFcbOpt = flexspiNandFcbOpt
        self.flexspiNandImageInfo = flexspiNandImageInfo
        self.flexspiNandKeyBlob = flexspiNandKeyBlob
def _getFrequence( self ):
txt = self.m_choice_Max_Freq.GetString(self.m_choice_Max_Freq.GetSelection())
if txt == '30MHz':
val = 0x1
elif txt == '50MHz':
val = 0x2
elif txt == '60MHz':
val = 0x3
elif txt == '75MHz':
val = 0x4
elif txt == '80MHz':
val = 0x5
elif txt == '100MHz':
val = 0x6
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xFFFFFFF0) | (val << 0)
def _getPageSize( self ):
txt = self.m_choice_Page_Size.GetString(self.m_choice_Page_Size.GetSelection())
if txt == '2KB':
val = 0x2
elif txt == '4KB':
val = 0x4
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xFFFFFF0F) | (val << 4)
def _getPagePerBlock( self ):
txt = self.m_choice_Pages.GetString(self.m_choice_Pages.GetSelection())
if txt == '64':
val = 0x0
elif txt == '128':
val = 0x1
elif txt == '256':
val = 0x2
elif txt == '32':
val = 0x3
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xFFFFF0FF) | (val << 8)
def _getFlashSize( self ):
txt = self.m_choice_Flash_size.GetString(self.m_choice_Flash_size.GetSelection())
if txt == '512M':
val = 0x0
elif txt == '1GB':
val = 0x1
elif txt == '2GB':
val = 0x2
elif txt == '4GB':
val = 0x4
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xFFF0FFFF) | (val << 16)
def _getMultiplane( self ):
txt = self.m_choice_planes.GetString(self.m_choice_planes.GetSelection())
if txt == '1 plane':
val = 0x0
elif txt == '2 planes':
val = 0x1
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xFFFF0FFF) | (val << 12)
def _getOptionSize( self ):
txt = self.m_choice_Option_size.GetString(self.m_choice_Option_size.GetSelection())
if txt == '0':
val = 0x0
elif txt == '1':
val = 0x1
elif txt == '2':
val = 0x2
elif txt == '3':
val = 0x3
elif txt == '4':
val = 0x4
elif txt == '5':
val = 0x5
elif txt == '6':
val = 0x6
elif txt == '7':
val = 0x7
elif txt == '8':
val = 0x8
elif txt == '9':
val = 0x9
elif txt == '10':
val = 0xA
elif txt == '11':
val = 0xB
elif txt == '12':
val = 0xC
elif txt == '13':
val = 0xD
elif txt == '14':
val = 0xE
elif txt == '15':
val = 0xF
else:
pass
self.flexspiNandOpt = (self.flexspiNandOpt & 0xF0FFFFFF) | (val << 24)
def _getFCBSize( self ):
txt = self.m_choice_Size.GetString(self.m_choice_Size.GetSelection())
if txt == '3':
val = 0x3
elif txt == '4':
val = 0x4
elif txt == '5':
val = 0x5
elif txt == '6':
val = 0x6
elif txt == '7':
val = 0x7
elif txt == '8':
val = 0x8
elif txt == '9':
val = 0x9
elif txt == '10':
val = 0x10
else:
pass
self.flexspiNandFcbOpt = (self.flexspiNandFcbOpt & 0xFFFFFFF0) | (val << 0)
def _getAddressType( self ):
txt = self.m_choice_address_type.GetString(self.m_choice_address_type.GetSelection())
if txt == 'byte address':
val = 0x0
elif txt == 'block address':
val = 0x1
else:
pass
self.flexspiNandFcbOpt = (self.flexspiNandFcbOpt & 0xFFFFF0FF) | (val << 8)
def _getSearchStride( self ):
txt = self.m_choice_search_stride.GetString(self.m_choice_search_stride.GetSelection())
if txt == '64 pages':
val = 0x0
elif txt == '128 pages':
val = 0x1
elif txt == '256 pages':
val = 0x2
elif txt == '32 pages':
val = 0x3
else:
pass
self.flexspiNandFcbOpt = (self.flexspiNandFcbOpt & 0xFF0FFFFF) | (val << 20)
def _getSearchCount( self ):
txt = self.m_choice_search_count.GetString(self.m_choice_search_count.GetSelection())
if txt == '1':
val = 0x1
elif txt == '2':
val = 0x2
elif txt == '3':
val = 0x3
elif txt == '4':
val = 0x4
else:
pass
self.flexspiNandFcbOpt = (self.flexspiNandFcbOpt & 0xF0FFFFFF) | (val << 24)
    ################# NOTE: possible problem here -- needs to be confirmed #################
def _getBlockCountandID( self ):
val_block_count = int(self.m_textCtrl_block_count.GetLineText(0))
val_block_id = int(self.m_textCtrl_block_id.GetLineText(0))
if val_block_id > val_block_count:
wx.MessageBox('Block ID Error', 'Confirm', wx.OK)
if val_block_count > 8:
wx.MessageBox('Max Block Number Error', 'Confirm', wx.OK)
self.flexspiNandImageInfo = (self.flexspiNandImageInfo & 0xFFFF0000) | (val_block_id << 0)
self.flexspiNandImageInfo = (self.flexspiNandImageInfo & 0x0000FFFF) | (val_block_count << 16)
################################# may be exist problem Need to be confirmed#################################
def _getImageIndex( self ):
txt = self.m_choice_image_index.GetString(self.m_choice_image_index.GetSelection())
if txt == '0':
val = 0x0
elif txt == '1':
val = 0x1
elif txt == '2':
val = 0x2
elif txt == '3':
val = 0x3
elif txt == '4':
val = 0x4
elif txt == '5':
val = 0x5
elif txt == '6':
val = 0x6
elif txt == '7':
val = 0x7
elif txt == '8':
val = 0x8
elif txt == '9':
val = 0x9
elif txt == '10':
val = 0xA
elif txt == '11':
val = 0xB
elif txt == '12':
val = 0xC
elif txt == '13':
val = 0xD
elif txt == '14':
val = 0xE
elif txt == '15':
val = 0xF
else:
pass
if (self.flexspiNandKeyBlob & 0x0F000000) == 0x01000000:
self.flexspiNandKeyBlob = (self.flexspiNandKeyBlob & 0xFFFFFFF0) | (val << 0)
def _getDekSize( self ):
txt = self.m_choice_dek_size.GetString(self.m_choice_dek_size.GetSelection())
if txt == '128bits':
val = 0x0
else:
pass
if (self.flexspiNandKeyBlob & 0x0F000000) == 0x00000000:
self.flexspiNandKeyBlob = (self.flexspiNandKeyBlob & 0xFFFFFF0F) | (val << 4)
def _getKeyBlobInfoSize( self ):
txt = self.m_choice_keyblob_infosize.GetString(self.m_choice_keyblob_infosize.GetSelection())
if txt == '0':
val = 0x0
elif txt == '1':
val = 0x1
elif txt == '2':
val = 0x2
elif txt == '3':
val = 0x3
elif txt == '4':
val = 0x4
elif txt == '5':
val = 0x5
elif txt == '6':
val = 0x6
elif txt == '7':
val = 0x7
elif txt == '8':
val = 0x8
elif txt == '9':
val = 0x9
elif txt == '10':
val = 0xA
elif txt == '11':
val = 0xB
elif txt == '12':
val = 0xC
elif txt == '13':
val = 0xD
elif txt == '14':
val = 0xE
elif txt == '15':
val = 0xF
else:
pass
if (self.flexspiNandKeyBlob & 0x0F000000) == 0x00000000:
if txt != '3':
wx.MessageBox('keyblob_info size must equal to 3 if Type = Update', 'Confirm', wx.OK )
else:
self.flexspiNandKeyBlob = (self.flexspiNandKeyBlob & 0xFF0FFFFF) | (val << 20)
def _getType( self ):
txt = self.m_choice_type.GetString(self.m_choice_type.GetSelection())
if txt == 'Update':
val = 0x0
elif txt == 'Program':
val = 0x1
else:
pass
self.flexspiNandKeyBlob = (self.flexspiNandKeyBlob & 0xF0FFFFFF) | (val << 24)
def cancel_of_FLEXSPI_NAND(self, event):
self.Show(False)
def apply_of_FLEXSPI_NAND(self, event):
self._getFrequence()
self._getPageSize()
self._getPageSize()
self._getPagePerBlock()
self._getFlashSize()
self._getMultiplane()
self._getOptionSize()
self._getFCBSize()
self._getAddressType()
self._getSearchStride()
self._getSearchCount()
self._getBlockCountandID()
self._getType()
self._getImageIndex()
self._getDekSize()
self._getKeyBlobInfoSize()
uivar.setBootDeviceConfiguration(uidef.kBootDevice_FlexspiNand, self.flexspiNandOpt, self.flexspiNandFcbOpt, self.flexspiNandImageInfo, self.flexspiNandKeyBlob)
self.Show(False)
def OnClose_FLEXSPI_NAND(self, event):
ret = wx.MessageBox('Do you really want to leave?', 'Confirm', wx.OK | wx.CANCEL)
if ret == wx.OK:
self.Show(False) | [
"os.path.abspath",
"uivar.setBootDeviceConfiguration",
"uivar.getBootDeviceConfiguration",
"win.bootDeviceWin_FlexspiNand.bootDeviceWin_FlexspiNand.__init__",
"wx.MessageBox"
] | [((102, 123), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (117, 123), False, 'import os\n'), ((294, 368), 'win.bootDeviceWin_FlexspiNand.bootDeviceWin_FlexspiNand.__init__', 'bootDeviceWin_FlexspiNand.bootDeviceWin_FlexspiNand.__init__', (['self', 'parent'], {}), '(self, parent)\n', (354, 368), False, 'from win import bootDeviceWin_FlexspiNand\n'), ((456, 519), 'uivar.getBootDeviceConfiguration', 'uivar.getBootDeviceConfiguration', (['uidef.kBootDevice_FlexspiNand'], {}), '(uidef.kBootDevice_FlexspiNand)\n', (488, 519), False, 'import uivar\n'), ((10300, 10470), 'uivar.setBootDeviceConfiguration', 'uivar.setBootDeviceConfiguration', (['uidef.kBootDevice_FlexspiNand', 'self.flexspiNandOpt', 'self.flexspiNandFcbOpt', 'self.flexspiNandImageInfo', 'self.flexspiNandKeyBlob'], {}), '(uidef.kBootDevice_FlexspiNand, self.\n flexspiNandOpt, self.flexspiNandFcbOpt, self.flexspiNandImageInfo, self\n .flexspiNandKeyBlob)\n', (10332, 10470), False, 'import uivar\n'), ((10548, 10623), 'wx.MessageBox', 'wx.MessageBox', (['"""Do you really want to leave?"""', '"""Confirm"""', '(wx.OK | wx.CANCEL)'], {}), "('Do you really want to leave?', 'Confirm', wx.OK | wx.CANCEL)\n", (10561, 10623), False, 'import wx\n'), ((6129, 6178), 'wx.MessageBox', 'wx.MessageBox', (['"""Block ID Error"""', '"""Confirm"""', 'wx.OK'], {}), "('Block ID Error', 'Confirm', wx.OK)\n", (6142, 6178), False, 'import wx\n'), ((6225, 6282), 'wx.MessageBox', 'wx.MessageBox', (['"""Max Block Number Error"""', '"""Confirm"""', 'wx.OK'], {}), "('Max Block Number Error', 'Confirm', wx.OK)\n", (6238, 6282), False, 'import wx\n'), ((9146, 9235), 'wx.MessageBox', 'wx.MessageBox', (['"""keyblob_info size must equal to 3 if Type = Update"""', '"""Confirm"""', 'wx.OK'], {}), "('keyblob_info size must equal to 3 if Type = Update',\n 'Confirm', wx.OK)\n", (9159, 9235), False, 'import wx\n')] |
from .CosmoHammerSampler import CosmoHammerSampler
from collections import namedtuple
from cosmoHammer.util.SampleFileUtil import SampleFileUtil
from mpi4py import MPI
import emcee
import itertools
class MpiCosmoHammerSampler(CosmoHammerSampler):
"""
A sampler implementation extending the regular sampler in order to allow for distributing
the computation with MPI.
:param kwargs:
key word arguments passed to the CosmoHammerSampler
"""
def __init__(self, **kwargs):
"""
CosmoHammer sampler implementation
"""
self._rank = MPI.COMM_WORLD.Get_rank()
super(MpiCosmoHammerSampler, self).__init__(**kwargs)
self.M = self._getMapFunction()
def _getMapFunction(self):
"""
Returns the build in map function
"""
return map
def createSampleFileUtil(self):
"""
Returns a new instance of a File Util
"""
return SampleFileUtil(self.filePrefix, self.isMaster(), reuseBurnin=self.reuseBurnin)
def sampleBurnin(self, p0):
"""
Starts the sampling process. The master node (mpi rank = 0) persists the result to the disk
"""
p0 = self.mpiBCast(p0)
self.log("MPI Process rank "+ str(self._rank)+" starts sampling")
return super(MpiCosmoHammerSampler, self).sampleBurnin(p0);
def sample(self, burninPos, burninProb, burninRstate, datas):
"""
Starts the sampling process. The master node (mpi rank = 0) persists the result to the disk
"""
burninPos = self.mpiBCast(burninPos)
burninProb = self.mpiBCast(burninProb)
burninRstate = self.mpiBCast(burninRstate)
self.log("MPI Process rank "+ str(self._rank)+" starts sampling")
super(MpiCosmoHammerSampler, self).sample(burninPos, burninProb, burninRstate, datas);
def loadBurnin(self):
"""
loads the burn in form the file system
"""
if(self.isMaster()):
pos, prob, rstate = super(MpiCosmoHammerSampler, self).loadBurnin()
else:
pos, prob, rstate = []
pos = self.mpiBCast(pos)
prob = self.mpiBCast(prob)
rstate = self.mpiBCast(rstate)
self.log("loading done")
return pos, prob, rstate
def createEmceeSampler(self, callable):
"""
Factory method to create the emcee sampler
"""
self.log("Using emcee "+str(emcee.__version__))
#create a tuple to emulate to pool's map function using our self.mpiParallelizedMap
pool = namedtuple('pool',['map'])(self.mpiParallelizedMap)
return emcee.EnsembleSampler(self.nwalkers, self.paramCount, callable,
threads=self.threadCount, pool=pool)
def createInitPos(self):
"""
Factory method to create initial positions
"""
#bcast the positions to ensure that all mpi nodes start at the same position
return self.mpiBCast(super(MpiCosmoHammerSampler, self).createInitPos())
#MPI sync routines
def mpiBCast(self, value):
"""
Mpi bcasts the value and Returns the value from the master (rank = 0).
"""
return MPI.COMM_WORLD.bcast(value)
def mpiParallelizedMap(self, function,list):
"""
Emulates a pool map function using Mpi.
Retrieves the number of mpi processes and splits the list of walker position
in order to allow each process its block
"""
(rank,size) = (MPI.COMM_WORLD.Get_rank(),MPI.COMM_WORLD.Get_size())
#sync
list = self.mpiBCast(list)
#split, process and merge the list
return self.mergeList(MPI.COMM_WORLD.allgather(self.M(function, self.splitList(list,size)[rank])))
def splitList(self, list, n):
"""
Splits the list into block of eqals sizes (listlength/n)
"""
blockLen = len(list) / float(n)
return [list[int(round(blockLen * i)): int(round(blockLen * (i + 1)))] for i in range(n)]
def mergeList(self, lists):
"""
Merges the lists into one single list
"""
return list(itertools.chain(*lists))
def isMaster(self):
"""
Returns true if the rank is 0
"""
return (self._rank==0)
| [
"mpi4py.MPI.COMM_WORLD.bcast",
"emcee.EnsembleSampler",
"mpi4py.MPI.COMM_WORLD.Get_rank",
"collections.namedtuple",
"itertools.chain",
"mpi4py.MPI.COMM_WORLD.Get_size"
] | [((608, 633), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (631, 633), False, 'from mpi4py import MPI\n'), ((2786, 2891), 'emcee.EnsembleSampler', 'emcee.EnsembleSampler', (['self.nwalkers', 'self.paramCount', 'callable'], {'threads': 'self.threadCount', 'pool': 'pool'}), '(self.nwalkers, self.paramCount, callable, threads=\n self.threadCount, pool=pool)\n', (2807, 2891), False, 'import emcee\n'), ((3372, 3399), 'mpi4py.MPI.COMM_WORLD.bcast', 'MPI.COMM_WORLD.bcast', (['value'], {}), '(value)\n', (3392, 3399), False, 'from mpi4py import MPI\n'), ((2710, 2737), 'collections.namedtuple', 'namedtuple', (['"""pool"""', "['map']"], {}), "('pool', ['map'])\n", (2720, 2737), False, 'from collections import namedtuple\n'), ((3681, 3706), 'mpi4py.MPI.COMM_WORLD.Get_rank', 'MPI.COMM_WORLD.Get_rank', ([], {}), '()\n', (3704, 3706), False, 'from mpi4py import MPI\n'), ((3707, 3732), 'mpi4py.MPI.COMM_WORLD.Get_size', 'MPI.COMM_WORLD.Get_size', ([], {}), '()\n', (3730, 3732), False, 'from mpi4py import MPI\n'), ((4327, 4350), 'itertools.chain', 'itertools.chain', (['*lists'], {}), '(*lists)\n', (4342, 4350), False, 'import itertools\n')] |
from .graph import Graph
import json
import unittest
class Multigraph:
GRAPHS = 'graphs'
TYPE = 'type'
LABEL = 'label'
METADATA = 'metadata'
def __init__(self, graphs=[], type=None, label=None, metadata=None):
"""Constructor of the Multigraph class.
Arguments:
graphs -- [Graph] list of Graph objects that are part of the multigraph (default [])
type -- string (optionally) the typename of the multigraph (default None)
label -- string (optionally) the label of the multigraph (default None)
metadata -- dictionary (optionally) a dictionary representing the metadata that belongs to the multigraph (default None)
"""
self._graphs = []
self.set_graphs(graphs)
self._type = None
if type != None:
self.set_type(type)
self._label = None
if label != None:
self.set_label(label)
self._metadata = None
if metadata != None:
self.set_metadata(metadata)
def _isJsonSerializable(self, dictionay):
try:
json.dumps(dictionay)
return True
except Exception:
return False
def add_graph(self, graph):
"""Method to add a graph to the multigraph.
Arguments:
graph -- Graph the graph to add
"""
if graph == None:
return
if isinstance(graph, Graph):
self._graphs.append(graph)
else:
raise TypeError("Adding graph to Multigraph failed: graph must of type Graph")
def set_graphs(self, graphs):
"""Method to add a list of graphs.
Arguments:
graphs -- [Graph] the list of graphs that need to be added
"""
for graph in graphs:
self.add_graph(graph)
def set_type(self, type):
"""Method to set the type of the multigraph.
Arguments:
type -- string the typename of the multigraph to set
"""
if type == None:
self._type = None
else:
if isinstance(type, str):
self._type = type
else:
try:
stringType = str(type)
self._type = stringType
except Exception as excecption:
raise TypeError("Type of type in Multigraph object needs to be a string (or string castable): " + str(exception))
def set_label(self, label):
"""Method to set the label of the multigraph.
Arguments:
label -- string the labelname of the multigraph to set
"""
if label == None:
self._label = None
else:
if isinstance(label, str):
self._label = label
else:
try:
stringLabel = str(label)
self._label = stringLabel
except Exception as excecption:
raise TypeError("Type of label in Multigraph object needs to be a string (or string castable): " + str(exception))
def set_metadata(self, metadata):
"""Method to set the metadata of the multigraph.
Arguments:
metadata -- dictionary the metadata to set on the multigraph
"""
if metadata == None:
self._metadata = None
else:
if isinstance(metadata, dict) and self._isJsonSerializable(metadata):
self._metadata = metadata
else:
raise TypeError("metadata in Multigraph object needs to be json serializable")
def get_graphs(self):
"""Method to get a list of all graphs in the multigraph.
Returns:
[Graph] list of graphs present in the multigraph
"""
return self._graphs
def get_type(self):
"""Method to get the type of the multigraph.
Returns:
string the typename of the multigraph if set, else None
"""
return self._type
def get_label(self):
"""Method to get the label of the multigraph.
Returns:
string the label of the multigraph if set, else None
"""
return self._label
def get_metadata(self):
""""Get the metadata of the multigraph.
Returns:
dictionary the metadata of the multigraph if set, else None
"""
return self._metadata
def to_JSON(self, asString=False):
"""Convert the multigraph to JSON.
Creates a dictionary object of the multigraph comforming the JSON Graph Format.
Arguments:
asString -- bool if set to True the method returns the JSON as string
Returns:
dictionary the multigraph as dictionary ready to serialize
"""
result = {}
if self._label != None:
result[Multigraph.LABEL] = self._label
if self._type != None:
result[Multigraph.TYPE] = self._type
if self._metadata != None:
result[Multigraph.METADATA] = self._metadata
graphs = []
for graph in self._graphs:
graphs.append(graph.to_JSON())
result[Multigraph.GRAPHS] = graphs
if asString:
return json.dumps(result)
else:
return result
class TestMultigraphClass(unittest.TestCase):
def test_base(self):
graph = Graph([], [], 'graphType', 'graphLabel', True, {'metaNumber': 11, 'metaString': 'hello world'})
mgraph = Multigraph([graph], 'multigraphType', 'multigraphLabel', {'metaNumber': 11, 'metaString': 'hello world'})
self.assertEqual(mgraph.get_type(), 'multigraphType')
self.assertEqual(mgraph.get_label(), 'multigraphLabel')
self.assertEqual(mgraph.get_metadata()['metaNumber'], 11)
self.assertEqual(mgraph.get_metadata()['metaString'], 'hello world')
self.assertEqual(mgraph.get_graphs()[0], graph)
def test_setters(self):
graph = Graph([], [], 'graphType', 'graphLabel', True, {'metaNumber': 11, 'metaString': 'hello world'})
mgraph = Multigraph([], 'multigraphType', 'multigraphLabel', {'metaNumber': 11, 'metaString': 'hello world'})
mgraph.set_label('new_multigraphLabel')
mgraph.set_type('new_multigraphType')
mgraph.set_metadata({'new_metaNumber': 13, 'new_metaString': 'world hello'})
mgraph.set_graphs([graph])
self.assertEqual(mgraph.get_type(), 'new_multigraphType')
self.assertEqual(mgraph.get_label(), 'new_multigraphLabel')
self.assertEqual(mgraph.get_metadata()['new_metaNumber'], 13)
self.assertEqual(mgraph.get_metadata()['new_metaString'], 'world hello')
self.assertEqual(mgraph.get_graphs()[0], graph)
#TODO make unit test complete
def test_to_JSON(self):
self.assertEqual("TODO", "TODO")
#TODO unittest json result
if __name__ == '__main__':
unittest.main() | [
"unittest.main",
"json.dumps"
] | [((7112, 7127), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7125, 7127), False, 'import unittest\n'), ((1145, 1166), 'json.dumps', 'json.dumps', (['dictionay'], {}), '(dictionay)\n', (1155, 1166), False, 'import json\n'), ((5417, 5435), 'json.dumps', 'json.dumps', (['result'], {}), '(result)\n', (5427, 5435), False, 'import json\n')] |
"""
-*- coding:utf-8 -*-
@Time :2019/11/27 下午4:54
@Author :wts
@File :utils.py
@Version:1.0
"""
import torch
import torch.utils.data.sampler as Sampler
import torch.nn as nn
import torch.nn.functional as F
class Mish(nn.Module):
def __init__(self):
super().__init__()
#print("Mish activation loaded...")
def forward(self, x):
x = x * (torch.tanh(F.softplus(x)))
return x
class MseLoss(nn.Module):
def __init__(self):
super(MseLoss, self).__init__()
| [
"torch.nn.functional.softplus"
] | [((384, 397), 'torch.nn.functional.softplus', 'F.softplus', (['x'], {}), '(x)\n', (394, 397), True, 'import torch.nn.functional as F\n')] |
#!/usr/bin/env python
import os
import pytest
import mindfuck
def load_from_file(brainfuck_code):
cwd = os.path.dirname(__file__)
fpath = os.path.join(cwd, 'data/%s' % brainfuck_code)
with open(fpath, 'r') as f:
return f.read()
def test_hello_world(capfd):
code = load_from_file("helloworld.bf")
mindfuck.eval(code)
output, error = capfd.readouterr()
assert output=="Hello World!\n"
def test_foobar(capfd):
code = load_from_file("foobar.bf")
mindfuck.eval(code)
output, error = capfd.readouterr()
assert output!="foobar"
| [
"mindfuck.eval",
"os.path.dirname",
"os.path.join"
] | [((110, 135), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (125, 135), False, 'import os\n'), ((148, 193), 'os.path.join', 'os.path.join', (['cwd', "('data/%s' % brainfuck_code)"], {}), "(cwd, 'data/%s' % brainfuck_code)\n", (160, 193), False, 'import os\n'), ((327, 346), 'mindfuck.eval', 'mindfuck.eval', (['code'], {}), '(code)\n', (340, 346), False, 'import mindfuck\n'), ((490, 509), 'mindfuck.eval', 'mindfuck.eval', (['code'], {}), '(code)\n', (503, 509), False, 'import mindfuck\n')] |
import pytest
from thenewboston_node.business_logic.exceptions import ValidationError
from thenewboston_node.business_logic.tests.baker_factories import (
make_account_state, make_blockchain_state, make_genesis_blockchain_state
)
from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks
def test_blockchain_blockchain_genesis_state_is_validated(blockchain_base):
blockchain_genesis_state = make_genesis_blockchain_state()
with patch_blockchain_states(blockchain_base, [blockchain_genesis_state]):
blockchain_base.validate_blockchain_states(is_partial_allowed=False)
def test_blockchain_without_blockchain_genesis_state_is_validated(blockchain_base):
non_initial_blockchain_state = make_blockchain_state()
with patch_blockchain_states(blockchain_base, [non_initial_blockchain_state]):
blockchain_base.validate_blockchain_states(is_partial_allowed=True)
def test_blockchain_must_have_at_least_blockchain_genesis_state(blockchain_base):
with patch_blockchain_states(blockchain_base, []):
with pytest.raises(ValidationError, match='Blockchain must contain at least one blockchain state'):
blockchain_base.validate_blockchain_states()
def test_blockchain_must_start_with_blockchain_genesis_state(blockchain_base):
non_initial_blockchain_state = make_blockchain_state()
with patch_blockchain_states(blockchain_base, [non_initial_blockchain_state]):
with pytest.raises(ValidationError, match='Blockchain must start with initial blockchain state'):
blockchain_base.validate_blockchain_states(is_partial_allowed=False)
def test_validate_blockchain_state_points_to_non_existing_block(blockchain_base, blockchain_genesis_state, block_0):
with patch_blocks(blockchain_base, [block_0]):
blockchain_state_5 = blockchain_base.generate_blockchain_state()
blockchain_state_5.last_block_number = 5
with patch_blockchain_states(blockchain_base, [blockchain_genesis_state, blockchain_state_5]):
with pytest.raises(
ValidationError, match='Blockchain state last_block_number points to non-existing block'
):
blockchain_base.validate_blockchain_states(is_partial_allowed=True)
def test_validate_blockchain_state_last_block_identifier_mismatch(blockchain_base, blockchain_genesis_state, block_0):
with patch_blocks(blockchain_base, [block_0]):
blockchain_state_0 = blockchain_base.generate_blockchain_state()
blockchain_state_0.last_block_identifier = 'wrong-identifier'
with patch_blockchain_states(blockchain_base, [blockchain_genesis_state, blockchain_state_0]):
with pytest.raises(
ValidationError, match='Blockchain state last_block_identifier does not match block_identifier'
):
blockchain_base.validate_blockchain_states(is_partial_allowed=True)
def test_validate_blockchain_state_next_block_identifier_mismatch(blockchain_base, blockchain_genesis_state, block_0):
with patch_blocks(blockchain_base, [block_0]):
blockchain_state_0 = blockchain_base.generate_blockchain_state()
blockchain_state_0.next_block_identifier = 'wrong-identifier'
with patch_blockchain_states(blockchain_base, [blockchain_genesis_state, blockchain_state_0]):
with pytest.raises(
ValidationError,
match='Blockchain state next_block_identifier does not match last_block_number message hash'
):
blockchain_base.validate_blockchain_states(is_partial_allowed=True)
def test_validate_node_is_declared_if_pv_schedule_is_set(blockchain_base):
account_state = make_account_state()
account_state.node = None
blockchain_genesis_state = make_genesis_blockchain_state(message__account_states={'00000': account_state})
with patch_blockchain_states(blockchain_base, [blockchain_genesis_state]):
with pytest.raises(ValidationError, match='Account state node must be set'):
blockchain_base.validate_blockchain_states(is_partial_allowed=False)
| [
"thenewboston_node.business_logic.tests.mocks.utils.patch_blocks",
"thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states",
"thenewboston_node.business_logic.tests.baker_factories.make_blockchain_state",
"pytest.raises",
"thenewboston_node.business_logic.tests.baker_factories.make_genes... | [((445, 476), 'thenewboston_node.business_logic.tests.baker_factories.make_genesis_blockchain_state', 'make_genesis_blockchain_state', ([], {}), '()\n', (474, 476), False, 'from thenewboston_node.business_logic.tests.baker_factories import make_account_state, make_blockchain_state, make_genesis_blockchain_state\n'), ((755, 778), 'thenewboston_node.business_logic.tests.baker_factories.make_blockchain_state', 'make_blockchain_state', ([], {}), '()\n', (776, 778), False, 'from thenewboston_node.business_logic.tests.baker_factories import make_account_state, make_blockchain_state, make_genesis_blockchain_state\n'), ((1359, 1382), 'thenewboston_node.business_logic.tests.baker_factories.make_blockchain_state', 'make_blockchain_state', ([], {}), '()\n', (1380, 1382), False, 'from thenewboston_node.business_logic.tests.baker_factories import make_account_state, make_blockchain_state, make_genesis_blockchain_state\n'), ((3737, 3757), 'thenewboston_node.business_logic.tests.baker_factories.make_account_state', 'make_account_state', ([], {}), '()\n', (3755, 3757), False, 'from thenewboston_node.business_logic.tests.baker_factories import make_account_state, make_blockchain_state, make_genesis_blockchain_state\n'), ((3819, 3898), 'thenewboston_node.business_logic.tests.baker_factories.make_genesis_blockchain_state', 'make_genesis_blockchain_state', ([], {'message__account_states': "{'00000': account_state}"}), "(message__account_states={'00000': account_state})\n", (3848, 3898), False, 'from thenewboston_node.business_logic.tests.baker_factories import make_account_state, make_blockchain_state, make_genesis_blockchain_state\n'), ((487, 555), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[blockchain_genesis_state]'], {}), '(blockchain_base, [blockchain_genesis_state])\n', (510, 555), False, 'from 
thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((789, 861), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[non_initial_blockchain_state]'], {}), '(blockchain_base, [non_initial_blockchain_state])\n', (812, 861), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((1032, 1076), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[]'], {}), '(blockchain_base, [])\n', (1055, 1076), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((1393, 1465), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[non_initial_blockchain_state]'], {}), '(blockchain_base, [non_initial_blockchain_state])\n', (1416, 1465), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((1782, 1822), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blocks', 'patch_blocks', (['blockchain_base', '[block_0]'], {}), '(blockchain_base, [block_0])\n', (1794, 1822), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((2416, 2456), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blocks', 'patch_blocks', (['blockchain_base', '[block_0]'], {}), '(blockchain_base, [block_0])\n', (2428, 2456), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((3078, 3118), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blocks', 'patch_blocks', (['blockchain_base', '[block_0]'], {}), '(blockchain_base, [block_0])\n', (3090, 3118), False, 'from thenewboston_node.business_logic.tests.mocks.utils import 
patch_blockchain_states, patch_blocks\n'), ((3909, 3977), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[blockchain_genesis_state]'], {}), '(blockchain_base, [blockchain_genesis_state])\n', (3932, 3977), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((1091, 1189), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Blockchain must contain at least one blockchain state"""'}), "(ValidationError, match=\n 'Blockchain must contain at least one blockchain state')\n", (1104, 1189), False, 'import pytest\n'), ((1480, 1576), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Blockchain must start with initial blockchain state"""'}), "(ValidationError, match=\n 'Blockchain must start with initial blockchain state')\n", (1493, 1576), False, 'import pytest\n'), ((1960, 2052), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[blockchain_genesis_state, blockchain_state_5]'], {}), '(blockchain_base, [blockchain_genesis_state,\n blockchain_state_5])\n', (1983, 2052), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((2615, 2707), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[blockchain_genesis_state, blockchain_state_0]'], {}), '(blockchain_base, [blockchain_genesis_state,\n blockchain_state_0])\n', (2638, 2707), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((3277, 3369), 'thenewboston_node.business_logic.tests.mocks.utils.patch_blockchain_states', 'patch_blockchain_states', (['blockchain_base', '[blockchain_genesis_state, blockchain_state_0]'], {}), '(blockchain_base, [blockchain_genesis_state,\n 
blockchain_state_0])\n', (3300, 3369), False, 'from thenewboston_node.business_logic.tests.mocks.utils import patch_blockchain_states, patch_blocks\n'), ((3992, 4062), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Account state node must be set"""'}), "(ValidationError, match='Account state node must be set')\n", (4005, 4062), False, 'import pytest\n'), ((2067, 2175), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Blockchain state last_block_number points to non-existing block"""'}), "(ValidationError, match=\n 'Blockchain state last_block_number points to non-existing block')\n", (2080, 2175), False, 'import pytest\n'), ((2722, 2837), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Blockchain state last_block_identifier does not match block_identifier"""'}), "(ValidationError, match=\n 'Blockchain state last_block_identifier does not match block_identifier')\n", (2735, 2837), False, 'import pytest\n'), ((3384, 3518), 'pytest.raises', 'pytest.raises', (['ValidationError'], {'match': '"""Blockchain state next_block_identifier does not match last_block_number message hash"""'}), "(ValidationError, match=\n 'Blockchain state next_block_identifier does not match last_block_number message hash'\n )\n", (3397, 3518), False, 'import pytest\n')] |
# 混雑度トーナメント選択により新たな探索母集団Qt+1を生成
import numpy as np
import random
import copy
class Tournament(object):
"""混雑度トーナメント選択
"""
def __init__(self, archive_set):
self._archive_set = copy.deepcopy(archive_set)
def tournament(self):
# アーカイブ母集団の個体数分の探索母集団を生成
size = int(self._archive_set.shape[0])
search_set = np.array([], dtype = np.float64)
for i in range(size):
rnd1 = random.randrange(size)
rnd2 = random.randrange(size)
# まずランクで比較
if self._archive_set[rnd1, 2] < self._archive_set[rnd2, 2]:
search_set = np.append(search_set, self._archive_set[rnd1, :])
elif self._archive_set[rnd1, 2] > self._archive_set[rnd2, 2]:
search_set = np.append(search_set, self._archive_set[rnd2, :])
# 次に混雑度距離で比較
elif self._archive_set[rnd1, 3] > self._archive_set[rnd2, 3]:
search_set = np.append(search_set, self._archive_set[rnd1, :])
else:
search_set = np.append(search_set, self._archive_set[rnd2, :])
search_set = search_set.reshape(size, -1)
return search_set
| [
"numpy.append",
"copy.deepcopy",
"numpy.array",
"random.randrange"
] | [((197, 223), 'copy.deepcopy', 'copy.deepcopy', (['archive_set'], {}), '(archive_set)\n', (210, 223), False, 'import copy\n'), ((353, 383), 'numpy.array', 'np.array', (['[]'], {'dtype': 'np.float64'}), '([], dtype=np.float64)\n', (361, 383), True, 'import numpy as np\n'), ((435, 457), 'random.randrange', 'random.randrange', (['size'], {}), '(size)\n', (451, 457), False, 'import random\n'), ((477, 499), 'random.randrange', 'random.randrange', (['size'], {}), '(size)\n', (493, 499), False, 'import random\n'), ((625, 674), 'numpy.append', 'np.append', (['search_set', 'self._archive_set[rnd1, :]'], {}), '(search_set, self._archive_set[rnd1, :])\n', (634, 674), True, 'import numpy as np\n'), ((779, 828), 'numpy.append', 'np.append', (['search_set', 'self._archive_set[rnd2, :]'], {}), '(search_set, self._archive_set[rnd2, :])\n', (788, 828), True, 'import numpy as np\n'), ((958, 1007), 'numpy.append', 'np.append', (['search_set', 'self._archive_set[rnd1, :]'], {}), '(search_set, self._archive_set[rnd1, :])\n', (967, 1007), True, 'import numpy as np\n'), ((1056, 1105), 'numpy.append', 'np.append', (['search_set', 'self._archive_set[rnd2, :]'], {}), '(search_set, self._archive_set[rnd2, :])\n', (1065, 1105), True, 'import numpy as np\n')] |
# Generated by Django 2.0.1 on 2018-01-30 23:36
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the "movimento" app.
    # Creates the ItemDeLinha (line item) and Transacao (transaction) tables,
    # then links each line item to its transaction. Depends on the initial
    # "registros" migration for the Moeda, Produto and Empresa tables
    # referenced by the foreign keys below. Operation order matters:
    # the AddField can only run once both models exist.
    initial = True
    dependencies = [
        ('registros', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='ItemDeLinha',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('qtde', models.IntegerField(default=1)),
                ('preco_unitario', models.IntegerField(default=0)),
                # SET_NULL keeps the line item if the referenced row is deleted.
                ('moeda', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='registros.Moeda')),
                ('produto', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='registros.Produto')),
            ],
        ),
        migrations.CreateModel(
            name='Transacao',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_recorded', models.DateField(default=datetime.date.today)),
                ('comprador', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='buyer_company', to='registros.Empresa')),
                ('vendedor', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='seller_company', to='registros.Empresa')),
            ],
            options={
                'verbose_name_plural': 'Transações',
            },
        ),
        migrations.AddField(
            model_name='itemdelinha',
            name='transacao',
            # CASCADE: deleting a transaction removes its line items.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='movimento.Transacao'),
        ),
    ]
| [
"django.db.models.ForeignKey",
"django.db.models.IntegerField",
"django.db.models.DateField",
"django.db.models.AutoField"
] | [((1726, 1819), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""movimento.Transacao"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'movimento.Transacao')\n", (1743, 1819), False, 'from django.db import migrations, models\n'), ((395, 488), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (411, 488), False, 'from django.db import migrations, models\n'), ((512, 542), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(1)'}), '(default=1)\n', (531, 542), False, 'from django.db import migrations, models\n'), ((580, 610), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(0)'}), '(default=0)\n', (599, 610), False, 'from django.db import migrations, models\n'), ((639, 739), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""registros.Moeda"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='registros.Moeda')\n", (656, 739), False, 'from django.db import migrations, models\n'), ((766, 868), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""registros.Produto"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n to='registros.Produto')\n", (783, 868), False, 'from django.db import migrations, models\n'), ((999, 1092), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1015, 1092), False, 'from django.db import migrations, models\n'), ((1125, 1170), 
'django.db.models.DateField', 'models.DateField', ([], {'default': 'datetime.date.today'}), '(default=datetime.date.today)\n', (1141, 1170), False, 'from django.db import migrations, models\n'), ((1203, 1335), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""buyer_company"""', 'to': '"""registros.Empresa"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='buyer_company', to='registros.Empresa')\n", (1220, 1335), False, 'from django.db import migrations, models\n'), ((1363, 1496), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""seller_company"""', 'to': '"""registros.Empresa"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='seller_company', to='registros.Empresa')\n", (1380, 1496), False, 'from django.db import migrations, models\n')] |
import numpy as np
import pandas as pd
import pytest
from numpy.testing import assert_array_almost_equal
from powersimdata.tests.mock_grid import MockGrid
from powersimdata.tests.mock_scenario import MockScenario
from postreise.analyze.generation.emissions import (
generate_emissions_stats,
summarize_emissions_by_bus,
)
@pytest.fixture
def mock_plant():
    """Plant table columns for the mock grid; plant_id serves as the index."""
    return dict(
        plant_id=[101, 102, 103, 104, 105],
        bus_id=[1001, 1002, 1003, 1004, 1005],
        type=["solar", "wind", "ng", "coal", "dfo"],
        GenFuelCost=[0, 0, 3.3, 4.4, 5.5],
    )
@pytest.fixture
def mock_gencost():
    """Generation cost table for the mock grid; plant_id serves as the index."""
    n_plants = 5
    return {
        "plant_id": [101 + i for i in range(n_plants)],
        "type": [2] * n_plants,
        "startup": [0] * n_plants,
        "shutdown": [0] * n_plants,
        "n": [3] * n_plants,
        "c2": [1, 2, 3, 4, 5],
        "c1": [10, 20, 30, 40, 50],
        "c0": [100, 200, 300, 400, 500],
        "interconnect": ["Western"] * n_plants,
    }
@pytest.fixture
def mock_pg(mock_plant):
    """Hourly PG profile: the k-th plant generates 0, k, 2k, 3k over 4 periods."""
    timestamps = pd.date_range("2019-01-01", periods=4, freq="H")
    profiles = {}
    for multiplier, plant_id in enumerate(mock_plant["plant_id"], start=1):
        profiles[plant_id] = [multiplier * step for step in range(4)]
    return pd.DataFrame(profiles, index=timestamps)
@pytest.fixture
def scenario(mock_plant, mock_gencost, mock_pg):
    """Mock scenario combining the grid tables with the PG profile."""
    grid_attrs = {"plant": mock_plant, "gencost_before": mock_gencost}
    return MockScenario(grid_attrs=grid_attrs, pg=mock_pg)
def _test_emissions_structure(emissions, mock_plant, pg):
plant = pd.DataFrame(mock_plant)
plant.set_index("plant_id", inplace=True)
# check data frame structure
err_msg = "generate_emissions_stats should return a data frame"
assert isinstance(emissions, pd.DataFrame), err_msg
for a, b in zip(pg.index.to_numpy(), emissions.index.to_numpy()):
assert a == b, "emissions and pg should have same index"
for a, b in zip(pg.columns.to_numpy(), emissions.columns.to_numpy()):
assert a == b, "emissions and pg should have same columns"
# sanity check values
emissions_from_wind = plant[plant.type == "wind"].index.values
err_msg = "Wind farm does not emit emissions"
assert emissions[emissions_from_wind[0]].sum() == 0, err_msg
emissions_from_solar = plant[plant.type == "solar"].index.values
err_msg = "Solar plant does not emit emissions"
assert emissions[emissions_from_solar[0]].sum() == 0, err_msg
negative_emissions_count = np.sum((emissions < 0).to_numpy().ravel())
assert negative_emissions_count == 0, "No plant should emit negative emissions"
class TestEmissionStatsArguments:
    """Argument validation in generate_emissions_stats."""

    def test_pollutant_value(self, scenario):
        with pytest.raises(ValueError) as err:
            generate_emissions_stats(scenario, pollutant="CO2")
        assert "Unknown pollutant for generate_emissions_stats()" in str(err.value)

    def test_method_type(self, scenario):
        with pytest.raises(TypeError) as err:
            generate_emissions_stats(scenario, method=1)
        assert "method must be a str" in str(err.value)

    def test_method_value(self, scenario):
        with pytest.raises(ValueError) as err:
            generate_emissions_stats(scenario, pollutant="nox", method="always-off")
        assert "method for nox must be one of: {'simple'}" in str(err.value)
class TestCarbonCalculation:
    """CO2 emission values for each supported calculation method.

    The three test cases differ only in the ``method`` argument and the
    expected values, so the shared plumbing lives in a private helper.
    """

    def _check_carbon(self, scenario, mock_plant, method, expected_values):
        """Run the CO2 calculation with *method* and compare to *expected_values*."""
        carbon = generate_emissions_stats(scenario, method=method)
        _test_emissions_structure(carbon, mock_plant, scenario.state.get_pg())
        # check specific values
        assert_array_almost_equal(
            expected_values, carbon.to_numpy(), err_msg="Values do not match expected"
        )

    def test_carbon_calc_always_on(self, scenario, mock_plant):
        expected_values = np.array(
            [
                [0, 0, 4.82, 8.683333, 6.77],
                [0, 0, 6.6998, 13.546000, 11.8475],
                [0, 0, 9.4472, 21.1873333, 20.3100],
                [0, 0, 13.0622, 31.6073333, 32.1575],
            ]
        )
        self._check_carbon(scenario, mock_plant, "always-on", expected_values)

    def test_carbon_calc_decommit(self, scenario, mock_plant):
        # Decommitted plants emit nothing during the first (zero-output) hour.
        expected_values = np.array(
            [
                [0, 0, 0, 0, 0],
                [0, 0, 6.6998, 13.546000, 11.8475],
                [0, 0, 9.4472, 21.1873333, 20.3100],
                [0, 0, 13.0622, 31.6073333, 32.1575],
            ]
        )
        self._check_carbon(scenario, mock_plant, "decommit", expected_values)

    def test_carbon_calc_simple(self, scenario, mock_plant):
        expected_values = np.array(
            [
                [0, 0, 0, 0, 0],
                [0, 0, 1.407, 4.004, 4.2],
                [0, 0, 2.814, 8.008, 8.4],
                [0, 0, 4.221, 12.012, 12.6],
            ]
        )
        self._check_carbon(scenario, mock_plant, "simple", expected_values)
class TestNOxCalculation:
    """NOx emission calculation tests."""

    def test_calculate_nox_simple(self, scenario):
        emissions = generate_emissions_stats(scenario, pollutant="nox", method="simple")
        expected_values = np.array(
            [
                [0, 0, 0, 0, 0],
                [0, 0, 0.000537, 0.002632, 0.007685],
                [0, 0, 0.001074, 0.005264, 0.015370],
                [0, 0, 0.001611, 0.007896, 0.023055],
            ]
        )
        assert_array_almost_equal(
            expected_values, emissions.to_numpy(), err_msg="Values do not match expected"
        )

    def test_calculate_nox_disallowed_method(self, scenario):
        # Only the "simple" method supports NOx.
        with pytest.raises(ValueError):
            generate_emissions_stats(scenario, pollutant="nox", method="decommit")
class TestSO2Calculation:
    """SO2 emission calculation tests."""

    def test_calculate_so2_simple(self, scenario):
        so2 = generate_emissions_stats(scenario, pollutant="so2", method="simple")
        expected_values = np.array(
            [
                [0, 0, 0, 0, 0],
                [0, 0, 3.0000e-05, 3.8600e-03, 1.0945e-02],
                [0, 0, 6.0000e-05, 7.7200e-03, 2.1890e-02],
                [0, 0, 9.0000e-05, 1.1580e-02, 3.2835e-02],
            ]
        )
        assert_array_almost_equal(
            expected_values, so2.to_numpy(), err_msg="Values do not match expected"
        )

    def test_calculate_so2_disallowed_method(self, scenario):
        # Only the "simple" method supports SO2.
        with pytest.raises(ValueError):
            generate_emissions_stats(scenario, pollutant="so2", method="always-on")
class TestEmissionsSummarization:
    """Tests for summarize_emissions_by_bus."""

    def test_emissions_is_non_negative(self, scenario, mock_plant):
        """A frame with negative emissions must be rejected with ValueError.

        Fix: ``mock_plant`` is now requested as a fixture parameter.
        Previously the bare module-level fixture *function* was passed to
        MockGrid instead of the plant dict it produces.
        """
        carbon = generate_emissions_stats(scenario)
        with pytest.raises(ValueError):
            summarize_emissions_by_bus(
                -1 * carbon, MockGrid(grid_attrs={"plant": mock_plant})
            )

    def test_emissions_summarization(self, mock_pg, mock_plant):
        """Emissions are summed per fuel type and bus."""
        # setup
        pg = pd.DataFrame(mock_pg).iloc[:3, :]
        plant = pd.DataFrame(mock_plant)
        plant.set_index("plant_id", inplace=True)
        input_carbon_values = [
            [0, 0, 6.6998, 13.546000, 11.8475],
            [0, 0, 9.4472, 21.1873333, 20.3100],
            [0, 0, 13.0622, 31.6073333, 32.1575],
        ]
        input_carbon = pd.DataFrame(
            input_carbon_values, index=pg.index, columns=pg.columns
        )
        # Per-fuel, per-bus column sums of input_carbon_values.
        expected_sum = {
            "coal": {1004: 66.3406666},
            "ng": {1003: 29.2092},
            "dfo": {1005: 64.315},
        }
        # calculation
        summation = summarize_emissions_by_bus(
            input_carbon, MockGrid(grid_attrs={"plant": mock_plant})
        )
        # checks
        err_msg = "summarize_emissions_by_bus didn't return a dict"
        assert isinstance(summation, dict), err_msg
        err_msg = "summarize_emissions_by_bus didn't return the right dict keys"
        assert set(summation.keys()) == expected_sum.keys(), err_msg
        for k in expected_sum.keys():
            err_msg = "summation not correct for fuel " + k
            assert expected_sum[k].keys() == summation[k].keys(), err_msg
            for bus in expected_sum[k]:
                err_msg = "summation not correct for bus " + str(bus)
                assert expected_sum[k][bus] == pytest.approx(summation[k][bus]), err_msg
| [
"pandas.DataFrame",
"powersimdata.tests.mock_grid.MockGrid",
"pandas.date_range",
"postreise.analyze.generation.emissions.generate_emissions_stats",
"pytest.raises",
"numpy.array",
"powersimdata.tests.mock_scenario.MockScenario",
"pytest.approx"
] | [((1354, 1448), 'powersimdata.tests.mock_scenario.MockScenario', 'MockScenario', ([], {'grid_attrs': "{'plant': mock_plant, 'gencost_before': mock_gencost}", 'pg': 'mock_pg'}), "(grid_attrs={'plant': mock_plant, 'gencost_before':\n mock_gencost}, pg=mock_pg)\n", (1366, 1448), False, 'from powersimdata.tests.mock_scenario import MockScenario\n'), ((1540, 1564), 'pandas.DataFrame', 'pd.DataFrame', (['mock_plant'], {}), '(mock_plant)\n', (1552, 1564), True, 'import pandas as pd\n'), ((3468, 3522), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'method': '"""always-on"""'}), "(scenario, method='always-on')\n", (3492, 3522), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((3661, 3811), 'numpy.array', 'np.array', (['[[0, 0, 4.82, 8.683333, 6.77], [0, 0, 6.6998, 13.546, 11.8475], [0, 0, \n 9.4472, 21.1873333, 20.31], [0, 0, 13.0622, 31.6073333, 32.1575]]'], {}), '([[0, 0, 4.82, 8.683333, 6.77], [0, 0, 6.6998, 13.546, 11.8475], [0,\n 0, 9.4472, 21.1873333, 20.31], [0, 0, 13.0622, 31.6073333, 32.1575]])\n', (3669, 3811), True, 'import numpy as np\n'), ((4128, 4181), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'method': '"""decommit"""'}), "(scenario, method='decommit')\n", (4152, 4181), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((4320, 4458), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0], [0, 0, 6.6998, 13.546, 11.8475], [0, 0, 9.4472, \n 21.1873333, 20.31], [0, 0, 13.0622, 31.6073333, 32.1575]]'], {}), '([[0, 0, 0, 0, 0], [0, 0, 6.6998, 13.546, 11.8475], [0, 0, 9.4472, \n 21.1873333, 20.31], [0, 0, 13.0622, 31.6073333, 32.1575]])\n', (4328, 4458), True, 'import numpy as np\n'), ((4772, 4823), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', 
(['scenario'], {'method': '"""simple"""'}), "(scenario, method='simple')\n", (4796, 4823), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((4962, 5077), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0], [0, 0, 1.407, 4.004, 4.2], [0, 0, 2.814, 8.008, 8.4], [0,\n 0, 4.221, 12.012, 12.6]]'], {}), '([[0, 0, 0, 0, 0], [0, 0, 1.407, 4.004, 4.2], [0, 0, 2.814, 8.008, \n 8.4], [0, 0, 4.221, 12.012, 12.6]])\n', (4970, 5077), True, 'import numpy as np\n'), ((5411, 5556), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0], [0, 0, 0.000537, 0.002632, 0.007685], [0, 0, 0.001074, \n 0.005264, 0.01537], [0, 0, 0.001611, 0.007896, 0.023055]]'], {}), '([[0, 0, 0, 0, 0], [0, 0, 0.000537, 0.002632, 0.007685], [0, 0, \n 0.001074, 0.005264, 0.01537], [0, 0, 0.001611, 0.007896, 0.023055]])\n', (5419, 5556), True, 'import numpy as np\n'), ((5668, 5736), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""nox"""', 'method': '"""simple"""'}), "(scenario, pollutant='nox', method='simple')\n", (5692, 5736), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((6157, 6290), 'numpy.array', 'np.array', (['[[0, 0, 0, 0, 0], [0, 0, 3e-05, 0.00386, 0.010945], [0, 0, 6e-05, 0.00772, \n 0.02189], [0, 0, 9e-05, 0.01158, 0.032835]]'], {}), '([[0, 0, 0, 0, 0], [0, 0, 3e-05, 0.00386, 0.010945], [0, 0, 6e-05, \n 0.00772, 0.02189], [0, 0, 9e-05, 0.01158, 0.032835]])\n', (6165, 6290), True, 'import numpy as np\n'), ((6432, 6500), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""so2"""', 'method': '"""simple"""'}), "(scenario, pollutant='so2', method='simple')\n", (6456, 6500), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((6926, 6960), 
'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {}), '(scenario)\n', (6950, 6960), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((7272, 7296), 'pandas.DataFrame', 'pd.DataFrame', (['mock_plant'], {}), '(mock_plant)\n', (7284, 7296), True, 'import pandas as pd\n'), ((7559, 7628), 'pandas.DataFrame', 'pd.DataFrame', (['input_carbon_values'], {'index': 'pg.index', 'columns': 'pg.columns'}), '(input_carbon_values, index=pg.index, columns=pg.columns)\n', (7571, 7628), True, 'import pandas as pd\n'), ((1220, 1268), 'pandas.date_range', 'pd.date_range', (['"""2019-01-01"""'], {'periods': '(4)', 'freq': '"""H"""'}), "('2019-01-01', periods=4, freq='H')\n", (1233, 1268), True, 'import pandas as pd\n'), ((2694, 2719), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2707, 2719), False, 'import pytest\n'), ((2744, 2795), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""CO2"""'}), "(scenario, pollutant='CO2')\n", (2768, 2795), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((2940, 2964), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2953, 2964), False, 'import pytest\n'), ((2989, 3033), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'method': '(1)'}), '(scenario, method=1)\n', (3013, 3033), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((3151, 3176), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3164, 3176), False, 'import pytest\n'), ((3201, 3273), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""nox"""', 
'method': '"""always-off"""'}), "(scenario, pollutant='nox', method='always-off')\n", (3225, 3273), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((5942, 5967), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (5955, 5967), False, 'import pytest\n'), ((5981, 6051), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""nox"""', 'method': '"""decommit"""'}), "(scenario, pollutant='nox', method='decommit')\n", (6005, 6051), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((6706, 6731), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6719, 6731), False, 'import pytest\n'), ((6745, 6816), 'postreise.analyze.generation.emissions.generate_emissions_stats', 'generate_emissions_stats', (['scenario'], {'pollutant': '"""so2"""', 'method': '"""always-on"""'}), "(scenario, pollutant='so2', method='always-on')\n", (6769, 6816), False, 'from postreise.analyze.generation.emissions import generate_emissions_stats, summarize_emissions_by_bus\n'), ((6974, 6999), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (6987, 6999), False, 'import pytest\n'), ((7893, 7935), 'powersimdata.tests.mock_grid.MockGrid', 'MockGrid', ([], {'grid_attrs': "{'plant': mock_plant}"}), "(grid_attrs={'plant': mock_plant})\n", (7901, 7935), False, 'from powersimdata.tests.mock_grid import MockGrid\n'), ((7070, 7112), 'powersimdata.tests.mock_grid.MockGrid', 'MockGrid', ([], {'grid_attrs': "{'plant': mock_plant}"}), "(grid_attrs={'plant': mock_plant})\n", (7078, 7112), False, 'from powersimdata.tests.mock_grid import MockGrid\n'), ((7222, 7243), 'pandas.DataFrame', 'pd.DataFrame', (['mock_pg'], {}), '(mock_pg)\n', (7234, 7243), True, 'import pandas as pd\n'), ((8563, 8595), 'pytest.approx', 'pytest.approx', 
(['summation[k][bus]'], {}), '(summation[k][bus])\n', (8576, 8595), False, 'import pytest\n')] |
import socket
from messages import send_packet, receive_packet, INPUT_MESSAGE
# Index of the first car this hivemind controls.
ID = 0
# Total number of drones controlled by this process.
NUM_BOTS = 4
# Notice: multiplicity == 4, telling RLBot I want to control bots.
# Controlled IDs are [id, id + 1, id + 2, id + 3].
# There must be enough cars in the match config file to support this.
HIVE_READY_MESSAGE = {
    "type": "Ready",
    "name": "Hivemind",
    "team": 0,
    "id": ID,
    "multiplicity": NUM_BOTS
}
class SocketHivemind:
    """A hivemind bot controlling several RLBot drones over one socket."""

    def log(self, statement):
        """Print *statement* with a hivemind prefix when debugging is enabled."""
        if self.debug:
            print(f"[HIVEMIND] {statement}")

    def __init__(self, port, debug=False):
        self.debug = debug
        self.id = ID
        # Controlled drone ids are consecutive, starting at this hivemind's id.
        self.drone_ids = [ID + i for i in range(NUM_BOTS)]
        self.initialize_hive()
        self.log("Loaded")

        # Create socket and register with RLBot on localhost:<port>.
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

        self.log("Attempting to connect")
        self.socket.connect(("localhost", port))
        self.log("Connected")

        self.log("Sending READY message")
        send_packet(self.socket, HIVE_READY_MESSAGE)

    def run(self):
        """Receive and process packets until an error occurs.

        The socket is closed in a ``finally`` clause; previously it was only
        closed on the exception path, so any other exit leaked the socket.
        """
        try:
            # Packet loop.
            while True:
                self.log("Receiving packet")
                packet = receive_packet(self.socket)
                self.parse_packet(packet)
        except Exception as e:
            # Best-effort behaviour: report the failure, then shut down.
            print(e)
        finally:
            self.log("Closing")
            self.socket.close()

    def parse_packet(self, packet):
        """Handle every message of *packet* (a packet is a list of messages)."""
        for message in packet:
            if message["type"] == "Update":
                self.log("Received UPDATE message")
                output = self.get_output(message)
                self.log("Sending INPUT messages")
                send_packet(self.socket, output)
            else:
                # TODO Other kinds of messages
                continue

    def initialize_hive(self):
        """Hook for subclass/setup initialisation; intentionally empty."""
        pass

    def get_output(self, message):
        """Return one controller input message per controlled drone.

        NOTE(review): every list entry aliases the same INPUT_MESSAGE object;
        copy it per drone if entries are ever mutated independently.
        """
        controls = [INPUT_MESSAGE for drone in self.drone_ids]
        return controls
if __name__ == "__main__":
    # Stand-alone entry point: connect to RLBot on port 23234 with verbose
    # logging enabled and process packets until a failure occurs.
    hivemind = SocketHivemind(23234, debug=True)
    hivemind.run()
"messages.receive_packet",
"socket.socket",
"messages.send_packet"
] | [((797, 846), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (810, 846), False, 'import socket\n'), ((1027, 1071), 'messages.send_packet', 'send_packet', (['self.socket', 'HIVE_READY_MESSAGE'], {}), '(self.socket, HIVE_READY_MESSAGE)\n', (1038, 1071), False, 'from messages import send_packet, receive_packet, INPUT_MESSAGE\n'), ((1226, 1253), 'messages.receive_packet', 'receive_packet', (['self.socket'], {}), '(self.socket)\n', (1240, 1253), False, 'from messages import send_packet, receive_packet, INPUT_MESSAGE\n'), ((1735, 1767), 'messages.send_packet', 'send_packet', (['self.socket', 'output'], {}), '(self.socket, output)\n', (1746, 1767), False, 'from messages import send_packet, receive_packet, INPUT_MESSAGE\n')] |
import math
import logging
import requests
from itertools import count
from requests import RequestException, HTTPError
from servicelayer.util import backoff
from followthemoney.helpers import entity_filename
from ingestors.settings import CONVERT_URL, CONVERT_TIMEOUT
from ingestors.support.cache import CacheSupport
from ingestors.support.temp import TempFileSupport
from ingestors.exc import ProcessingException
log = logging.getLogger(__name__)
class DocumentConvertSupport(CacheSupport, TempFileSupport):
"""Provides helpers for UNO document conversion via HTTP."""
def document_to_pdf(self, file_path, entity):
key = self.cache_key('pdf', entity.first('contentHash'))
pdf_hash = self.tags.get(key)
if pdf_hash is not None:
file_name = entity_filename(entity, extension='pdf')
path = self.manager.load(pdf_hash, file_name=file_name)
if path is not None:
log.info("Using PDF cache: %s", file_name)
entity.set('pdfHash', pdf_hash)
return path
pdf_file = self._document_to_pdf(file_path, entity)
if pdf_file is not None:
content_hash = self.manager.store(pdf_file)
entity.set('pdfHash', content_hash)
self.tags.set(key, content_hash)
return pdf_file
def _document_to_pdf(self, file_path, entity):
"""Converts an office document to PDF."""
file_name = entity_filename(entity)
mime_type = entity.first('mimeType')
log.info('Converting [%s] to PDF...', file_name)
for attempt in count(1):
try:
with open(file_path, 'rb') as fh:
files = {'file': (file_name, fh, mime_type)}
res = requests.post(CONVERT_URL,
params={'timeout': CONVERT_TIMEOUT},
files=files,
timeout=CONVERT_TIMEOUT + 10,
stream=True)
res.raise_for_status()
out_path = self.make_work_file('out.pdf')
with open(out_path, 'wb') as fh:
bytes_written = 0
for chunk in res.iter_content(chunk_size=None):
bytes_written += len(chunk)
fh.write(chunk)
if bytes_written > 50:
return out_path
raise ProcessingException("Could not be converted to PDF.")
except HTTPError as exc:
if exc.response.status_code in (400, 500):
# For error 500, this might also be a temporary error
# in the conversion service. But all attempts to divy
# these phenomena apart have failed so far.
raise ProcessingException(res.text)
msg = "Converter not available: %s (attempt: %s)"
log.info(msg, exc, attempt)
backoff(failures=math.sqrt(attempt))
except RequestException as exc:
msg = "Converter not available: %s (attempt: %s)"
log.error(msg, exc, attempt)
backoff(failures=math.sqrt(attempt))
| [
"ingestors.exc.ProcessingException",
"math.sqrt",
"itertools.count",
"requests.post",
"followthemoney.helpers.entity_filename",
"logging.getLogger"
] | [((423, 450), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (440, 450), False, 'import logging\n'), ((1456, 1479), 'followthemoney.helpers.entity_filename', 'entity_filename', (['entity'], {}), '(entity)\n', (1471, 1479), False, 'from followthemoney.helpers import entity_filename\n'), ((1605, 1613), 'itertools.count', 'count', (['(1)'], {}), '(1)\n', (1610, 1613), False, 'from itertools import count\n'), ((790, 830), 'followthemoney.helpers.entity_filename', 'entity_filename', (['entity'], {'extension': '"""pdf"""'}), "(entity, extension='pdf')\n", (805, 830), False, 'from followthemoney.helpers import entity_filename\n'), ((2502, 2555), 'ingestors.exc.ProcessingException', 'ProcessingException', (['"""Could not be converted to PDF."""'], {}), "('Could not be converted to PDF.')\n", (2521, 2555), False, 'from ingestors.exc import ProcessingException\n'), ((1773, 1896), 'requests.post', 'requests.post', (['CONVERT_URL'], {'params': "{'timeout': CONVERT_TIMEOUT}", 'files': 'files', 'timeout': '(CONVERT_TIMEOUT + 10)', 'stream': '(True)'}), "(CONVERT_URL, params={'timeout': CONVERT_TIMEOUT}, files=files,\n timeout=CONVERT_TIMEOUT + 10, stream=True)\n", (1786, 1896), False, 'import requests\n'), ((2890, 2919), 'ingestors.exc.ProcessingException', 'ProcessingException', (['res.text'], {}), '(res.text)\n', (2909, 2919), False, 'from ingestors.exc import ProcessingException\n'), ((3063, 3081), 'math.sqrt', 'math.sqrt', (['attempt'], {}), '(attempt)\n', (3072, 3081), False, 'import math\n'), ((3271, 3289), 'math.sqrt', 'math.sqrt', (['attempt'], {}), '(attempt)\n', (3280, 3289), False, 'import math\n')] |
# -*- coding: utf-8 -*-
import numpy as np
import pytest
from mrsimulator.method.query import TransitionQuery
from mrsimulator.methods import FiveQ_VAS
from mrsimulator.methods import SevenQ_VAS
from mrsimulator.methods import ThreeQ_VAS
__author__ = "<NAME>"
__email__ = "<EMAIL>"
methods = [ThreeQ_VAS, FiveQ_VAS, SevenQ_VAS]
names = ["ThreeQ_VAS", "FiveQ_VAS", "SevenQ_VAS"]
def sample_test_output(n):
return {
"magnetic_flux_density": "9.4 T",
"rotor_angle": "0.9553166181245 rad",
"rotor_frequency": "1000000000000.0 Hz",
"spectral_dimensions": [
{
"count": 1024,
"spectral_width": "25000.0 Hz",
"events": [{"transition_query": [{"ch1": {"P": [n], "D": [0]}}]}],
},
{
"count": 1024,
"spectral_width": "25000.0 Hz",
"events": [{"transition_query": [{"ch1": {"P": [-1], "D": [0]}}]}],
},
],
}
def test_MQ_VAS_rotor_freq():
e = "`rotor_frequency=1e12 Hz` is fixed for 2D Methods and cannot be modified."
isotopes = ["87Rb", "27Al", "51V"]
for iso, method in zip(isotopes, methods):
with pytest.raises(ValueError, match=f".*{e}.*"):
method(channels=[iso], rotor_frequency=10, spectral_dimensions=[{}, {}])
def test_MQ_VAS_affine():
sites = ["87Rb", "27Al", "51V"]
spins = [1.5, 2.5, 3.5]
k_MQ_MAS = {
3: {1.5: 21 / 27, 2.5: 114 / 72, 3.5: 303 / 135, 4.5: 546 / 216},
5: {2.5: 150 / 72, 3.5: 165 / 135, 4.5: 570 / 216},
7: {3.5: 483 / 135, 4.5: 84 / 216},
9: {4.5: 1116 / 216},
}
for j, method in enumerate(methods):
for i, isotope in zip(spins[j:], sites[j:]):
meth = method(channels=[isotope])
k = k_MQ_MAS[3 + 2 * j][i]
assert meth.spectral_dimensions[0].events[0].fraction == 1
assert meth.spectral_dimensions[1].events[0].fraction == 1
assert np.allclose(meth.affine_matrix, [1 / (k + 1), k / (k + 1), 0, 1])
def test_3Q_VAS_general():
"""3Q-VAS method test"""
mth = ThreeQ_VAS(channels=["87Rb"], spectral_dimensions=[{}, {}])
assert mth.name == "ThreeQ_VAS"
assert mth.description == "Simulate a 3Q variable-angle spinning spectrum."
assert mth.spectral_dimensions[0].events[0].transition_query == [
TransitionQuery(ch1={"P": [-3], "D": [0]})
]
assert mth.spectral_dimensions[1].events[0].transition_query == [
TransitionQuery(ch1={"P": [-1], "D": [0]})
]
assert ThreeQ_VAS.parse_dict_with_units(mth.json()) == mth
assert np.allclose(mth.affine_matrix, [0.5625, 0.4375, 0.0, 1.0])
serialize = mth.json()
_ = serialize.pop("affine_matrix")
assert serialize == {
"channels": ["87Rb"],
"description": "Simulate a 3Q variable-angle spinning spectrum.",
"name": "ThreeQ_VAS",
**sample_test_output(-3),
}
def test_5Q_VAS_general():
"""5Q-VAS method test"""
mth = FiveQ_VAS(channels=["17O"], spectral_dimensions=[{}, {}])
assert mth.name == "FiveQ_VAS"
assert mth.description == "Simulate a 5Q variable-angle spinning spectrum."
assert mth.spectral_dimensions[0].events[0].transition_query == [
TransitionQuery(ch1={"P": [-5], "D": [0]})
]
assert mth.spectral_dimensions[1].events[0].transition_query == [
TransitionQuery(ch1={"P": [-1], "D": [0]})
]
assert FiveQ_VAS.parse_dict_with_units(mth.json()) == mth
assert np.allclose(
mth.affine_matrix, [0.3243243243243243, 0.6756756756756757, 0.0, 1.0]
)
serialize = mth.json()
_ = serialize.pop("affine_matrix")
assert serialize == {
"channels": ["17O"],
"description": "Simulate a 5Q variable-angle spinning spectrum.",
"name": "FiveQ_VAS",
**sample_test_output(-5),
}
def test_7Q_VAS_general():
"""7Q-VAS method test"""
mth = SevenQ_VAS(channels=["51V"], spectral_dimensions=[{}, {}])
assert mth.name == "SevenQ_VAS"
assert mth.description == "Simulate a 7Q variable-angle spinning spectrum."
assert mth.spectral_dimensions[0].events[0].transition_query == [
TransitionQuery(ch1={"P": [-7], "D": [0]})
]
assert mth.spectral_dimensions[1].events[0].transition_query == [
TransitionQuery(ch1={"P": [-1], "D": [0]})
]
assert SevenQ_VAS.parse_dict_with_units(mth.json()) == mth
assert np.allclose(mth.affine_matrix, [0.2184466, 0.7815534, 0.0, 1.0])
serialize = mth.json()
_ = serialize.pop("affine_matrix")
assert serialize == {
"channels": ["51V"],
"description": "Simulate a 7Q variable-angle spinning spectrum.",
"name": "SevenQ_VAS",
**sample_test_output(-7),
}
| [
"mrsimulator.methods.ThreeQ_VAS",
"mrsimulator.method.query.TransitionQuery",
"numpy.allclose",
"pytest.raises",
"mrsimulator.methods.SevenQ_VAS",
"mrsimulator.methods.FiveQ_VAS"
] | [((2134, 2193), 'mrsimulator.methods.ThreeQ_VAS', 'ThreeQ_VAS', ([], {'channels': "['87Rb']", 'spectral_dimensions': '[{}, {}]'}), "(channels=['87Rb'], spectral_dimensions=[{}, {}])\n", (2144, 2193), False, 'from mrsimulator.methods import ThreeQ_VAS\n'), ((2639, 2697), 'numpy.allclose', 'np.allclose', (['mth.affine_matrix', '[0.5625, 0.4375, 0.0, 1.0]'], {}), '(mth.affine_matrix, [0.5625, 0.4375, 0.0, 1.0])\n', (2650, 2697), True, 'import numpy as np\n'), ((3033, 3090), 'mrsimulator.methods.FiveQ_VAS', 'FiveQ_VAS', ([], {'channels': "['17O']", 'spectral_dimensions': '[{}, {}]'}), "(channels=['17O'], spectral_dimensions=[{}, {}])\n", (3042, 3090), False, 'from mrsimulator.methods import FiveQ_VAS\n'), ((3535, 3621), 'numpy.allclose', 'np.allclose', (['mth.affine_matrix', '[0.3243243243243243, 0.6756756756756757, 0.0, 1.0]'], {}), '(mth.affine_matrix, [0.3243243243243243, 0.6756756756756757, 0.0,\n 1.0])\n', (3546, 3621), True, 'import numpy as np\n'), ((3966, 4024), 'mrsimulator.methods.SevenQ_VAS', 'SevenQ_VAS', ([], {'channels': "['51V']", 'spectral_dimensions': '[{}, {}]'}), "(channels=['51V'], spectral_dimensions=[{}, {}])\n", (3976, 4024), False, 'from mrsimulator.methods import SevenQ_VAS\n'), ((4471, 4535), 'numpy.allclose', 'np.allclose', (['mth.affine_matrix', '[0.2184466, 0.7815534, 0.0, 1.0]'], {}), '(mth.affine_matrix, [0.2184466, 0.7815534, 0.0, 1.0])\n', (4482, 4535), True, 'import numpy as np\n'), ((1207, 1250), 'pytest.raises', 'pytest.raises', (['ValueError'], {'match': 'f""".*{e}.*"""'}), "(ValueError, match=f'.*{e}.*')\n", (1220, 1250), False, 'import pytest\n'), ((2000, 2065), 'numpy.allclose', 'np.allclose', (['meth.affine_matrix', '[1 / (k + 1), k / (k + 1), 0, 1]'], {}), '(meth.affine_matrix, [1 / (k + 1), k / (k + 1), 0, 1])\n', (2011, 2065), True, 'import numpy as np\n'), ((2388, 2430), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-3], 'D': [0]}"}), "(ch1={'P': [-3], 'D': [0]})\n", (2403, 2430), 
False, 'from mrsimulator.method.query import TransitionQuery\n'), ((2515, 2557), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-1], 'D': [0]}"}), "(ch1={'P': [-1], 'D': [0]})\n", (2530, 2557), False, 'from mrsimulator.method.query import TransitionQuery\n'), ((3285, 3327), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-5], 'D': [0]}"}), "(ch1={'P': [-5], 'D': [0]})\n", (3300, 3327), False, 'from mrsimulator.method.query import TransitionQuery\n'), ((3412, 3454), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-1], 'D': [0]}"}), "(ch1={'P': [-1], 'D': [0]})\n", (3427, 3454), False, 'from mrsimulator.method.query import TransitionQuery\n'), ((4220, 4262), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-7], 'D': [0]}"}), "(ch1={'P': [-7], 'D': [0]})\n", (4235, 4262), False, 'from mrsimulator.method.query import TransitionQuery\n'), ((4347, 4389), 'mrsimulator.method.query.TransitionQuery', 'TransitionQuery', ([], {'ch1': "{'P': [-1], 'D': [0]}"}), "(ch1={'P': [-1], 'D': [0]})\n", (4362, 4389), False, 'from mrsimulator.method.query import TransitionQuery\n')] |
#!/usr/bin/env python3
import HIH6130
import rospy
from sensor_msgs.msg import Temperature, RelativeHumidity
def publish():
tempPub = rospy.Publisher('InternalTemperature', Temperature, queue_size=1)
humidPub = rospy.Publisher('InternalHumidity', RelativeHumidity, queue_size=1)
rospy.init_node('InternalEnvironment')
sensor = HIH6130.HIH6130(bus=1)
temp = Temperature()
temp.header.frame_id = "base_link"
humid = RelativeHumidity()
humid.header.frame_id = "base_link"
freq = rospy.Rate(5)
while not rospy.is_shutdown():
sensor.read()
temp.temperature = sensor.t
humid.relative_humidity = sensor.rh
tempPub.publish(temp)
humidPub.publish(humid)
freq.sleep()
if __name__ == '__main__':
try:
publish()
except rospy.ROSInterruptException:
pass
| [
"sensor_msgs.msg.RelativeHumidity",
"HIH6130.HIH6130",
"rospy.Publisher",
"rospy.Rate",
"sensor_msgs.msg.Temperature",
"rospy.is_shutdown",
"rospy.init_node"
] | [((136, 201), 'rospy.Publisher', 'rospy.Publisher', (['"""InternalTemperature"""', 'Temperature'], {'queue_size': '(1)'}), "('InternalTemperature', Temperature, queue_size=1)\n", (151, 201), False, 'import rospy\n'), ((214, 281), 'rospy.Publisher', 'rospy.Publisher', (['"""InternalHumidity"""', 'RelativeHumidity'], {'queue_size': '(1)'}), "('InternalHumidity', RelativeHumidity, queue_size=1)\n", (229, 281), False, 'import rospy\n'), ((283, 321), 'rospy.init_node', 'rospy.init_node', (['"""InternalEnvironment"""'], {}), "('InternalEnvironment')\n", (298, 321), False, 'import rospy\n'), ((332, 354), 'HIH6130.HIH6130', 'HIH6130.HIH6130', ([], {'bus': '(1)'}), '(bus=1)\n', (347, 354), False, 'import HIH6130\n'), ((363, 376), 'sensor_msgs.msg.Temperature', 'Temperature', ([], {}), '()\n', (374, 376), False, 'from sensor_msgs.msg import Temperature, RelativeHumidity\n'), ((422, 440), 'sensor_msgs.msg.RelativeHumidity', 'RelativeHumidity', ([], {}), '()\n', (438, 440), False, 'from sensor_msgs.msg import Temperature, RelativeHumidity\n'), ((486, 499), 'rospy.Rate', 'rospy.Rate', (['(5)'], {}), '(5)\n', (496, 499), False, 'import rospy\n'), ((511, 530), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (528, 530), False, 'import rospy\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[1]:
# from IPython import get_ipython
import time, os, sys, shutil
# from utils.fitting_utils import *
# for math and plotting
import pandas as pd
import numpy as np
import scipy as sp
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # <--- This is important for 3d plotting
import sys, os, pickle
# import cv2
# from colour import Color
import h5py
import glob
import itertools
# and pytorch
import torch
# In[2]:
import ipywidgets as widgets
from ipywidgets import HBox, VBox
from IPython.display import display
# %matplotlib inline
get_ipython().run_line_magic('matplotlib', 'widget')
# In[ ]:
# In[3]:
# def unpack_from_jagged(jagged_line):
# ''' THE REVESER SO HERE IT UNPACKS AGAIN SO THE DATA CAN BE SAVED
# AS A JAGGED H5PY DATASET
# FROM OTHER: Takes the NX3, N, Mx3, M, M shapes and packs to a single float16
# We ravel the position, ravel the keyp, stack everything and
# - importantly - we also save M, the number of keypoints'''
# n_keyp = int(jagged_line[-1])
# keyp_idx2 = jagged_line[-(1+n_keyp):-1].astype('int')
# pkeyp2 = jagged_line[-(1+2*n_keyp):-(1+n_keyp)]
# keyp2 = jagged_line[-(1+5*n_keyp):-(1+2*n_keyp)].reshape((n_keyp,3))
# block2 = jagged_line[:-(1+5*n_keyp)].reshape((-1,4))
# pos2,pos_weights2 = block2[:,:3], block2[:,3]
# # HACK to cut the floor
# floor_logic = pos2[:,2] > .012
# pos2 = pos2[floor_logic,:]
# pos_weights2 = pos_weights2[floor_logic]
# return pos2, pos_weights2, keyp2, pkeyp2, keyp_idx2
from utils.analysis_tools import unpack_from_jagged
from utils.analysis_tools import particles_to_body_supports_cuda
class data_storage(object):
def __init__(self):
# TODO update all this properly
self.data_path = None
self.tracking_path = None
self.jagged_lines = None
self.has_implant = True
self.is_running = False
def load_jagged(self):
with h5py.File(self.data_path, mode='r') as hdf5_file:
print("Loading jagged lines from " + self.data_path + "...")
# print(hdf5_file.keys())
# print(len(hdf5_file['dataset']))
self.jagged_lines = hdf5_file['dataset'][...]
print("Loaded {} jagged lines.".format(len(self.jagged_lines)) )
def load_tracking(self):
with open(self.tracking_path, 'rb') as f:
tracked_behavior = pickle.load(f)
print(tracked_behavior.keys())
self.tracked_behavior = tracked_behavior
self.has_implant = tracked_behavior['has_implant']
self.start_frame = tracked_behavior['start_frame']
self.end_frame = tracked_behavior['end_frame']
# get the raw tracking data!
part = self.tracked_behavior['tracking_holder']
# unpack all the 3D coordinates!
part = torch.from_numpy(part).float().cuda()
part = torch.transpose(part,0,1)
if self.has_implant:
body_support_0 = particles_to_body_supports_cuda(part[:,:9],implant = True)
body_support_1 = particles_to_body_supports_cuda(part[:,9:],implant = False)
# and the spine length
s_0 = part[:,2].cpu().numpy()
s_1 = part[:,2+9].cpu().numpy()
else:
body_support_0 = particles_to_body_supports_cuda(part[:,:8],implant = False)
body_support_1 = particles_to_body_supports_cuda(part[:,8:],implant = False)
# and the spine length
s_0 = part[:,2].cpu().numpy()
s_1 = part[:,2+8].cpu().numpy()
# add the raw and smoothed coordinates as numpy arrays
self.body_support_0_raw = [i.cpu().numpy().squeeze() for i in body_support_0]
# self.body_support_0_smooth = body_support_0_smooth
self.s_0_raw = s_0
# self.s_0_smooth = s_0_smooth
self.body_support_1_raw = [i.cpu().numpy().squeeze() for i in body_support_1]
# self.body_support_1_smooth = body_support_1_smooth
self.s_1_raw = s_1
# self.s_1_smooth = s_1_smooth
def make_3d_axis(self):
# 3D plot of the
fig = plt.figure(figsize = (4.5,4.5))
ax = fig.add_subplot(111, projection='3d')
# add to self for use later
self.fig = fig
self.ax = ax
def add_raw_data(self,frame):
# unpack the raw data in a plottable format
pos, pos_weights, keyp, pkeyp, ikeyp = unpack_from_jagged(self.jagged_lines[frame])
X, Y, Z = pos[:,0],pos[:,1],pos[:,2]
# add to axis 3D plot of Sphere
self.h_pc = self.ax.scatter(X, Y, Z, zdir='z', s=2, c='k', alpha = .05,rasterized=False)
body_colors = ['dodgerblue','red','lime','orange']
body_indices = [0,1,2,3]
# loop over the types of body, and make emptyscatter plots
self.h_kp_list = []
for body in body_indices:
h_kp = self.ax.scatter([],[],[], zdir='z', s=25, c=body_colors[body],rasterized=False)
self.h_kp_list.append(h_kp)
# THEN set the 3d values to be what the shoud be
for body in body_indices:
self.h_kp_list[body]._offsets3d = (keyp[ikeyp==body,0], keyp[ikeyp==body,1], keyp[ikeyp==body,2])
# for axis adjustment
self.max_range = np.array([X.max()-X.min(), Y.max()-Y.min(), Z.max()-Z.min()]).max() / 2.0
self.mid_x = (X.max()+X.min()) * 0.5
self.mid_y = (Y.max()+Y.min()) * 0.5
self.mid_z = (Z.max()+Z.min()) * 0.5
def update_raw_data(self,frame):
# get new raw data!
pos, pos_weights, keyp, pkeyp, ikeyp = unpack_from_jagged(self.jagged_lines[frame])
X, Y, Z = pos[:,0],pos[:,1],pos[:,2]
# update the pointcloud
self.h_pc._offsets3d = (X,Y,Z)
# and update the keypoints
for body in range(4):
self.h_kp_list[body]._offsets3d = (keyp[ikeyp==body,0], keyp[ikeyp==body,1], keyp[ikeyp==body,2])
def plot_skeleton(self,body_support,color = 'k',body_idx = 0,has_implant = False):
# unpack
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
#print("c_hip is {}".format(c_hip))
if has_implant:
p_skel = [c_hip,c_mid,c_nose,c_ass,c_tip,c_impl]
p_line = [c_nose,c_nose,c_mid,c_impl,c_impl]
q_line = [c_mid,c_tip,c_ass,c_nose,c_tip]
else:
p_skel = [c_hip,c_mid,c_nose,c_ass,c_tip]
p_line = [c_nose,c_nose,c_mid]
q_line = [c_mid,c_tip,c_ass]
# add the body points
for p in p_skel:
h_bp = self.ax.scatter(p[0],p[1],p[2],zdir='z', s=50, alpha = 1 , c=color,rasterized=False)
self.h_bp_list[body_idx].append(h_bp)
# and the lines between body parts
for p,q in zip(p_line,q_line):
h_skel = self.ax.plot([p[0],q[0]],[p[1],q[1]],[p[2],q[2]],c=color,lw = 4)
self.h_skel_list[body_idx].append(h_skel)
def add_skel_fit(self,frame,fit='raw',plot_ellipsoids = True):
# frame index
i_frame = frame-self.start_frame
if fit =='raw':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_raw]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_raw]
s_0 = self.s_0_raw[i_frame]
s_1 = self.s_1_raw[i_frame]
elif fit =='smooth':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_smooth]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_smooth]
s_0 = self.s_0_smooth[i_frame]
s_1 = self.s_1_smooth[i_frame]
else:
return
# and plot!
self.h_skel_list = [[],[]]
self.h_bp_list = [[],[]]
self.plot_skeleton(body_support_0,color = 'k',body_idx = 0,has_implant = self.has_implant)
self.plot_skeleton(body_support_1,color = 'peru',body_idx = 1,has_implant = False)
def update_skeleton(self,body_support,body_idx = 0, has_implant = False):
# unpack
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
if has_implant :
p_skel = [c_hip,c_mid,c_nose,c_ass,c_tip,c_impl]
p_line = [c_nose,c_nose,c_mid,c_impl,c_impl]
q_line = [c_mid,c_tip,c_ass,c_nose,c_tip]
else:
p_skel = [c_hip,c_mid,c_nose,c_ass,c_tip]
p_line = [c_nose,c_nose,c_mid]
q_line = [c_mid,c_tip,c_ass]
# update the body points
for j,p in enumerate(p_skel):
self.h_bp_list[body_idx][j]._offsets3d = ([p[0]],[p[1]],[p[2]])
# update the lines between body parts
for j,(p,q) in enumerate(zip(p_line,q_line)):
# # lines are an extra level deep for some stupid matplotlib reason
# self.h_skel_list[body_idx][j][0].set_xdata([p[0],q[0]])
# self.h_skel_list[body_idx][j][0].set_ydata([p[1],q[1]])
# self.h_skel_list[body_idx][j][0].set_3d_properties([p[2],q[2]])
# new matplotlilb has changed how this is done:
self.h_skel_list[body_idx][j][0].set_data_3d([p[0],q[0]],[p[1],q[1]],[p[2],q[2]])
def update_skel_fit(self,frame,fit='raw'):
# get the data out frame index
i_frame = frame-self.start_frame
# speed up this list nonsense
if fit =='raw':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_raw]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_raw]
s_0 = self.s_0_raw[i_frame]
s_1 = self.s_1_raw[i_frame]
elif fit =='smooth':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_smooth]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_smooth]
s_0 = self.s_0_smooth[i_frame]
s_1 = self.s_1_smooth[i_frame]
else:
return
self.update_skeleton(body_support_0,body_idx = 0, has_implant = self.has_implant)
self.update_skeleton(body_support_1,body_idx = 1, has_implant = False)
def add_ellip_fit(self,frame,fit='raw',plot_ellipsoids = True):
# frame index
i_frame = frame-self.start_frame
if fit =='raw':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_raw]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_raw]
s_0 = self.s_0_raw[i_frame]
s_1 = self.s_1_raw[i_frame]
elif fit =='smooth':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_smooth]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_smooth]
s_0 = self.s_0_smooth[i_frame]
s_1 = self.s_1_smooth[i_frame]
else:
return
self.h_hip_list = [[],[]]
self.plot_ellipsoids(body_support_0,s_0,color = 'k',body_idx = 0,has_implant=self.has_implant)
self.plot_ellipsoids(body_support_1,s_1,color = 'peru',body_idx = 1,has_implant=False)
def add_wireframe_to_axis(self,ax,R_body,c_hip, a_nose,b_nose,a_hip,b_hip,r_impl,style='hip',this_color='k',this_alpha=.4):
# FIRST PLOT THE ELLIPSE, which is the hip
# generate points on a sphere
u, v = np.mgrid[0:2*np.pi:20j, 0:np.pi:10j]
# get the mesh, by using the equation of an ellipsoid
if style == 'hip':
x=np.cos(u)*a_hip
y=np.sin(u)*np.sin(v)*b_hip
z=np.sin(u)*np.cos(v)*b_hip
this_color = 'grey'
if style == 'nose':
x=np.cos(u)*a_nose
y=np.sin(u)*np.sin(v)*b_nose
z=np.sin(u)*np.cos(v)*b_nose
if style == 'impl':
x=np.cos(u)*r_impl
y=np.sin(u)*np.sin(v)*r_impl
z=np.sin(u)*np.cos(v)*r_impl
# pack to matrix of positions
posi = np.vstack((x.ravel(),y.ravel(),z.ravel()))
# apply the rotatation and unpack
# posi_rotated = ((R_body @ (posi.T + c_hip).T ).T + t_body).T
# REMEBRE BODY SUPPORTS ARE [c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose]
posi_rotated = np.einsum('ij,ja->ia',R_body,posi) + c_hip[:,np.newaxis]
x = posi_rotated[0,:]
y = posi_rotated[1,:]
z = posi_rotated[2,:]
# reshape for wireframe
x = np.reshape(x, (u.shape) )
y = np.reshape(y, (u.shape) )
z = np.reshape(z, (u.shape) )
h_hip = ax.plot_wireframe(x, y, z, color=this_color,alpha = this_alpha)
return h_hip
def plot_ellipsoids(self,body_support,s,color = 'k',body_idx = 0,has_implant=False):
# unpack
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
# this is not so elegant, hm hm
_, a_hip_min,a_hip_max,b_hip_min,b_hip_max,a_nose,b_nose,d_nose,x_impl,z_impl,r_impl= self.tracked_behavior['body_constants']
a_hip_delta = a_hip_max - a_hip_min
b_hip_delta = b_hip_max - b_hip_min
a_hip_0 = a_hip_min
b_hip_0 = b_hip_min
a_hip = a_hip_0 + a_hip_delta * s
b_hip = b_hip_0 + b_hip_delta * (1.-s)
d_hip = .75 * a_hip
if has_implant:
RRs,ccs,styles = [R_body,R_nose,R_nose],[c_hip,c_nose,c_impl],['hip','nose','impl']
else:
RRs,ccs,styles = [R_body,R_nose],[c_hip,c_nose],['hip','nose']
for RR,cc,style in zip(RRs,ccs,styles):
h_hip = self.add_wireframe_to_axis(self.ax,RR,
cc,
a_nose,
b_nose,
a_hip,
b_hip,
r_impl,
style=style,this_color=color)
self.h_hip_list[body_idx].append(h_hip)
def update_wireframe_lines(self,h_hip,X,Y,Z):
# h_hip is the handle to the lines3dcollection
# much of the code is taken from the source of the marplotlib wireframe plotting
X, Y, Z = np.broadcast_arrays(X, Y, Z)
rows, cols = Z.shape
rstride = 1
cstride = 1
# We want two sets of lines, one running along the "rows" of
# Z and another set of lines running along the "columns" of Z.
# This transpose will make it easy to obtain the columns.
tX, tY, tZ = np.transpose(X), np.transpose(Y), np.transpose(Z)
if rstride:
rii = list(range(0, rows, rstride))
# Add the last index only if needed
if rows > 0 and rii[-1] != (rows - 1):
rii += [rows-1]
else:
rii = []
if cstride:
cii = list(range(0, cols, cstride))
# Add the last index only if needed
if cols > 0 and cii[-1] != (cols - 1):
cii += [cols-1]
else:
cii = []
xlines = [X[i] for i in rii]
ylines = [Y[i] for i in rii]
zlines = [Z[i] for i in rii]
txlines = [tX[i] for i in cii]
tylines = [tY[i] for i in cii]
tzlines = [tZ[i] for i in cii]
lines = ([list(zip(xl, yl, zl))
for xl, yl, zl in zip(xlines, ylines, zlines)]
+ [list(zip(xl, yl, zl))
for xl, yl, zl in zip(txlines, tylines, tzlines)])
h_hip.set_segments(lines)
def calculate_wireframe_points(self,R_body,c_hip,a_nose,b_nose,a_hip,b_hip,r_impl,style='hip'):
# FIRST PLOT THE ELLIPSE, which is the hip
# generate points on a sphere
u, v = np.mgrid[0:2*np.pi:20j, 0:np.pi:10j]
# get the mesh, by using the equation of an ellipsoid
if style == 'hip':
x=np.cos(u)*a_hip
y=np.sin(u)*np.sin(v)*b_hip
z=np.sin(u)*np.cos(v)*b_hip
if style == 'nose':
x=np.cos(u)*a_nose
y=np.sin(u)*np.sin(v)*b_nose
z=np.sin(u)*np.cos(v)*b_nose
if style == 'impl':
x=np.cos(u)*r_impl
y=np.sin(u)*np.sin(v)*r_impl
z=np.sin(u)*np.cos(v)*r_impl
# pack to matrix of positions
posi = np.vstack((x.ravel(),y.ravel(),z.ravel()))
# apply the rotatation and unpack
# posi_rotated = ((R_body @ (posi.T + c_hip).T ).T + t_body).T
# REMEBRE BODY SUPPORTS ARE [c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose]
posi_rotated = np.einsum('ij,ja->ia',R_body,posi) + c_hip[:,np.newaxis]
x = posi_rotated[0,:]
y = posi_rotated[1,:]
z = posi_rotated[2,:]
# reshape for wireframe
x = np.reshape(x, (u.shape) )
y = np.reshape(y, (u.shape) )
z = np.reshape(z, (u.shape) )
return x,y,z
def update_ellipsoids(self,body_support,s,body_idx = 0, has_implant = False):
# unpack
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
# this is not so elegant, hm hm
# this is STILL not so elegant, hm hm
_, a_hip_min,a_hip_max,b_hip_min,b_hip_max,a_nose,b_nose,d_nose,x_impl,z_impl,r_impl= self.tracked_behavior['body_constants']
a_hip_delta = a_hip_max - a_hip_min
b_hip_delta = b_hip_max - b_hip_min
a_hip_0 = a_hip_min
b_hip_0 = b_hip_min
a_hip = a_hip_0 + a_hip_delta * s
b_hip = b_hip_0 + b_hip_delta * (1.-s)
d_hip = .75 * a_hip
if has_implant:
RRs,ccs,styles = [R_body,R_nose,R_nose],[c_hip,c_nose,c_impl],['hip','nose','impl']
else:
RRs,ccs,styles = [R_body,R_nose],[c_hip,c_nose],['hip','nose']
for jj, (RR,cc,style) in enumerate(zip(RRs,ccs,styles)):
X,Y,Z = self.calculate_wireframe_points(RR,
cc,
a_nose,
b_nose,
a_hip,
b_hip,
r_impl,
style=style)
h_hip = self.h_hip_list[body_idx][jj]
self.update_wireframe_lines(h_hip,X,Y,Z)
def update_ellip_fit(self,frame,fit = 'raw'):
# get the data out frame index
i_frame = frame-self.start_frame
# speed up this list nonsense
if fit =='raw':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_raw]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_raw]
s_0 = self.s_0_raw[i_frame]
s_1 = self.s_1_raw[i_frame]
elif fit =='smooth':
body_support_0 = [ d[i_frame,...] for d in self.body_support_0_smooth]
body_support_1 = [ d[i_frame,...] for d in self.body_support_1_smooth]
s_0 = self.s_0_smooth[i_frame]
s_1 = self.s_1_smooth[i_frame]
else:
return
self.update_ellipsoids(body_support_0,s_0,body_idx = 0,has_implant = self.has_implant)
self.update_ellipsoids(body_support_1,s_1,body_idx = 1,has_implant = False)
def unpack_trace(self,body_support,trace_indices,body_idx = 0,what_type=['hip'],color='k'):
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
type_list = np.array(['hip','ass','mid','nose','tip','impl'])
c_list = [c_hip,c_ass,c_mid,c_nose,c_tip,c_impl]
ii_c_list = np.arange(len(type_list))
# TODO make the decay work!
for ttt in what_type:
# this is also not so elegant
selecta = np.arange(len(type_list))[type_list == ttt]
dat = c_list[selecta[0]].squeeze()
X,Y,Z = dat[trace_indices,0],dat[trace_indices,1],dat[trace_indices,2]
h_trace = self.ax.plot(X,Y,Z,lw=2,c=color,alpha = .65)
self.h_trace_list[body_idx][ii_c_list[type_list == ttt][0]] = h_trace
def add_trace(self,frame,trace='raw',trace_length=90,trace_clip = None,decay_factor=.9, type_list = ['nose']):
# get the particle, convert to torch tensor, calculate body supports
i_frame = frame-self.start_frame
# make a holder for the lines
self.h_trace_list = [[None]*5,[None]*5]
if trace_clip is not None:
i_clip = trace_clip-self.start_frame
i_trace_start = np.max([i_clip, i_frame-trace_length])
else:
i_trace_start = np.max([0, i_frame-trace_length])
#print("i_trace_start is {} and i_frame is {}".format(i_trace_start,i_frame))
trace_indices = np.arange(i_trace_start,i_frame)
if trace == 'raw':
self.unpack_trace(self.body_support_0_raw,trace_indices, body_idx = 0,what_type=type_list,color='black')
self.unpack_trace(self.body_support_1_raw,trace_indices, body_idx = 1,what_type=type_list,color='peru')
if trace == 'smooth':
self.unpack_trace(self.body_support_0_smooth,trace_indices, body_idx = 0,what_type=type_list,color='black')
self.unpack_trace(self.body_support_1_smooth,trace_indices, body_idx = 1,what_type=type_list,color='peru')
def update_trace_3dlines(self,body_support,trace_indices,body_idx=0,what_type=['hip']):
c_hip,c_ass,c_mid,c_nose,c_tip,c_impl,R_body,R_head,R_nose = body_support
type_list = np.array(['hip','ass','mid','nose','tip','impl'])
c_list = [c_hip,c_ass,c_mid,c_nose,c_tip,c_impl]
ii_c_list = np.arange(len(type_list))
# TODO make the decay work!
for ttt in what_type:
# this is also not so elegant
selecta = np.arange(len(type_list))[type_list == ttt]
dat = c_list[selecta[0]].squeeze()
X,Y,Z = dat[trace_indices,0],dat[trace_indices,1],dat[trace_indices,2]
# self.h_trace_list[body_idx][ii_c_list[type_list == what_type][0]][0].set_xdata(X)
# self.h_trace_list[body_idx][ii_c_list[type_list == what_type][0]][0].set_ydata(Y)
# self.h_trace_list[body_idx][ii_c_list[type_list == what_type][0]][0].set_3d_properties(Z)
# Ugh matplotlib changed the api, the new way makes much more sense though, so fine..
self.h_trace_list[body_idx][ii_c_list[type_list == what_type][0]][0].set_data_3d(X,Y,Z)
def update_trace_fit(self,frame,trace='raw',trace_length=90,trace_clip = None,decay_factor=.9, type_list = None):
# get the particle, convert to torch tensor, calculate body supports
i_frame = frame-self.start_frame
if trace_clip is not None:
i_clip = trace_clip-self.start_frame
i_trace_start = np.max([i_clip, i_frame-trace_length])
else:
i_trace_start = np.max([0, i_frame-trace_length])
# these are the indices to plot
trace_indices = np.arange(i_trace_start,i_frame)
if trace =='raw':
body_support_0 = self.body_support_0_raw
body_support_1 = self.body_support_1_raw
elif trace =='smooth':
body_support_0 = self.body_support_0_smooth
body_support_1 = self.body_support_1_smooth
else:
return
if len(trace_indices)== 0:
# just skip if there is no trace
return
self.update_trace_3dlines(body_support_0,trace_indices,body_idx=0,what_type = type_list)
self.update_trace_3dlines(body_support_1,trace_indices,body_idx=1,what_type = type_list)
def finish_3d_axis(self,view_style = 'ex', zoom = False, dump = False):
# finish the labeling, plot adjustments, dump and show
ax = self.ax
if self.max_range is not None:
ax.set_xlim(self.mid_x - self.max_range, self.mid_x + self.max_range)
ax.set_ylim(self.mid_y - self.max_range, self.mid_y + self.max_range)
ax.set_zlim(0, 2*self.max_range)
ax.xaxis.set_ticklabels([])
ax.yaxis.set_ticklabels([])
ax.zaxis.set_ticklabels([])
if view_style == 'top':
az = -30
el = 90
if view_style == 'side':
az = -15
el = 9
if view_style == 'mix':
az = 150
el = 50
if view_style == 'ex':
az = -14
el = 46
if view_style == 'ex':
az = -46
el = 23
ax.view_init(elev=el, azim=az)
storage = data_storage()
# In[4]:
play = widgets.Play(
value=0,
min=0,
max=10000,
step=10,
interval=100,
description="Press play",
disabled=False
)
slider = widgets.IntSlider(value=0,
min=0,
max=10000)
def on_value_change(change):
frame = int(change['new'])
storage.update_raw_data( change['new'] )
storage.update_skel_fit( int(change['new']) )
storage.update_ellip_fit( int(change['new']) )
# storage.update_trace_fit( int(change['new']) )
# storage.update_trace_fit(frame)
storage.fig.canvas.draw()
slider.observe(on_value_change, 'value')
widgets.jslink((play, 'value'),(slider, 'value'))
# In[5]:
data_path_textbox = widgets.Text(
value='/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/pre_processed_frames.hdf5',
description='Path:'
)
tracking_path_textbox = widgets.Text(
value='/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/tracked_behavior_in_progress.pkl',
description='Path:'
)
load_button = widgets.Button(
description='Load data',
)
load_behavior_button = widgets.Button(
description='Load tracking',
)
# In[6]:
@load_button.on_click
def plot_on_click(b):
storage.data_path = data_path_textbox.value
storage.load_jagged()
# and make the plot
storage.add_raw_data( int(play.value) )
storage.finish_3d_axis()
storage.fig.canvas.draw()
# set the min and max time to the behavior!
play.min = 0
play.max = len(storage.jagged_lines)
slider.min = 0
slider.max = len(storage.jagged_lines)
@load_behavior_button.on_click
def plot_on_click2(b):
storage.tracking_path = tracking_path_textbox.value
storage.load_tracking()
storage.add_skel_fit( int(play.value) )
storage.add_ellip_fit( int(play.value) )
# storage.add_trace( int(play.value) )
play.min = storage.tracked_behavior['start_frame']
play.max = storage.tracked_behavior['end_frame']
slider.min = storage.tracked_behavior['start_frame']
slider.max = storage.tracked_behavior['end_frame']
# # set the min and max time to the tracked behavior!
# play.min = 0
# play.max = len(storage.jagged_lines)
storage.fig.canvas.draw()
# In[7]:
frame_textbox = widgets.BoundedIntText(
value=0,
min = 0,
max = 10000,
description='Frame #:'
)
jump_frame_button = widgets.Button(
description='Jump to frame',
)
# In[8]:
@jump_frame_button.on_click
def update_frame(b):
play.value = frame_textbox.value
# storage.update_raw_data( frame_textbox.value)
# storage.fig.canvas.draw()
# In[9]:
fps = 60
time_textbox = widgets.BoundedFloatText(
value=0,
min = 0,
max = 10000/60,
description='Time [s]:'
)
jump_time_button = widgets.Button(
description='Jump to time',
)
# In[10]:
@jump_time_button.on_click
def update_time(b):
play.value = int(time_textbox.value * fps)
# storage.update_raw_data( int(time_textbox.value * fps) )
# storage.fig.canvas.draw()
# In[ ]:
# In[11]:
# widgets.jslink((play, 'value'),(frame_textbox, 'value'))
# In[12]:
raw_ok =widgets.Valid(
value=True,
indent = True,
description='Raw data',
)
track_ok = widgets.Valid(
value=True,
description='Tracking'
)
# In[13]:
check_raw = widgets.Checkbox(
value=True,
description='Display raw data',
disabled=False,
indent=True
)
check_skel = widgets.Checkbox(
value=True,
description='Display skeleton',
disabled=False,
indent=False
)
check_ellip = widgets.Checkbox(
value=True,
description='Display ellipsoids',
disabled=False,
indent=True
)
check_trace = widgets.Checkbox(
value=False,
description='Display trace',
disabled=False,
indent=False
)
# In[14]:
sub10_button = widgets.Button(
description='<< 10',
)
sub5_button = widgets.Button(
description='< 5',
)
add10_button = widgets.Button(
description='10 >>',
)
add5_button = widgets.Button(
description='5 >',
)
@sub10_button.on_click
def update_frame(b):
play.value = play.value - 10
@sub5_button.on_click
def update_frame(b):
play.value = play.value - 5
@add5_button.on_click
def update_frame(b):
play.value = play.value + 5
@add10_button.on_click
def update_frame(b):
play.value = play.value + 10
# In[15]:
from ipywidgets import AppLayout, GridspecLayout
item_layout = widgets.Layout(margin='0 0 10px 10px')
dashboard = VBox([
HBox([data_path_textbox, load_button], layout = item_layout) ,
HBox([tracking_path_textbox, load_behavior_button], layout = item_layout) ,
HBox([track_ok, raw_ok], layout = item_layout) ,
HBox([play, slider], layout = item_layout) ,
HBox([sub10_button,sub5_button,add5_button,add10_button]) ,
HBox([frame_textbox,jump_frame_button], layout = item_layout) ,
HBox([time_textbox,jump_time_button] , layout = item_layout) ,
HBox([check_raw,check_skel]),
HBox([check_ellip,check_trace])
])
output = widgets.Output()
with output:
storage.make_3d_axis()
storage.fig.canvas.toolbar_position = 'bottom'
# In[ ]:
# In[16]:
from ipywidgets import AppLayout
from ipywidgets import HTML, Layout, Dropdown, Output, Textarea, VBox, Label, Text
from ipywidgets import Label, Layout, HBox
from IPython.display import display
# header = HTML("<h1><center><\"(__)~~.. MousePlayer <\"(__)~~....</center></h1>")
# header = HTML("<h1><center><\"(__)~~.. ʍօʊֆɛ քʟǟʏɛʀ <\"(__)~~....</center></h1>")
header = HTML("<h1><center>🐭 ʍօʊֆɛ քʟǟʏɛʀ 🐭</center></h1>")
# board = VBox( [header, HBox([output,dashboard]) ], layout=Layout(justify_content = 'center') )
board = AppLayout(header=None,
left_sidebar=None,
center=output,
right_sidebar=dashboard,
footer=None,
pane_widths=[0,2, 2])
app = VBox( [header, board ], layout=Layout(justify_content = 'center') )
# In[ ]:
# In[17]:
# In[ ]:
# In[ ]:
# In[18]:
# TODO toggles to show trace, ellipsoids, skeleton, raw data,
# Labeles showing if data is loaded or tracking is loaded
# Tracking without the raw data (get the xy limits from the xy data)
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
# In[ ]:
| [
"utils.analysis_tools.particles_to_body_supports_cuda",
"ipywidgets.Valid",
"ipywidgets.Text",
"numpy.einsum",
"ipywidgets.jslink",
"ipywidgets.Output",
"matplotlib.pyplot.figure",
"pickle.load",
"numpy.arange",
"numpy.sin",
"ipywidgets.BoundedIntText",
"ipywidgets.Button",
"numpy.transpose"... | [((24684, 24793), 'ipywidgets.Play', 'widgets.Play', ([], {'value': '(0)', 'min': '(0)', 'max': '(10000)', 'step': '(10)', 'interval': '(100)', 'description': '"""Press play"""', 'disabled': '(False)'}), "(value=0, min=0, max=10000, step=10, interval=100, description=\n 'Press play', disabled=False)\n", (24696, 24793), True, 'import ipywidgets as widgets\n'), ((24828, 24872), 'ipywidgets.IntSlider', 'widgets.IntSlider', ([], {'value': '(0)', 'min': '(0)', 'max': '(10000)'}), '(value=0, min=0, max=10000)\n', (24845, 24872), True, 'import ipywidgets as widgets\n'), ((25256, 25306), 'ipywidgets.jslink', 'widgets.jslink', (["(play, 'value')", "(slider, 'value')"], {}), "((play, 'value'), (slider, 'value'))\n", (25270, 25306), True, 'import ipywidgets as widgets\n'), ((25339, 25478), 'ipywidgets.Text', 'widgets.Text', ([], {'value': '"""/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/pre_processed_frames.hdf5"""', 'description': '"""Path:"""'}), "(value=\n '/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/pre_processed_frames.hdf5'\n , description='Path:')\n", (25351, 25478), True, 'import ipywidgets as widgets\n'), ((25504, 25650), 'ipywidgets.Text', 'widgets.Text', ([], {'value': '"""/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/tracked_behavior_in_progress.pkl"""', 'description': '"""Path:"""'}), "(value=\n '/media/chrelli/SSD4TB/Data0_backup/recording_20201110-105540/tracked_behavior_in_progress.pkl'\n , description='Path:')\n", (25516, 25650), True, 'import ipywidgets as widgets\n'), ((25667, 25706), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Load data"""'}), "(description='Load data')\n", (25681, 25706), True, 'import ipywidgets as widgets\n'), ((25738, 25781), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Load tracking"""'}), "(description='Load tracking')\n", (25752, 25781), True, 'import ipywidgets as widgets\n'), ((26891, 26964), 'ipywidgets.BoundedIntText', 
'widgets.BoundedIntText', ([], {'value': '(0)', 'min': '(0)', 'max': '(10000)', 'description': '"""Frame #:"""'}), "(value=0, min=0, max=10000, description='Frame #:')\n", (26913, 26964), True, 'import ipywidgets as widgets\n'), ((27008, 27051), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Jump to frame"""'}), "(description='Jump to frame')\n", (27022, 27051), True, 'import ipywidgets as widgets\n'), ((27281, 27367), 'ipywidgets.BoundedFloatText', 'widgets.BoundedFloatText', ([], {'value': '(0)', 'min': '(0)', 'max': '(10000 / 60)', 'description': '"""Time [s]:"""'}), "(value=0, min=0, max=10000 / 60, description=\n 'Time [s]:')\n", (27305, 27367), True, 'import ipywidgets as widgets\n'), ((27403, 27445), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""Jump to time"""'}), "(description='Jump to time')\n", (27417, 27445), True, 'import ipywidgets as widgets\n'), ((27765, 27827), 'ipywidgets.Valid', 'widgets.Valid', ([], {'value': '(True)', 'indent': '(True)', 'description': '"""Raw data"""'}), "(value=True, indent=True, description='Raw data')\n", (27778, 27827), True, 'import ipywidgets as widgets\n'), ((27857, 27906), 'ipywidgets.Valid', 'widgets.Valid', ([], {'value': '(True)', 'description': '"""Tracking"""'}), "(value=True, description='Tracking')\n", (27870, 27906), True, 'import ipywidgets as widgets\n'), ((27945, 28038), 'ipywidgets.Checkbox', 'widgets.Checkbox', ([], {'value': '(True)', 'description': '"""Display raw data"""', 'disabled': '(False)', 'indent': '(True)'}), "(value=True, description='Display raw data', disabled=False,\n indent=True)\n", (27961, 28038), True, 'import ipywidgets as widgets\n'), ((28067, 28161), 'ipywidgets.Checkbox', 'widgets.Checkbox', ([], {'value': '(True)', 'description': '"""Display skeleton"""', 'disabled': '(False)', 'indent': '(False)'}), "(value=True, description='Display skeleton', disabled=False,\n indent=False)\n", (28083, 28161), True, 'import ipywidgets as widgets\n'), ((28191, 
28287), 'ipywidgets.Checkbox', 'widgets.Checkbox', ([], {'value': '(True)', 'description': '"""Display ellipsoids"""', 'disabled': '(False)', 'indent': '(True)'}), "(value=True, description='Display ellipsoids', disabled=\n False, indent=True)\n", (28207, 28287), True, 'import ipywidgets as widgets\n'), ((28316, 28408), 'ipywidgets.Checkbox', 'widgets.Checkbox', ([], {'value': '(False)', 'description': '"""Display trace"""', 'disabled': '(False)', 'indent': '(False)'}), "(value=False, description='Display trace', disabled=False,\n indent=False)\n", (28332, 28408), True, 'import ipywidgets as widgets\n'), ((28452, 28487), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""<< 10"""'}), "(description='<< 10')\n", (28466, 28487), True, 'import ipywidgets as widgets\n'), ((28510, 28543), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""< 5"""'}), "(description='< 5')\n", (28524, 28543), True, 'import ipywidgets as widgets\n'), ((28567, 28602), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""10 >>"""'}), "(description='10 >>')\n", (28581, 28602), True, 'import ipywidgets as widgets\n'), ((28625, 28658), 'ipywidgets.Button', 'widgets.Button', ([], {'description': '"""5 >"""'}), "(description='5 >')\n", (28639, 28658), True, 'import ipywidgets as widgets\n'), ((29051, 29089), 'ipywidgets.Layout', 'widgets.Layout', ([], {'margin': '"""0 0 10px 10px"""'}), "(margin='0 0 10px 10px')\n", (29065, 29089), True, 'import ipywidgets as widgets\n'), ((29658, 29674), 'ipywidgets.Output', 'widgets.Output', ([], {}), '()\n', (29672, 29674), True, 'import ipywidgets as widgets\n'), ((30169, 30219), 'ipywidgets.HTML', 'HTML', (['"""<h1><center>🐭 ʍօʊֆɛ քʟǟʏɛʀ 🐭</center></h1>"""'], {}), "('<h1><center>🐭 ʍօʊֆɛ քʟǟʏɛʀ 🐭</center></h1>')\n", (30173, 30219), False, 'from ipywidgets import HTML, Layout, Dropdown, Output, Textarea, VBox, Label, Text\n'), ((30328, 30450), 'ipywidgets.AppLayout', 'AppLayout', ([], {'header': 'None', 'left_sidebar': 
'None', 'center': 'output', 'right_sidebar': 'dashboard', 'footer': 'None', 'pane_widths': '[0, 2, 2]'}), '(header=None, left_sidebar=None, center=output, right_sidebar=\n dashboard, footer=None, pane_widths=[0, 2, 2])\n', (30337, 30450), False, 'from ipywidgets import AppLayout\n'), ((2940, 2967), 'torch.transpose', 'torch.transpose', (['part', '(0)', '(1)'], {}), '(part, 0, 1)\n', (2955, 2967), False, 'import torch\n'), ((4168, 4198), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(4.5, 4.5)'}), '(figsize=(4.5, 4.5))\n', (4178, 4198), True, 'import matplotlib.pyplot as plt\n'), ((4465, 4509), 'utils.analysis_tools.unpack_from_jagged', 'unpack_from_jagged', (['self.jagged_lines[frame]'], {}), '(self.jagged_lines[frame])\n', (4483, 4509), False, 'from utils.analysis_tools import unpack_from_jagged\n'), ((5634, 5678), 'utils.analysis_tools.unpack_from_jagged', 'unpack_from_jagged', (['self.jagged_lines[frame]'], {}), '(self.jagged_lines[frame])\n', (5652, 5678), False, 'from utils.analysis_tools import unpack_from_jagged\n'), ((12371, 12393), 'numpy.reshape', 'np.reshape', (['x', 'u.shape'], {}), '(x, u.shape)\n', (12381, 12393), True, 'import numpy as np\n'), ((12409, 12431), 'numpy.reshape', 'np.reshape', (['y', 'u.shape'], {}), '(y, u.shape)\n', (12419, 12431), True, 'import numpy as np\n'), ((12447, 12469), 'numpy.reshape', 'np.reshape', (['z', 'u.shape'], {}), '(z, u.shape)\n', (12457, 12469), True, 'import numpy as np\n'), ((14214, 14242), 'numpy.broadcast_arrays', 'np.broadcast_arrays', (['X', 'Y', 'Z'], {}), '(X, Y, Z)\n', (14233, 14242), True, 'import numpy as np\n'), ((16790, 16812), 'numpy.reshape', 'np.reshape', (['x', 'u.shape'], {}), '(x, u.shape)\n', (16800, 16812), True, 'import numpy as np\n'), ((16828, 16850), 'numpy.reshape', 'np.reshape', (['y', 'u.shape'], {}), '(y, u.shape)\n', (16838, 16850), True, 'import numpy as np\n'), ((16866, 16888), 'numpy.reshape', 'np.reshape', (['z', 'u.shape'], {}), '(z, u.shape)\n', (16876, 16888), 
True, 'import numpy as np\n'), ((19555, 19609), 'numpy.array', 'np.array', (["['hip', 'ass', 'mid', 'nose', 'tip', 'impl']"], {}), "(['hip', 'ass', 'mid', 'nose', 'tip', 'impl'])\n", (19563, 19609), True, 'import numpy as np\n'), ((20822, 20855), 'numpy.arange', 'np.arange', (['i_trace_start', 'i_frame'], {}), '(i_trace_start, i_frame)\n', (20831, 20855), True, 'import numpy as np\n'), ((21580, 21634), 'numpy.array', 'np.array', (["['hip', 'ass', 'mid', 'nose', 'tip', 'impl']"], {}), "(['hip', 'ass', 'mid', 'nose', 'tip', 'impl'])\n", (21588, 21634), True, 'import numpy as np\n'), ((23062, 23095), 'numpy.arange', 'np.arange', (['i_trace_start', 'i_frame'], {}), '(i_trace_start, i_frame)\n', (23071, 23095), True, 'import numpy as np\n'), ((29114, 29172), 'ipywidgets.HBox', 'HBox', (['[data_path_textbox, load_button]'], {'layout': 'item_layout'}), '([data_path_textbox, load_button], layout=item_layout)\n', (29118, 29172), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29183, 29254), 'ipywidgets.HBox', 'HBox', (['[tracking_path_textbox, load_behavior_button]'], {'layout': 'item_layout'}), '([tracking_path_textbox, load_behavior_button], layout=item_layout)\n', (29187, 29254), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29264, 29308), 'ipywidgets.HBox', 'HBox', (['[track_ok, raw_ok]'], {'layout': 'item_layout'}), '([track_ok, raw_ok], layout=item_layout)\n', (29268, 29308), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29318, 29358), 'ipywidgets.HBox', 'HBox', (['[play, slider]'], {'layout': 'item_layout'}), '([play, slider], layout=item_layout)\n', (29322, 29358), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29368, 29428), 'ipywidgets.HBox', 'HBox', (['[sub10_button, sub5_button, add5_button, add10_button]'], {}), '([sub10_button, sub5_button, add5_button, add10_button])\n', (29372, 29428), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29433, 29493), 'ipywidgets.HBox', 'HBox', (['[frame_textbox, 
jump_frame_button]'], {'layout': 'item_layout'}), '([frame_textbox, jump_frame_button], layout=item_layout)\n', (29437, 29493), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29502, 29560), 'ipywidgets.HBox', 'HBox', (['[time_textbox, jump_time_button]'], {'layout': 'item_layout'}), '([time_textbox, jump_time_button], layout=item_layout)\n', (29506, 29560), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29570, 29599), 'ipywidgets.HBox', 'HBox', (['[check_raw, check_skel]'], {}), '([check_raw, check_skel])\n', (29574, 29599), False, 'from ipywidgets import Label, Layout, HBox\n'), ((29604, 29636), 'ipywidgets.HBox', 'HBox', (['[check_ellip, check_trace]'], {}), '([check_ellip, check_trace])\n', (29608, 29636), False, 'from ipywidgets import Label, Layout, HBox\n'), ((30540, 30572), 'ipywidgets.Layout', 'Layout', ([], {'justify_content': '"""center"""'}), "(justify_content='center')\n", (30546, 30572), False, 'from ipywidgets import Label, Layout, HBox\n'), ((2010, 2045), 'h5py.File', 'h5py.File', (['self.data_path'], {'mode': '"""r"""'}), "(self.data_path, mode='r')\n", (2019, 2045), False, 'import h5py\n'), ((2460, 2474), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2471, 2474), False, 'import sys, os, pickle\n'), ((3025, 3083), 'utils.analysis_tools.particles_to_body_supports_cuda', 'particles_to_body_supports_cuda', (['part[:, :9]'], {'implant': '(True)'}), '(part[:, :9], implant=True)\n', (3056, 3083), False, 'from utils.analysis_tools import particles_to_body_supports_cuda\n'), ((3113, 3172), 'utils.analysis_tools.particles_to_body_supports_cuda', 'particles_to_body_supports_cuda', (['part[:, 9:]'], {'implant': '(False)'}), '(part[:, 9:], implant=False)\n', (3144, 3172), False, 'from utils.analysis_tools import particles_to_body_supports_cuda\n'), ((3338, 3397), 'utils.analysis_tools.particles_to_body_supports_cuda', 'particles_to_body_supports_cuda', (['part[:, :8]'], {'implant': '(False)'}), '(part[:, :8], implant=False)\n', 
(3369, 3397), False, 'from utils.analysis_tools import particles_to_body_supports_cuda\n'), ((3427, 3486), 'utils.analysis_tools.particles_to_body_supports_cuda', 'particles_to_body_supports_cuda', (['part[:, 8:]'], {'implant': '(False)'}), '(part[:, 8:], implant=False)\n', (3458, 3486), False, 'from utils.analysis_tools import particles_to_body_supports_cuda\n'), ((12178, 12214), 'numpy.einsum', 'np.einsum', (['"""ij,ja->ia"""', 'R_body', 'posi'], {}), "('ij,ja->ia', R_body, posi)\n", (12187, 12214), True, 'import numpy as np\n'), ((14541, 14556), 'numpy.transpose', 'np.transpose', (['X'], {}), '(X)\n', (14553, 14556), True, 'import numpy as np\n'), ((14558, 14573), 'numpy.transpose', 'np.transpose', (['Y'], {}), '(Y)\n', (14570, 14573), True, 'import numpy as np\n'), ((14575, 14590), 'numpy.transpose', 'np.transpose', (['Z'], {}), '(Z)\n', (14587, 14590), True, 'import numpy as np\n'), ((16597, 16633), 'numpy.einsum', 'np.einsum', (['"""ij,ja->ia"""', 'R_body', 'posi'], {}), "('ij,ja->ia', R_body, posi)\n", (16606, 16633), True, 'import numpy as np\n'), ((20596, 20636), 'numpy.max', 'np.max', (['[i_clip, i_frame - trace_length]'], {}), '([i_clip, i_frame - trace_length])\n', (20602, 20636), True, 'import numpy as np\n'), ((20677, 20712), 'numpy.max', 'np.max', (['[0, i_frame - trace_length]'], {}), '([0, i_frame - trace_length])\n', (20683, 20712), True, 'import numpy as np\n'), ((22882, 22922), 'numpy.max', 'np.max', (['[i_clip, i_frame - trace_length]'], {}), '([i_clip, i_frame - trace_length])\n', (22888, 22922), True, 'import numpy as np\n'), ((22963, 22998), 'numpy.max', 'np.max', (['[0, i_frame - trace_length]'], {}), '([0, i_frame - trace_length])\n', (22969, 22998), True, 'import numpy as np\n'), ((11437, 11446), 'numpy.cos', 'np.cos', (['u'], {}), '(u)\n', (11443, 11446), True, 'import numpy as np\n'), ((11607, 11616), 'numpy.cos', 'np.cos', (['u'], {}), '(u)\n', (11613, 11616), True, 'import numpy as np\n'), ((11748, 11757), 'numpy.cos', 'np.cos', 
(['u'], {}), '(u)\n', (11754, 11757), True, 'import numpy as np\n'), ((15888, 15897), 'numpy.cos', 'np.cos', (['u'], {}), '(u)\n', (15894, 15897), True, 'import numpy as np\n'), ((16026, 16035), 'numpy.cos', 'np.cos', (['u'], {}), '(u)\n', (16032, 16035), True, 'import numpy as np\n'), ((16167, 16176), 'numpy.cos', 'np.cos', (['u'], {}), '(u)\n', (16173, 16176), True, 'import numpy as np\n'), ((11467, 11476), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11473, 11476), True, 'import numpy as np\n'), ((11477, 11486), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (11483, 11486), True, 'import numpy as np\n'), ((11507, 11516), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11513, 11516), True, 'import numpy as np\n'), ((11517, 11526), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (11523, 11526), True, 'import numpy as np\n'), ((11638, 11647), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11644, 11647), True, 'import numpy as np\n'), ((11648, 11657), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (11654, 11657), True, 'import numpy as np\n'), ((11679, 11688), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11685, 11688), True, 'import numpy as np\n'), ((11689, 11698), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (11695, 11698), True, 'import numpy as np\n'), ((11779, 11788), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11785, 11788), True, 'import numpy as np\n'), ((11789, 11798), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (11795, 11798), True, 'import numpy as np\n'), ((11820, 11829), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (11826, 11829), True, 'import numpy as np\n'), ((11830, 11839), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (11836, 11839), True, 'import numpy as np\n'), ((15918, 15927), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (15924, 15927), True, 'import numpy as np\n'), ((15928, 15937), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (15934, 15937), True, 'import numpy as np\n'), ((15958, 15967), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (15964, 15967), 
True, 'import numpy as np\n'), ((15968, 15977), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (15974, 15977), True, 'import numpy as np\n'), ((16057, 16066), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (16063, 16066), True, 'import numpy as np\n'), ((16067, 16076), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (16073, 16076), True, 'import numpy as np\n'), ((16098, 16107), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (16104, 16107), True, 'import numpy as np\n'), ((16108, 16117), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (16114, 16117), True, 'import numpy as np\n'), ((16198, 16207), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (16204, 16207), True, 'import numpy as np\n'), ((16208, 16217), 'numpy.sin', 'np.sin', (['v'], {}), '(v)\n', (16214, 16217), True, 'import numpy as np\n'), ((16239, 16248), 'numpy.sin', 'np.sin', (['u'], {}), '(u)\n', (16245, 16248), True, 'import numpy as np\n'), ((16249, 16258), 'numpy.cos', 'np.cos', (['v'], {}), '(v)\n', (16255, 16258), True, 'import numpy as np\n'), ((2887, 2909), 'torch.from_numpy', 'torch.from_numpy', (['part'], {}), '(part)\n', (2903, 2909), False, 'import torch\n')] |
import falcon
import history
from bayesapi.resources import BaseResource
from bayesapi.validation import validate
class AnomaliesResource(BaseResource):
def on_post(self, req, resp):
req_vars = validate(self.api_def, 'post', req)
target_column = req_vars['target-column']
context_columns = req_vars['context-columns']
quoted_tgt_column = '"{}"'.format(target_column)
quoted_ctx_columns = ['"{}"'.format(c) for c in context_columns]
with self.bdb.savepoint():
query = self.queries.find_anomalies(
population = self.cfg.population_name,
target_column = quoted_tgt_column,
context_columns = quoted_ctx_columns
)
self.logger.info(query)
cursor = self.execute(query)
cols = ['row-id','probability']
result = [dict(zip(cols, row)) for row in cursor]
history.save(self.cfg.history,
{'type': 'anomalies',
'query': query,
'result': result,
'target_column': target_column,
'context_columns': context_columns})
history.save(self.cfg.history,
{ 'result': result },
"anomaly" )
resp.media = result
resp.status = falcon.HTTP_200
| [
"bayesapi.validation.validate",
"history.save"
] | [((209, 244), 'bayesapi.validation.validate', 'validate', (['self.api_def', '"""post"""', 'req'], {}), "(self.api_def, 'post', req)\n", (217, 244), False, 'from bayesapi.validation import validate\n'), ((937, 1100), 'history.save', 'history.save', (['self.cfg.history', "{'type': 'anomalies', 'query': query, 'result': result, 'target_column':\n target_column, 'context_columns': context_columns}"], {}), "(self.cfg.history, {'type': 'anomalies', 'query': query,\n 'result': result, 'target_column': target_column, 'context_columns':\n context_columns})\n", (949, 1100), False, 'import history\n'), ((1235, 1296), 'history.save', 'history.save', (['self.cfg.history', "{'result': result}", '"""anomaly"""'], {}), "(self.cfg.history, {'result': result}, 'anomaly')\n", (1247, 1296), False, 'import history\n')] |
import json
import numpy as np
import os
home = os.getcwd() + "/"
with open(home + "averaged_distributions/105_charges_distributions_errors.json") as file:
data = json.load(file)
x = list(np.array(range(4, 38)) * 2.5e-10)
permittivities = [1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 50, 75, 100, 2, 65, 85]
os.chdir(home + "charges_105")
for perm in permittivities:
os.chdir(home + "charges_105/permittivity_{}".format(perm))
y = data["distribution_{}".format(perm)][3:]
yerr = data["standard_errors_{}".format(perm)][3:]
input = {"x": x, "y" : y, "yerr":yerr}
with open(home + "charges_105/permittivity_{}/inputs.json".format(perm), 'w') as d:
json.dump(input,d)
os.chdir(home + "charges_105")
| [
"os.getcwd",
"json.dump",
"json.load",
"os.chdir"
] | [((307, 337), 'os.chdir', 'os.chdir', (["(home + 'charges_105')"], {}), "(home + 'charges_105')\n", (315, 337), False, 'import os\n'), ((48, 59), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (57, 59), False, 'import os\n'), ((166, 181), 'json.load', 'json.load', (['file'], {}), '(file)\n', (175, 181), False, 'import json\n'), ((701, 731), 'os.chdir', 'os.chdir', (["(home + 'charges_105')"], {}), "(home + 'charges_105')\n", (709, 731), False, 'import os\n'), ((677, 696), 'json.dump', 'json.dump', (['input', 'd'], {}), '(input, d)\n', (686, 696), False, 'import json\n')] |
# @file signtool_signer.py
# This module contains the abstracted signing interface for Windows Signtool.
# This interface abstraction takes in the signature_options and signer_options
# dictionaries that are used by capsule_tool and capsule_helper.
#
# Will attempt to locate a valid installation of Windows Signtool using the
# utility_functions provided by edk2toollib.
#
##
# Copyright (C) Microsoft Corporation
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import tempfile
import warnings
from edk2toollib.windows import locate_tools
from edk2toollib.utility_functions import RunCmd
GLOBAL_SIGNTOOL_PATH = None
SUPPORTED_SIGNATURE_TYPE_OPTIONS = {
'pkcs7': {'detachedSignedData', 'embedded', 'pkcs7DetachedSignedData'}
}
def get_signtool_path():
'''
helper function to locate a valid installation of Windows Signtool. Will
attempt to reuse a previously located version, since this call can be
lengthy
'''
global GLOBAL_SIGNTOOL_PATH
if GLOBAL_SIGNTOOL_PATH is None:
GLOBAL_SIGNTOOL_PATH = locate_tools.FindToolInWinSdk('signtool.exe')
return GLOBAL_SIGNTOOL_PATH
def sign(data: bytes, signature_options: dict, signer_options: dict) -> bytes:
'''
primary signing interface. Takes n the signature_options and signer_options
dictionaries that are used by capsule_tool and capsule_helper
'''
# NOTE: Currently, we only support the necessary options for capsules & Windows Firmware Policies
# The following _if_ clause handles the deprecated signature_option 'sign_alg' for backwards compatibility
# when the deprecated option is supplied, this code adds the new, required options based on prior code behavior
if 'sign_alg' in signature_options:
warnings.warn('Signature_option "sign_alg" is deprecated, use "type"', DeprecationWarning)
if signature_options['sign_alg'] == 'pkcs12':
# map legacy behavior to new options and backwards-compatible values
signature_options['type'] = 'pkcs7'
signature_options['type_options'] = {'detachedSignedData'}
signature_options['encoding'] = 'DER'
signer_options['key_file_format'] = 'pkcs12'
else:
raise ValueError(f"Unsupported signature algorithm: {signature_options['sign_alg']}!")
if signature_options['type'] != 'pkcs7':
raise ValueError(f"Unsupported signature type: {signature_options['type']}!")
for opt in signature_options['type_options']:
if opt not in SUPPORTED_SIGNATURE_TYPE_OPTIONS[signature_options['type']]:
raise ValueError(f"Unsupported type option: {opt}! Ensure you have provied a set")
mutually_exclusive_options = ('embedded', 'detachedSignedData', 'pkcs7DetachedSignedData')
option_found = None
for option in mutually_exclusive_options:
if option in signature_options['type_options']:
if option_found is None:
option_found = option
else:
raise ValueError("type_options '%s' and '%s' are mutually exclusive" % (option_found, option))
if signature_options['encoding'] != 'DER':
raise ValueError(f"Unsupported signature encoding: {signature_options['type']}!")
if signature_options['hash_alg'] != 'sha256':
raise ValueError(f"Unsupported hashing algorithm: {signature_options['hash_alg']}!")
if 'key_file' not in signer_options:
raise ValueError("Must supply a key_file in signer_options for Signtool!")
if signer_options['key_file_format'] != 'pkcs12':
raise ValueError(f"Unsupported key file format: {signer_options['key_file_format']}!")
# Set up a temp directory to hold input and output files.
temp_folder = tempfile.mkdtemp()
in_file_path = os.path.join(temp_folder, "data_to_sign.bin")
# Create the input file for Signtool.
in_file = open(in_file_path, 'wb')
in_file.write(data)
in_file.close()
# Start building the parameters for the call.
signtool_params = ['sign']
signtool_params += ['/fd', signature_options['hash_alg']]
if 'detachedSignedData' in signature_options['type_options']:
signtool_params += ['/p7ce', 'DetachedSignedData']
elif 'pkcs7DetachedSignedData' in signature_options['type_options']:
signtool_params += ['/p7ce', 'PKCS7DetachedSignedData']
elif 'embedded' in signature_options['type_options']:
signtool_params += ['/p7ce', 'Embedded']
else:
raise ValueError("For pkcs7, type_options must include either embedded or detachedSignedData")
signtool_params += ['/p7', f'"{temp_folder}"']
signtool_params += ['/f', f"\"{signer_options['key_file']}\""]
if 'oid' in signer_options:
signtool_params += ['/p7co', signer_options['oid']]
if 'eku' in signer_options:
signtool_params += ['/u', signer_options['eku']]
if 'key_pass' in signer_options:
signtool_params += ['/p', signer_options['key_pass']]
# Add basic options.
signtool_params += ['/debug', '/v', f'"{in_file_path}"']
# Make the call to Signtool.
ret = RunCmd(get_signtool_path(), " ".join(signtool_params))
if ret != 0:
raise RuntimeError(f"Signtool.exe returned with error: {ret}!")
# Load the data from the output file and return it.
out_file_path = os.path.join(temp_folder, "data_to_sign.bin.p7")
out_file = open(out_file_path, 'rb')
out_data = out_file.read()
out_file.close()
return out_data
def sign_in_place(sign_file_path, signature_options, signer_options):
'''
alternate module-specific signing interface to support particular signatures associated
with Windows capsule files (e.g. CAT files). Takes n the signature_options and signer_options
dictionaries that are used by capsule_tool and capsule_helper
'''
# NOTE: Currently, we only support the necessary algorithms for capsules.
if signature_options['sign_alg'] != 'pkcs12':
raise ValueError(f"Unsupported signature algorithm: {signature_options['sign_alg']}!")
if signature_options['hash_alg'] != 'sha256':
raise ValueError(f"Unsupported hashing algorithm: {signature_options['hash_alg']}!")
if 'key_file' not in signer_options:
raise ValueError("Must supply a key_file in signer_options for Signtool!")
# Start building the parameters for the call.
signtool_params = ['sign', '/a']
signtool_params += ['/fd', signature_options['hash_alg']]
signtool_params += ['/f', f"\"{signer_options['key_file']}\""]
# if 'oid' in signer_options:
# signtool_params += ['/p7co', signer_options['oid']]
# if 'eku' in signer_options:
# signtool_params += ['/u', signer_options['eku']]
if 'key_pass' in signer_options:
signtool_params += ['/p', signer_options['key_pass']]
# Add basic options.
signtool_params += ['/debug', '/v', f'"{sign_file_path}"']
# Make the call to Signtool.
ret = RunCmd(get_signtool_path(), " ".join(signtool_params))
if ret != 0:
raise RuntimeError(f"Signtool.exe returned with error: {ret}!")
| [
"warnings.warn",
"tempfile.mkdtemp",
"os.path.join",
"edk2toollib.windows.locate_tools.FindToolInWinSdk"
] | [((3744, 3762), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (3760, 3762), False, 'import tempfile\n'), ((3782, 3827), 'os.path.join', 'os.path.join', (['temp_folder', '"""data_to_sign.bin"""'], {}), "(temp_folder, 'data_to_sign.bin')\n", (3794, 3827), False, 'import os\n'), ((5329, 5377), 'os.path.join', 'os.path.join', (['temp_folder', '"""data_to_sign.bin.p7"""'], {}), "(temp_folder, 'data_to_sign.bin.p7')\n", (5341, 5377), False, 'import os\n'), ((1056, 1101), 'edk2toollib.windows.locate_tools.FindToolInWinSdk', 'locate_tools.FindToolInWinSdk', (['"""signtool.exe"""'], {}), "('signtool.exe')\n", (1085, 1101), False, 'from edk2toollib.windows import locate_tools\n'), ((1757, 1851), 'warnings.warn', 'warnings.warn', (['"""Signature_option "sign_alg" is deprecated, use "type\\""""', 'DeprecationWarning'], {}), '(\'Signature_option "sign_alg" is deprecated, use "type"\',\n DeprecationWarning)\n', (1770, 1851), False, 'import warnings\n')] |
import argo_workflow_tools.models.io.argoproj.workflow.v1alpha1 as argo
from argo_workflow_tools import dsl, WorkflowTemplate
@dsl.Task(image="python:3.10")
def say_hello(name: str):
return f"hello {name}"
@dsl.DAG()
def command_hello(name):
return say_hello(name)
def test_workflow_template():
workflow = WorkflowTemplate(
name="hello-world", entrypoint=command_hello, arguments={"name": "Brian"}
)
model = workflow.to_model()
assert model.kind == "WorkflowTemplate"
def test_workflow_template_labels_and_annotations():
workflow = WorkflowTemplate(
name="hello-world", entrypoint=command_hello, arguments={"name": "Brian"},
labels={'key1': 'val1'}, workflow_labels={'key2': 'val2'},
annotations={'key1': 'val1'}, workflow_annotations={'key2': 'val2'},
)
model = workflow.to_model()
assert model.metadata.labels['key1'] == 'val1'
assert model.spec.workflow_metadata.labels['key2'] == 'val2'
assert model.metadata.annotations['key1'] == 'val1'
assert model.spec.workflow_metadata.annotations['key2'] == 'val2'
def test_workflow_template_arguments():
workflow = WorkflowTemplate(
name="hello-world",
entrypoint=command_hello,
arguments=[argo.Parameter(name="name", value="Brian", enum=["Brian", "Joe"])],
)
model = workflow.to_model()
assert model.spec.arguments.parameters[0].enum == ["Brian", "Joe"]
| [
"argo_workflow_tools.models.io.argoproj.workflow.v1alpha1.Parameter",
"argo_workflow_tools.WorkflowTemplate",
"argo_workflow_tools.dsl.Task",
"argo_workflow_tools.dsl.DAG"
] | [((129, 158), 'argo_workflow_tools.dsl.Task', 'dsl.Task', ([], {'image': '"""python:3.10"""'}), "(image='python:3.10')\n", (137, 158), False, 'from argo_workflow_tools import dsl, WorkflowTemplate\n'), ((215, 224), 'argo_workflow_tools.dsl.DAG', 'dsl.DAG', ([], {}), '()\n', (222, 224), False, 'from argo_workflow_tools import dsl, WorkflowTemplate\n'), ((324, 420), 'argo_workflow_tools.WorkflowTemplate', 'WorkflowTemplate', ([], {'name': '"""hello-world"""', 'entrypoint': 'command_hello', 'arguments': "{'name': 'Brian'}"}), "(name='hello-world', entrypoint=command_hello, arguments={\n 'name': 'Brian'})\n", (340, 420), False, 'from argo_workflow_tools import dsl, WorkflowTemplate\n'), ((576, 808), 'argo_workflow_tools.WorkflowTemplate', 'WorkflowTemplate', ([], {'name': '"""hello-world"""', 'entrypoint': 'command_hello', 'arguments': "{'name': 'Brian'}", 'labels': "{'key1': 'val1'}", 'workflow_labels': "{'key2': 'val2'}", 'annotations': "{'key1': 'val1'}", 'workflow_annotations': "{'key2': 'val2'}"}), "(name='hello-world', entrypoint=command_hello, arguments={\n 'name': 'Brian'}, labels={'key1': 'val1'}, workflow_labels={'key2':\n 'val2'}, annotations={'key1': 'val1'}, workflow_annotations={'key2':\n 'val2'})\n", (592, 808), False, 'from argo_workflow_tools import dsl, WorkflowTemplate\n'), ((1257, 1322), 'argo_workflow_tools.models.io.argoproj.workflow.v1alpha1.Parameter', 'argo.Parameter', ([], {'name': '"""name"""', 'value': '"""Brian"""', 'enum': "['Brian', 'Joe']"}), "(name='name', value='Brian', enum=['Brian', 'Joe'])\n", (1271, 1322), True, 'import argo_workflow_tools.models.io.argoproj.workflow.v1alpha1 as argo\n')] |
from _ctypes import ArgumentError
import zahlwort2num as w2n
import sys
def main():
if(sys.argv[1]):
print(w2n.convert(sys.argv[1]))
else:
raise ArgumentError('No parameter given!') | [
"_ctypes.ArgumentError",
"zahlwort2num.convert"
] | [((171, 207), '_ctypes.ArgumentError', 'ArgumentError', (['"""No parameter given!"""'], {}), "('No parameter given!')\n", (184, 207), False, 'from _ctypes import ArgumentError\n'), ((121, 145), 'zahlwort2num.convert', 'w2n.convert', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (132, 145), True, 'import zahlwort2num as w2n\n')] |
import smart_imports
smart_imports.all()
class AccountError(utils_exceptions.TheTaleError):
MSG = 'account error'
class UnkwnownAchievementTypeError(AccountError):
MSG = 'unknown achievement type: %(achievement_type)r'
class EmailAndPasswordError(AccountError):
MSG = 'email & password must be specified or not specified together'
class BotIsFastError(AccountError):
MSG = 'can not cant fast account for bot'
class ChangeCredentialsError(AccountError):
MSG = 'change credentials error'
class MailNotSpecifiedForFastAccountError(ChangeCredentialsError):
MSG = 'new_email must be specified for fast account'
class PasswordNotSpecifiedForFastAccountError(ChangeCredentialsError):
MSG = 'password must be specified for fast account'
class NickNotSpecifiedForFastAccountError(ChangeCredentialsError):
MSG = 'nick must be specified for fast account'
class NewEmailNotSpecifiedError(ChangeCredentialsError):
MSG = 'email not specified'
| [
"smart_imports.all"
] | [((23, 42), 'smart_imports.all', 'smart_imports.all', ([], {}), '()\n', (40, 42), False, 'import smart_imports\n')] |
from PIL import Image # importing packages
import face_recognition
image = face_recognition.load_image_file('group.jpg') # reading group photo
face_locations = face_recognition.face_locations(image)
for face_location in face_locations:
top, right, bottom, left = face_location
face_image = image[top:bottom, left:right]
pil_image = Image.fromarray(face_image)
#pil_image.show()
pil_image.save(f'faces/{top}.jpg') # to save faces
| [
"PIL.Image.fromarray",
"face_recognition.face_locations",
"face_recognition.load_image_file"
] | [((82, 127), 'face_recognition.load_image_file', 'face_recognition.load_image_file', (['"""group.jpg"""'], {}), "('group.jpg')\n", (114, 127), False, 'import face_recognition\n'), ((169, 207), 'face_recognition.face_locations', 'face_recognition.face_locations', (['image'], {}), '(image)\n', (200, 207), False, 'import face_recognition\n'), ((355, 382), 'PIL.Image.fromarray', 'Image.fromarray', (['face_image'], {}), '(face_image)\n', (370, 382), False, 'from PIL import Image\n')] |
# Generated by Django 3.2.7 on 2021-10-12 09:20
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('goods', '0006_auto_20211012_0812'),
]
operations = [
migrations.AlterUniqueTogether(
name='pharmproduct',
unique_together={('trade_name', 'maker', 'original_packing', 'dosage_packing')},
),
]
| [
"django.db.migrations.AlterUniqueTogether"
] | [((225, 362), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""pharmproduct"""', 'unique_together': "{('trade_name', 'maker', 'original_packing', 'dosage_packing')}"}), "(name='pharmproduct', unique_together={(\n 'trade_name', 'maker', 'original_packing', 'dosage_packing')})\n", (255, 362), False, 'from django.db import migrations\n')] |
"""Low level module for IgBLAST api calls
You probably only want to interact with this module through airr as the database files are extremely tricky to get right
"""
from __future__ import annotations
import glob
import logging
import os
import subprocess
import tempfile
import warnings
import semantic_version
from multiprocessing import cpu_count
from pathlib import Path
from typing import Any, List, Union
# Third party
import pandas as pd
# package/module level
from sadie.utility.util import is_tool
from sadie.airr.airrtable.constants import IGBLAST_AIRR
from sadie.airr.exceptions import (
BadIgBLASTArgument,
BadIgBLASTExe,
BadIgDATA,
MissingIgBLASTArgument,
EmtpyFileError,
IgBLASTRunTimeError,
)
# get logger in global scope
logger = logging.getLogger("IgBLAST")
def ensure_prefix_to(path: Union[str, Path]) -> Union[Path, bool]:
"""Ensure that the blast db is actually a blast like database
The problem is that a blast_db takes in a prefix file
ex. /path/to/blast/human_V
which is a file or path that does not actually exists, but blasts uses it as a file glob to match
/path/to/blast/human_V.nod
/path/to/blast/human_V.nsq
/path/to/blast/human_V.fasta
because of this, we don't have validated file path but an actual file path glob. This method validates that the file glob returns blast like files
Parameters
----------
path : str
path glob
Returns
-------
return : Union[str, bool]
returns Path or False if not a file glob
"""
# convert Path to str
path = str(path)
directory_path = os.path.dirname(path)
if not os.path.exists(directory_path):
return False
# get abs path to directory
directory_path = os.path.abspath(directory_path)
# base name
basename = os.path.basename(path)
glob_path = os.path.join(directory_path, basename) + "*"
# make sure that there are things that match this queyr
if not glob.glob(glob_path):
return False
return Path(os.path.join(directory_path, basename))
class IgBLASTArgument:
    """A single igblastn command-line argument.

    Wraps the internal name, the blast CLI key (e.g. ``germline_db_V``),
    the value, and whether the argument is required at runtime.

    The former no-op property getters/setters were removed: they added no
    validation or computation, so plain attributes expose the identical
    public interface (``name``, ``key``, ``value``, ``required``).
    """

    def __init__(self, name: str, arg_key: str, arg_value: Union[str, int, bool, Path], required: bool):
        """IgBLASTArgument Class constructor

        Parameters
        ----------
        name : str
            the internal name for the computer
        arg_key : str
            the argument key, ex. -germline_db_V
        arg_value : Union[str, int, bool]
            the value for the argument /path/germline/db/V
        required : bool
            is the argument required
        """
        self.name = name
        self.key = arg_key
        self.value = arg_value
        self.required = required

    def get_formatted_blast_arg(self) -> List[str]:
        """Return the blast formatted argument as a list.

        Returns
        -------
        List[str]
            ``['-key']`` for a truthy boolean flag, ``['-key', 'value']`` for
            a set key/value argument, or ``[]`` when the value is unset/falsy.
        """
        # Boolean values are flags: emit the key alone, and only when True.
        if isinstance(self.value, bool):
            return ["-" + self.key] if self.value else []
        # Non-boolean values are key/value pairs, emitted only when set.
        if self.value:
            return ["-" + str(self.key), str(self.value)]
        return []  # return an empty list if the value has not been set

    def __str__(self) -> str:
        return "{}-{}".format(self.name, self.key)
# Types accepted/returned wherever a blast argument is handled: either the
# IgBLASTArgument wrapper itself or a raw value that a setter will wrap.
ArgumentType = Union[IgBLASTArgument, int, str, Path]
class IgBLASTN:
    """IgBLASTN

    A wrapper around the ``igblastn`` executable for immunoglobulin (IG) and
    T cell receptor (TR) V domain nucleotide sequences.
    This is a lower level class and you should probably use sadie.airr to interact

    Examples
    --------
    >>> ig_blast = igblast.IgBLASTN('igblastn')
    >>> germline_ref = "reference/germlines/"
    >>> db_ref = "reference/germlines/blastdb/Ig"
    >>> aux_path = "reference/germlines/aux_data"

    # Set data
    >>> ig_blast.igdata = germline_ref

    >>> query = "fasta_inputs/PG9_H.fasta"
    >>> ig_blast.germline_db_v = os.path.join(db_ref, "human/human_V")
    >>> ig_blast.germline_db_d = os.path.join(db_ref, "human/human_D")
    >>> ig_blast.germline_db_j = os.path.join(db_ref, "human/human_J")
    >>> ig_blast.aux_path = os.path.join(aux_path, "human_gl.aux")
    >>> ig_blast.organism = "human"
    >>> csv_dataframe = ig_blast.run(query)
    """

    # Only allow these attributes; misspelled assignments fail fast.
    __slots__ = [
        "_executable",
        "_version",
        "_min_d_match",
        "_num_v",
        "_num_d",
        "_num_j",
        "_outfmt",
        "_receptor",
        "_word_size",
        "_nomenclature",
        "_gap_open",
        "_gap_extend",
        "_num_threads",
        "_extend_5",
        "_extend_3",
        "_j_penalty",
        "_v_penalty",
        "_d_penalty",
        "_organism",
        "_germline_db_v",
        "_germline_db_d",
        "_germline_db_j",
        "_aux_path",
        "_igdata",
        "_temp_dir",
        "_allow_vdj_overlap",
    ]

    def __init__(self, executable: Union[Path, str] = "igblastn"):
        """IgBLASTN with a query. Set everything up with a setter"""
        # set the executable dynamically; this also probes `igblastn -version`
        self.executable = Path(executable)
        self._version = self._get_version()

        # setup all the default values if we don't add them
        self.min_d_match = 5
        self.num_v = 3
        self.num_d = 3
        self.num_j = 3
        self.outfmt = 19
        self.receptor = "Ig"
        self.word_size = 5
        self.nomenclature = "imgt"
        self.gap_open = 5
        self.gap_extend = 2
        self.num_threads = cpu_count()
        self.extend_5 = True
        self.extend_3 = True
        self.j_penalty = -2
        self.v_penalty = -1
        self.d_penalty = -1
        self.allow_vdj_overlap = False

        # Make these blank, if they are not set by the caller, then we will
        # complain during runtime. They must be set dynamically.
        self._organism = IgBLASTArgument("organism", "organism", "", True)
        self._germline_db_v = IgBLASTArgument("germline_db_v", "germline_db_V", "", True)
        self._germline_db_d = IgBLASTArgument("germline_db_d", "germline_db_D", "", True)
        self._germline_db_j = IgBLASTArgument("germline_db_j", "germline_db_J", "", True)
        self._aux_path = IgBLASTArgument("aux_path", "auxiliary_data", "", True)

        # IGDATA is not an official blast argument, it is an environment variable
        self._igdata = Path(".")
        self.temp_dir = Path(".")

    def _get_version(self) -> semantic_version.Version:
        """Private method to parse igblast -version and get semantic_version

        Returns
        -------
        semantic_version.Version
            the igblast version
        """
        process = subprocess.run([self.executable, "-version"], capture_output=True)
        stdout = process.stdout.decode("utf-8")
        if process.stderr:
            logger.error(
                f"{self.executable} returned an error when checking version. Tried igblastn -version: {process.stderr.decode('utf-8')}"
            )
            raise BadIgBLASTExe(self.executable, process.stderr.decode("utf-8"))
        # The version is the last ':'-separated field of the first output line.
        version = stdout.split("\n")[0].split(":")[-1].strip()
        try:
            version = semantic_version.Version(version)
        except ValueError:
            raise BadIgBLASTExe(self.executable, f"semantic version can't parse {version}")
        return version

    @property
    def version(self) -> semantic_version.Version:
        """The parsed igblastn version."""
        return self._version

    @property
    def executable(self) -> Path:
        """The igblastn executable

        Returns
        -------
        Path
            igblastn path
        """
        return self._executable

    @executable.setter
    def executable(self, exe: Path) -> None:
        if isinstance(exe, str):
            exe = Path(exe)
        self._executable = exe

    @property
    def temp_dir(self) -> Path:
        """The path to the tempdata directory for splitting igblast

        Returns
        -------
        Path
            A valid temporary directory path
        """
        return self._temp_dir

    @temp_dir.setter
    def temp_dir(self, data: Union[Path, str]) -> None:
        if isinstance(data, str):
            data = Path(data)
        self._temp_dir = data.absolute()
        if not os.access(self._temp_dir, os.W_OK):
            raise IOError(self._temp_dir, "Unable to write to temp dir")

    @property
    def igdata(self) -> Path:
        """The path to IGDATA which contains the internal_data igblastn needs

        Returns
        -------
        Path
            A valid IGDATA path
        """
        return Path(self._igdata)

    @igdata.setter
    def igdata(self, data: Union[Path, str]) -> None:
        if isinstance(data, str):
            data = Path(data)
        if not data.exists() or not data.is_dir():
            raise BadIgDATA(data)
        self._igdata = Path(data.absolute())

    @property
    def min_d_match(self) -> ArgumentType:
        """Required minimal consecutive nucleotide base matches for D genes

        Returns
        -------
        IgBLASTArgument
        """
        return self._min_d_match

    @min_d_match.setter
    def min_d_match(self, d: int) -> None:
        # BUGFIX: was `and`, which made the validation unreachable.
        if not isinstance(d, int) or d < 5:
            raise BadIgBLASTArgument(d, ">=5")
        self._min_d_match = IgBLASTArgument("min_d_match", "min_D_match", d, False)

    @property
    def num_v(self) -> ArgumentType:
        """
        Number of Germline sequences to show alignments for

        Returns
        -------
        IgBLASTArgument
        """
        return self._num_v

    @num_v.setter
    def num_v(self, v: int) -> None:
        if not isinstance(v, int):
            raise BadIgBLASTArgument(v, int)
        self._num_v = IgBLASTArgument("num_v", "num_alignments_V", v, False)

    @property
    def num_d(self) -> ArgumentType:
        """
        Number of Germline sequences to show alignments for D gene

        Returns
        -------
        IgBLASTArgument
        """
        return self._num_d

    @num_d.setter
    def num_d(self, d: int) -> None:
        if not isinstance(d, int):
            raise BadIgBLASTArgument(d, int)
        self._num_d = IgBLASTArgument("num_d", "num_alignments_D", d, False)

    @property
    def num_j(self) -> ArgumentType:
        """
        Number of Germline sequences to show alignments for J gene

        Returns
        -------
        IgBLASTArgument
        """
        return self._num_j

    @num_j.setter
    def num_j(self, j: int) -> None:
        if not isinstance(j, int):
            raise BadIgBLASTArgument(j, int)
        self._num_j = IgBLASTArgument("num_j", "num_alignments_J", j, False)

    @property
    def organism(self) -> ArgumentType:
        """The organism for your query sequence.

        Returns
        -------
        IgBLASTArgument
        """
        return self._organism

    @organism.setter
    def organism(self, o: str) -> None:
        """Organism

        Parameters
        ----------
        o : str
            an organism string

        Raises
        ------
        BadIgBLASTArgument
            if igblast is not a str
        """
        # I don't want to hardcode in the organisms here.
        # I will handle that logic at a higher level,
        # this is because blast has no preset organisms and it's all about
        # the v,d,j blast paths which are set dynamically
        if not isinstance(o, str):
            raise BadIgBLASTArgument(o, str)
        self._organism = IgBLASTArgument("organism", "organism", o, True)

    @property
    def outfmt(self) -> ArgumentType:
        """alignment view options:
        3 = Flat query-anchored, show identities,
        4 = Flat query-anchored, no identities,
        7 = Tabular with comment lines
        19 = Rearrangement summary report (AIRR format)

        Returns
        -------
        IgBLASTArgument
        """
        return self._outfmt

    @outfmt.setter
    def outfmt(self, fmt: int) -> None:
        # only accept 19 (AIRR) for now -- run_file parses the output as AIRR TSV
        if fmt != 19:
            raise BadIgBLASTArgument(fmt, 19)
        self._outfmt = IgBLASTArgument("outfmt", "outfmt", fmt, True)

    @property
    def receptor(self) -> ArgumentType:
        """
        Specify Ig or T cell receptor sequence

        Returns
        -------
        IgBLASTArgument
        """
        return self._receptor

    @receptor.setter
    def receptor(self, r: str) -> None:
        # BUGFIX: was `and`, which accepted any string.
        if not isinstance(r, str) or r not in ("Ig", "TCR"):
            raise BadIgBLASTArgument(r, ["Ig", "TCR"])
        self._receptor = IgBLASTArgument("receptor", "ig_seqtype", r, True)

    @property
    def nomenclature(self) -> ArgumentType:
        """Domain system to be used for segment annotation

        Returns
        -------
        IgBLASTArgument
        """
        return self._nomenclature

    @nomenclature.setter
    def nomenclature(self, system: str) -> None:
        if system.lower() not in ["imgt", "kabat"]:
            raise BadIgBLASTArgument(system, "['imgt','kabat']")
        self._nomenclature = IgBLASTArgument("nomenclature", "domain_system", system, True)

    @property
    def aux_path(self) -> ArgumentType:
        """Auxiliary data path. This is needed to look up the J genes and tell them when the CDR3 stops.

        Returns
        -------
        IgBLASTArgument
        """
        return self._aux_path

    @aux_path.setter
    def aux_path(self, aux_path: Path | str) -> None:
        if isinstance(aux_path, str):
            aux_path = Path(aux_path)
        if not aux_path.exists():
            raise BadIgBLASTArgument(aux_path, "valid path to Auxilary database")
        self._aux_path = IgBLASTArgument("aux_path", "auxiliary_data", aux_path.absolute(), True)

    @property
    def germline_db_v(self) -> ArgumentType:
        """Path to V gene database prefix

        Returns
        -------
        IgBLASTArgument
        """
        return self._germline_db_v

    @germline_db_v.setter
    def germline_db_v(self, path: str | Path) -> None:
        # `path` is a blast db *prefix*, so check for matching files rather than the path itself
        abs_path = ensure_prefix_to(path)
        if not abs_path:
            raise BadIgBLASTArgument(path, "Valid path to V Database")
        self._germline_db_v = IgBLASTArgument("germline_db_v", "germline_db_V", path, True)

    @property
    def germline_db_d(self) -> ArgumentType:
        """Path to D gene database prefix

        Returns
        -------
        IgBLASTArgument
        """
        return self._germline_db_d

    @germline_db_d.setter
    def germline_db_d(self, path: str | Path) -> None:
        abs_path = ensure_prefix_to(path)
        if not abs_path:
            # D database is optional (e.g. light chains): warn instead of raising
            warnings.warn(f"{path} is not found, No D gene segment", UserWarning)
            self._germline_db_d = IgBLASTArgument("germline_db_d", "germline_db_D", "", False)
        else:
            self._germline_db_d = IgBLASTArgument("germline_db_d", "germline_db_D", path, True)

    @property
    def germline_db_j(self) -> ArgumentType:
        """Path to J gene database prefix

        Returns
        -------
        IgBLASTArgument
        """
        return self._germline_db_j

    @germline_db_j.setter
    def germline_db_j(self, path: str | Path) -> None:
        abs_path = ensure_prefix_to(path)
        if not abs_path:
            raise BadIgBLASTArgument(path, "Valid path to J Database")
        self._germline_db_j = IgBLASTArgument("germline_db_j", "germline_db_J", path, True)

    @property
    def word_size(self) -> ArgumentType:
        """Word size for wordfinder algorithm (length of best perfect match)

        Returns
        -------
        IgBLASTArgument
        """
        return self._word_size

    @word_size.setter
    def word_size(self, word_size: int) -> None:
        # BUGFIX: was `and`, which made the validation unreachable.
        if not isinstance(word_size, int) or word_size < 4:
            raise BadIgBLASTArgument(word_size, ">=4")
        self._word_size = IgBLASTArgument("word_size", "word_size", word_size, False)

    @property
    def gap_open(self) -> ArgumentType:
        """Cost to open a gap

        Returns
        -------
        IgBLASTArgument
        """
        return self._gap_open

    @gap_open.setter
    def gap_open(self, go: int) -> None:
        # BUGFIX: was `and go > 0`, which made the validation unreachable
        # (and would have rejected valid positive costs).
        if not isinstance(go, int) or go < 0:
            raise BadIgBLASTArgument(go, ">=0")
        self._gap_open = IgBLASTArgument("gap_open", "gapopen", go, False)

    @property
    def gap_extend(self) -> ArgumentType:
        """Cost to extend a gap

        Returns
        -------
        IgBLASTArgument
        """
        return self._gap_extend

    @gap_extend.setter
    def gap_extend(self, ge: int) -> None:
        # BUGFIX: was `and ge > 0` (unreachable) and the argument was
        # mis-labelled internally as "gap_open".
        if not isinstance(ge, int) or ge < 0:
            raise BadIgBLASTArgument(ge, ">=0")
        self._gap_extend = IgBLASTArgument("gap_extend", "gapextend", ge, False)

    @property
    def num_threads(self) -> ArgumentType:
        """
        Number of threads (CPUs) to use in the BLAST search

        Returns
        -------
        IgBLASTArgument
        """
        return self._num_threads

    @num_threads.setter
    def num_threads(self, num_threads: int) -> None:
        if num_threads > cpu_count():
            raise BadIgBLASTArgument(num_threads, "<=" + str(cpu_count()))
        self._num_threads = IgBLASTArgument("num_threads", "num_threads", num_threads, False)

    @property
    def extend_5(self) -> ArgumentType:
        """Extend V gene alignment at 5' end

        Returns
        -------
        IgBLASTArgument
        """
        return self._extend_5

    @extend_5.setter
    def extend_5(self, extend_5: bool) -> None:
        self._extend_5 = IgBLASTArgument("extend_5", "extend_align5end", extend_5, False)

    @property
    def extend_3(self) -> ArgumentType:
        """Extend V gene alignment at 3' end

        Returns
        -------
        IgBLASTArgument
        """
        return self._extend_3

    @extend_3.setter
    def extend_3(self, extend_3: bool) -> None:
        self._extend_3 = IgBLASTArgument("extend_3", "extend_align3end", extend_3, False)

    @property
    def allow_vdj_overlap(self) -> Any:
        """Allow the VDJ overlap

        This option is active only when D_penalty
        and J_penalty are set to -4 and -3, respectively

        Returns
        -------
        IgBLASTArgument
        """
        return self._allow_vdj_overlap  # type: ignore[has-type]

    @allow_vdj_overlap.setter
    def allow_vdj_overlap(self, allow: bool) -> None:
        j_penalty: IgBLASTArgument = self.j_penalty  # type: ignore[assignment]
        d_penalty: IgBLASTArgument = self.d_penalty  # type: ignore[assignment]
        # BUGFIX: warn when *either* penalty is off (was `and`, so the
        # warning fired only when both were wrong).
        if allow and (j_penalty.value != -3 or d_penalty.value != -4):
            warnings.warn(
                f"Allows vdj overlap set but j penalty and d penalty need to be -3 and -4, now are {self.j_penalty}, {self.d_penalty}",
                UserWarning,
            )
        self._allow_vdj_overlap = IgBLASTArgument("allow_vdj_overlap", "allow_vdj_overlap", allow, False)

    @property
    def d_penalty(self) -> ArgumentType:
        """What is the D gene penalty

        Returns
        -------
        IgBLASTArgument
        """
        return self._d_penalty

    @d_penalty.setter
    def d_penalty(self, penalty: int) -> None:
        if not -5 < penalty < 1:
            raise BadIgBLASTArgument(penalty, "must be less than 0 and greater than -5")
        self._d_penalty = IgBLASTArgument("d_penalty", "D_penalty", penalty, True)

    @property
    def j_penalty(self) -> ArgumentType:
        """What is the J gene penalty

        Returns
        -------
        IgBLASTArgument
        """
        return self._j_penalty

    @j_penalty.setter
    def j_penalty(self, penalty: int) -> None:
        if not -4 < penalty < 1:
            raise BadIgBLASTArgument(penalty, "must be less than 0 and greater than -4")
        self._j_penalty = IgBLASTArgument("j_penalty", "J_penalty", penalty, True)

    @property
    def v_penalty(self) -> ArgumentType:
        """What is the V gene penalty

        Returns
        -------
        IgBLASTArgument
        """
        return self._v_penalty

    @v_penalty.setter
    def v_penalty(self, penalty: int) -> None:
        if not -5 < penalty < 1:
            raise BadIgBLASTArgument(penalty, "must be less than 0 and greater than -5")
        self._v_penalty = IgBLASTArgument("v_penalty", "V_penalty", penalty, True)

    @property
    def arguments(self) -> List[IgBLASTArgument]:
        """return a list of IgBLASTArguments

        Returns
        -------
        List[IgBLASTArgument]
        """
        # lots of type ignores since these are IgBLASTArguments set in the
        # setter, but are read from the property
        return [
            self.min_d_match,  # type: ignore
            self.num_v,  # type: ignore
            self.num_j,  # type: ignore
            self.num_d,  # type: ignore
            self.organism,  # type: ignore
            self.receptor,  # type: ignore
            self.germline_db_v,  # type: ignore
            self.germline_db_d,  # type: ignore
            self.germline_db_j,  # type: ignore
            self.aux_path,  # type: ignore
            self.outfmt,  # type: ignore
            self.nomenclature,  # type: ignore
            self.word_size,  # type: ignore
            self.gap_open,  # type: ignore
            self.gap_extend,  # type: ignore
            self.j_penalty,  # type: ignore
            self.v_penalty,  # type: ignore
            self.d_penalty,  # type: ignore
            self.num_threads,  # type: ignore
            self.extend_5,  # type: ignore
            self.extend_3,  # type: ignore
            self.allow_vdj_overlap,
        ]

    @property
    def cmd(self) -> List[str]:
        """Return the blast cmd that will be run by subprocess"""
        _cmd = [str(self.executable)]
        for blast_arg in self.arguments:
            kv = blast_arg.get_formatted_blast_arg()  # empty for unset values / False flags
            if kv:
                _cmd += kv
        return _cmd

    def pre_check(self) -> None:
        """Ensures we have set everything right

        Raises
        ------
        MissingIgBLASTArgument
            We have set the IGDATA field
        BadIgBLASTExe
            Correct IGblast executable
        BadIgDATA
            If any of the fields are not set properly
        """
        # Ensure required arguments were set
        for blast_arg in self.arguments:
            if blast_arg.required and not (blast_arg.value):
                raise MissingIgBLASTArgument(f"Missing Blast argument. Need to set IgBLASTN.{blast_arg.name}")
        # Check the executable
        if not is_tool(str(self.executable)):
            raise BadIgBLASTExe(self.executable, "Is not an executable tool")
        if not self.igdata:
            raise BadIgDATA("No IGDATA set, set with IgBLASTN.igdata")
        else:
            if not os.path.exists(self.igdata):
                raise BadIgDATA(self.igdata)

    # Run methods
    def run_file(self, file: Union[Path, str]) -> pd.DataFrame:
        """Run IgBlast on a file

        Parameters
        ----------
        file : Path
            the fasta file path

        Returns
        -------
        pd.DataFrame
            A dataframe with the IgBLAST results

        Raises
        ------
        EmtpyFileError
            if the fasta file is empty
        IgBLASTRunTimeError
            for any given runtime error for igblastn
        """
        # because igblast uses IGDATA as the internal file structure, we should
        # pass the environment to the subprocess
        local_env = os.environ.copy()
        local_env["IGDATA"] = str(self.igdata)

        # we want to ensure they actually passed a file with stuff in it
        if os.path.getsize(file) == 0:
            raise EmtpyFileError(file)

        # take the cmd and finally add the query file
        cmd = self.cmd
        cmd += ["-query", str(file)]

        # run a precheck to make sure everything passed was working
        self.pre_check()

        # while we can certainly do this as an output stream on stdout,
        # it's probably best to take advantage of IGblast output and tempfile
        with tempfile.NamedTemporaryFile(dir=self.temp_dir, suffix="_igblast.tsv") as tmpfile:
            cmd += ["-out", tmpfile.name]
            process = subprocess.run(cmd, env=local_env, capture_output=True)
            if process.stderr:
                raise IgBLASTRunTimeError(process.stderr)
            # we read the dataframe from the tempfile, it should always be in .TSV.
            # We can also cast it to IGBLAST_AIRR dtypes to save memory
            df = pd.read_csv(tmpfile.name, sep="\t", dtype=IGBLAST_AIRR)
        # BUGFIX: do not unlink the file manually inside the with-block --
        # NamedTemporaryFile deletes it on close, and the manual unlink caused
        # a double-delete when the context manager exited.

        # igblast reports identities as percentages; normalize to fractions
        df["v_identity"] = df["v_identity"] / 100
        df["d_identity"] = df["d_identity"] / 100
        df["j_identity"] = df["j_identity"] / 100
        return df

    def __repr__(self) -> str:
        return "IgBLAST: env IGDATA={} {}".format(str(self.igdata), " ".join(self.cmd))

    def __str__(self) -> str:
        return self.__repr__()
if __name__ == "__main__":
    # Smoke check: constructing IgBLASTN runs `igblastn -version` via _get_version().
    ig_blast = IgBLASTN()
| [
"pandas.read_csv",
"os.environ.copy",
"sadie.airr.exceptions.IgBLASTRunTimeError",
"pathlib.Path",
"sadie.airr.exceptions.EmtpyFileError",
"glob.glob",
"sadie.airr.exceptions.BadIgDATA",
"os.path.join",
"multiprocessing.cpu_count",
"os.path.abspath",
"os.path.dirname",
"os.path.exists",
"os.... | [((777, 805), 'logging.getLogger', 'logging.getLogger', (['"""IgBLAST"""'], {}), "('IgBLAST')\n", (794, 805), False, 'import logging\n'), ((1627, 1648), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (1642, 1648), False, 'import os\n'), ((1767, 1798), 'os.path.abspath', 'os.path.abspath', (['directory_path'], {}), '(directory_path)\n', (1782, 1798), False, 'import os\n'), ((1831, 1853), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (1847, 1853), False, 'import os\n'), ((1660, 1690), 'os.path.exists', 'os.path.exists', (['directory_path'], {}), '(directory_path)\n', (1674, 1690), False, 'import os\n'), ((1870, 1908), 'os.path.join', 'os.path.join', (['directory_path', 'basename'], {}), '(directory_path, basename)\n', (1882, 1908), False, 'import os\n'), ((1986, 2006), 'glob.glob', 'glob.glob', (['glob_path'], {}), '(glob_path)\n', (1995, 2006), False, 'import glob\n'), ((2045, 2083), 'os.path.join', 'os.path.join', (['directory_path', 'basename'], {}), '(directory_path, basename)\n', (2057, 2083), False, 'import os\n'), ((6505, 6521), 'pathlib.Path', 'Path', (['executable'], {}), '(executable)\n', (6509, 6521), False, 'from pathlib import Path\n'), ((6922, 6933), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (6931, 6933), False, 'from multiprocessing import cpu_count\n'), ((7768, 7777), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (7772, 7777), False, 'from pathlib import Path\n'), ((7802, 7811), 'pathlib.Path', 'Path', (['"""."""'], {}), "('.')\n", (7806, 7811), False, 'from pathlib import Path\n'), ((8074, 8140), 'subprocess.run', 'subprocess.run', (["[self.executable, '-version']"], {'capture_output': '(True)'}), "([self.executable, '-version'], capture_output=True)\n", (8088, 8140), False, 'import subprocess\n'), ((10009, 10027), 'pathlib.Path', 'Path', (['self._igdata'], {}), '(self._igdata)\n', (10013, 10027), False, 'from pathlib import Path\n'), ((25189, 25206), 'os.environ.copy', 
'os.environ.copy', ([], {}), '()\n', (25204, 25206), False, 'import os\n'), ((8581, 8614), 'semantic_version.Version', 'semantic_version.Version', (['version'], {}), '(version)\n', (8605, 8614), False, 'import semantic_version\n'), ((9172, 9181), 'pathlib.Path', 'Path', (['exe'], {}), '(exe)\n', (9176, 9181), False, 'from pathlib import Path\n'), ((9590, 9600), 'pathlib.Path', 'Path', (['data'], {}), '(data)\n', (9594, 9600), False, 'from pathlib import Path\n'), ((9657, 9691), 'os.access', 'os.access', (['self._temp_dir', 'os.W_OK'], {}), '(self._temp_dir, os.W_OK)\n', (9666, 9691), False, 'import os\n'), ((10155, 10165), 'pathlib.Path', 'Path', (['data'], {}), '(data)\n', (10159, 10165), False, 'from pathlib import Path\n'), ((10235, 10250), 'sadie.airr.exceptions.BadIgDATA', 'BadIgDATA', (['data'], {}), '(data)\n', (10244, 10250), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((10663, 10690), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['d', '""">5"""'], {}), "(d, '>5')\n", (10681, 10690), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((11107, 11133), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['v', 'int'], {}), '(v, int)\n', (11125, 11133), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((11550, 11576), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['d', 'int'], {}), '(d, int)\n', (11568, 11576), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((11993, 12019), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['j', 'int'], {}), '(j, int)\n', (12011, 12019), 
False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((12861, 12887), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['o', 'str'], {}), '(o, str)\n', (12879, 12887), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((13489, 13516), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['fmt', '(19)'], {}), '(fmt, 19)\n', (13507, 13516), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((13944, 13980), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['r', "['Ig', 'TCR']"], {}), "(r, ['Ig', 'TCR'])\n", (13962, 13980), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((14423, 14468), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['system', '"""[\'imgt\',\'kaba\']"""'], {}), '(system, "[\'imgt\',\'kaba\']")\n', (14441, 14468), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((14955, 14969), 'pathlib.Path', 'Path', (['aux_path'], {}), '(aux_path)\n', (14959, 14969), False, 'from pathlib import Path\n'), ((15022, 15085), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['aux_path', '"""valid path to Auxilary database"""'], {}), "(aux_path, 'valid path to Auxilary database')\n", (15040, 15085), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((15557, 15609), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['path', '"""Valid path to V Database"""'], {}), 
"(path, 'Valid path to V Database')\n", (15575, 15609), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((16069, 16138), 'warnings.warn', 'warnings.warn', (['f"""{path} is not found, No D gene segment"""', 'UserWarning'], {}), "(f'{path} is not found, No D gene segment', UserWarning)\n", (16082, 16138), False, 'import warnings\n'), ((16791, 16843), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['path', '"""Valid path to J Database"""'], {}), "(path, 'Valid path to J Database')\n", (16809, 16843), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((17320, 17355), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['word_size', '""">4"""'], {}), "(word_size, '>4')\n", (17338, 17355), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((17754, 17782), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['go', '""">0"""'], {}), "(go, '>0')\n", (17772, 17782), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((18180, 18208), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['ge', '""">0"""'], {}), "(ge, '>0')\n", (18198, 18208), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((18627, 18638), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (18636, 18638), False, 'from multiprocessing import cpu_count\n'), ((20183, 20339), 'warnings.warn', 'warnings.warn', (['f"""Allows vdj overlap set but j penalty and d penalty need to be -3 and -4, now are {self.j_penalty}, {self.d_penalty}"""', 
'UserWarning'], {}), "(\n f'Allows vdj overlap set but j penalty and d penalty need to be -3 and -4, now are {self.j_penalty}, {self.d_penalty}'\n , UserWarning)\n", (20196, 20339), False, 'import warnings\n'), ((20799, 20869), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['penalty', '"""must be less than 0 and greater than -5"""'], {}), "(penalty, 'must be less than 0 and greater than -5')\n", (20817, 20869), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((21269, 21339), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['penalty', '"""must be less than 0 and greater than -4"""'], {}), "(penalty, 'must be less than 0 and greater than -4')\n", (21287, 21339), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((21739, 21809), 'sadie.airr.exceptions.BadIgBLASTArgument', 'BadIgBLASTArgument', (['penalty', '"""must be less than 0 and greater than -5"""'], {}), "(penalty, 'must be less than 0 and greater than -5')\n", (21757, 21809), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((25339, 25360), 'os.path.getsize', 'os.path.getsize', (['file'], {}), '(file)\n', (25354, 25360), False, 'import os\n'), ((25385, 25405), 'sadie.airr.exceptions.EmtpyFileError', 'EmtpyFileError', (['file'], {}), '(file)\n', (25399, 25405), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((25779, 25848), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'dir': 'self.temp_dir', 'suffix': '"""_igblast.tsv"""'}), "(dir=self.temp_dir, suffix='_igblast.tsv')\n", (25806, 25848), False, 'import tempfile\n'), ((25926, 25981), 
'subprocess.run', 'subprocess.run', (['cmd'], {'env': 'local_env', 'capture_output': '(True)'}), '(cmd, env=local_env, capture_output=True)\n', (25940, 25981), False, 'import subprocess\n'), ((26244, 26299), 'pandas.read_csv', 'pd.read_csv', (['tmpfile.name'], {'sep': '"""\t"""', 'dtype': 'IGBLAST_AIRR'}), "(tmpfile.name, sep='\\t', dtype=IGBLAST_AIRR)\n", (26255, 26299), True, 'import pandas as pd\n'), ((8660, 8733), 'sadie.airr.exceptions.BadIgBLASTExe', 'BadIgBLASTExe', (['self.executable', 'f"""semantic version can\'t parse {version}"""'], {}), '(self.executable, f"semantic version can\'t parse {version}")\n', (8673, 8733), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((24070, 24163), 'sadie.airr.exceptions.MissingIgBLASTArgument', 'MissingIgBLASTArgument', (['f"""Missing Blast argument. Need to set IgBLASTN.{blast_arg.name}"""'], {}), "(\n f'Missing Blast argument. Need to set IgBLASTN.{blast_arg.name}')\n", (24092, 24163), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((24268, 24327), 'sadie.airr.exceptions.BadIgBLASTExe', 'BadIgBLASTExe', (['self.executable', '"""Is not an executable tool"""'], {}), "(self.executable, 'Is not an executable tool')\n", (24281, 24327), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((24383, 24435), 'sadie.airr.exceptions.BadIgDATA', 'BadIgDATA', (['"""No IGDATA set, set with IgBLASTN.igdata"""'], {}), "('No IGDATA set, set with IgBLASTN.igdata')\n", (24392, 24435), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((26035, 26070), 'sadie.airr.exceptions.IgBLASTRunTimeError', 'IgBLASTRunTimeError', 
(['process.stderr'], {}), '(process.stderr)\n', (26054, 26070), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((26311, 26329), 'pathlib.Path', 'Path', (['tmpfile.name'], {}), '(tmpfile.name)\n', (26315, 26329), False, 'from pathlib import Path\n'), ((24478, 24505), 'os.path.exists', 'os.path.exists', (['self.igdata'], {}), '(self.igdata)\n', (24492, 24505), False, 'import os\n'), ((24533, 24555), 'sadie.airr.exceptions.BadIgDATA', 'BadIgDATA', (['self.igdata'], {}), '(self.igdata)\n', (24542, 24555), False, 'from sadie.airr.exceptions import BadIgBLASTArgument, BadIgBLASTExe, BadIgDATA, MissingIgBLASTArgument, EmtpyFileError, IgBLASTRunTimeError\n'), ((26434, 26452), 'pathlib.Path', 'Path', (['tmpfile.name'], {}), '(tmpfile.name)\n', (26438, 26452), False, 'from pathlib import Path\n'), ((18700, 18711), 'multiprocessing.cpu_count', 'cpu_count', ([], {}), '()\n', (18709, 18711), False, 'from multiprocessing import cpu_count\n')] |
from django.contrib import admin
from core.models import User, TrackerGroup, TrackerGroupInstance, Question, Answer, Response, TrackerGroupInstance
@admin.register(User)
class UserAdmin(admin.ModelAdmin):
    """Admin configuration for ``User`` records."""

    list_display = ('username', 'name', 'email', 'slug')
@admin.register(TrackerGroup)
class TrackerGroupAdmin(admin.ModelAdmin):
    """Admin configuration for ``TrackerGroup`` records.

    Renamed from ``TrackerGroup`` so the admin class no longer shadows the
    imported model of the same name in this module's namespace.
    """

    list_display = (
        'name',
        'user',
        'created_at',
        'id',
    )
@admin.register(TrackerGroupInstance)
class TrackerGroupInstanceAdmin(admin.ModelAdmin):
    """Admin configuration for ``TrackerGroupInstance`` records."""

    list_display = ('tracker', 'started_at', 'created_by', 'tracker_id')
@admin.register(Question)
class QuestionAdmin(admin.ModelAdmin):
    """Admin configuration for ``Question`` records.

    Renamed from ``Question`` so the admin class no longer shadows the
    imported model of the same name in this module's namespace.
    """

    list_display = (
        'current_question',
        'tracker',
        'created_at',
        'created_by',
    )
@admin.register(Answer)
class AnswerAdmin(admin.ModelAdmin):
    """Admin configuration for ``Answer`` records.

    Renamed from ``Answer`` so the admin class no longer shadows the
    imported model of the same name in this module's namespace.
    """

    list_display = (
        'current_answer',
        'question',
        'created_at',
        'created_by',
    )
@admin.register(Response)
class ResponseAdmin(admin.ModelAdmin):
    """Admin configuration for ``Response`` records.

    Renamed from ``Response`` so the admin class no longer shadows the
    imported model of the same name in this module's namespace.
    """

    list_display = (
        'tracker',
        'tracker_instance',
        'display_answers',
        'question',
        'user',
        'created_at',
    )
) | [
"django.contrib.admin.register"
] | [((150, 170), 'django.contrib.admin.register', 'admin.register', (['User'], {}), '(User)\n', (164, 170), False, 'from django.contrib import admin\n'), ((304, 332), 'django.contrib.admin.register', 'admin.register', (['TrackerGroup'], {}), '(TrackerGroup)\n', (318, 332), False, 'from django.contrib import admin\n'), ((470, 506), 'django.contrib.admin.register', 'admin.register', (['TrackerGroupInstance'], {}), '(TrackerGroupInstance)\n', (484, 506), False, 'from django.contrib import admin\n'), ((674, 698), 'django.contrib.admin.register', 'admin.register', (['Question'], {}), '(Question)\n', (688, 698), False, 'from django.contrib import admin\n'), ((855, 877), 'django.contrib.admin.register', 'admin.register', (['Answer'], {}), '(Answer)\n', (869, 877), False, 'from django.contrib import admin\n'), ((1031, 1055), 'django.contrib.admin.register', 'admin.register', (['Response'], {}), '(Response)\n', (1045, 1055), False, 'from django.contrib import admin\n')] |
"""
ssh module
"""
from ker.utils import request
from .list import API_LIST
class SSH:
"""
ssh api
"""
def __init__(self, email, token):
self.email = email
self.token = token
def list(self):
"""
list ssh keys
"""
return request(API_LIST.SSH_LIST.value, {
'email': self.email,
'token': self.token
})
def create(self, name, key):
"""
create ssh key
"""
return request(API_LIST.SSH_CREATE.value, {
'email': self.email,
'token': self.token,
'name': name,
'key': key
})
def delete(self):
"""
delete ssh key
"""
return request(API_LIST.SSH_DELETE.value, {
'email': self.email,
'token': self.token
})
| [
"ker.utils.request"
] | [((291, 367), 'ker.utils.request', 'request', (['API_LIST.SSH_LIST.value', "{'email': self.email, 'token': self.token}"], {}), "(API_LIST.SSH_LIST.value, {'email': self.email, 'token': self.token})\n", (298, 367), False, 'from ker.utils import request\n'), ((498, 607), 'ker.utils.request', 'request', (['API_LIST.SSH_CREATE.value', "{'email': self.email, 'token': self.token, 'name': name, 'key': key}"], {}), "(API_LIST.SSH_CREATE.value, {'email': self.email, 'token': self.\n token, 'name': name, 'key': key})\n", (505, 607), False, 'from ker.utils import request\n'), ((746, 824), 'ker.utils.request', 'request', (['API_LIST.SSH_DELETE.value', "{'email': self.email, 'token': self.token}"], {}), "(API_LIST.SSH_DELETE.value, {'email': self.email, 'token': self.token})\n", (753, 824), False, 'from ker.utils import request\n')] |
from jinja2 import Template
import sys
import os
import argparse
sys.path.append(os.path.join(os.path.dirname(__file__), '../lib'))
from ad_struct import ADStructure
with open(os.path.join(os.path.dirname(__file__), '../lib/scripter/templates/fill-ad.ps1.jinja'),'r') as fill_ad:
template = Template(fill_ad.read())
dc_domain = "cancamusa.com"
dc_path = dc_domain.split(".")
dc_path_string = "DC=" + (",DC=".join(dc_path))
ad_structure = {
"domain" : dc_domain,
"ou" : {
"IT-Services" : {
"name" : "IT-Services",
"ou" : {
"SupportGroups" : {
"name" : "SupportGroups",
"ou" : {
"CostCenter" : {
"name" : "CostCenter",
"ou" : {
},
"groups" : {
"CostCenter-123" : {
"name" : "CostCenter-123",
"sam_account_name" : "CostCenter-123",
"group_category" : "Security",
"group_scope" : "Global",
"display_name" : "CostCenter 123"
},
"CostCenter-125" : {
"name" : "CostCenter-125",
"sam_account_name" : "CostCenter-125",
"group_category" : "Security",
"group_scope" : "Global",
"display_name" : "CostCenter 125"
}
}
}
},
"groups" : {
"SecurePrinting" : {
"name" : "SecurePrinting",
"sam_account_name" : "SecurePrinting",
"group_category" : "Security",
"group_scope" : "Global",
"display_name" : "Secure Printing Users"
}
}
}
},
"groups" : {
}
},
"Locations" : {
"name" : "Locations",
"ou" : {
"HeadQuarter" : {
"name" : "HeadQuarter",
"ou" : {
"Users" : {
"name" : "Users",
"groups" : {},
"ou" : {}
}
},
"groups" : {}
}
},
"groups" : {}
}
}
}
struct = ADStructure.from_json(ad_structure)
ad_groups = struct.list_groups()
ad_ous = struct.list_child_ou()
user_list = [
{
"sam_account_name" : "samuel.garces",
"UserPrincipalName" : "<EMAIL>",
"Firstname" : "Samuel",
"Lastname" : "Garces",
"Department" : "CyberSecurity",
"ou" : "HeadQuarter",
"Password" : "<PASSWORD>"
},
{
"sam_account_name" : "canca.musa",
"UserPrincipalName" : "<EMAIL>",
"Firstname" : "Canca",
"Lastname" : "Musa",
"Department" : "CyberSecurity",
"ou" : "HeadQuarter",
"Password" : "<PASSWORD>"
}
]
print(template.render(user_list=user_list, ad_groups=ad_groups, ad_ous= ad_ous))
| [
"os.path.dirname",
"ad_struct.ADStructure.from_json"
] | [((3138, 3173), 'ad_struct.ADStructure.from_json', 'ADStructure.from_json', (['ad_structure'], {}), '(ad_structure)\n', (3159, 3173), False, 'from ad_struct import ADStructure\n'), ((94, 119), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (109, 119), False, 'import os\n'), ((192, 217), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (207, 217), False, 'import os\n')] |
import pyspark
from pyspark.sql import SparkSession
from pyspark.sql.types import *
from pyspark.sql.functions import *
import os
# Configure spark session
# ?readPreference=primaryPreferred
spark = SparkSession\
.builder\
.master('local[2]')\
.appName('accidents_etl')\
.config("spark.mongodb.input.uri", 'mongodb+srv://dbAdmin:<EMAIL>.net/CMPT732.Project')\
.config('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@<EMAIL>.jfbfw.mongodb.net/CMPT732.Project')\
.config('spark.jars.packages', 'org.mongodb.spark:mongo-spark-connector_2.12:3.0.1')\
.getOrCreate()
accidents_schema = StructType([
StructField('ID', StringType()),
StructField('Severity', IntegerType()),
StructField('Start_Time', TimestampType()),
StructField('End_Time', TimestampType()),
StructField('Start_Lat', DoubleType()),
StructField('Start_Lng', DoubleType()),
StructField('End_Lat', DoubleType()),
StructField('End_Lng', DoubleType()),
StructField('Distance(mi)', DoubleType()),
StructField('Description', StringType()),
StructField('Number', DoubleType()),
StructField('Street', StringType()),
StructField('Side', StringType()),
StructField('City', StringType()),
StructField('County', StringType()),
StructField('State', StringType()),
StructField('Zipcode', StringType()),
StructField('Country', StringType()),
StructField('Timezone', StringType()),
StructField('Airport_Code', StringType()),
StructField('Weather_Timestamp', StringType()),
StructField('Temperature(F)', DoubleType()),
StructField('Wind_Chill(F)', DoubleType()),
StructField('Humidity(%)', DoubleType()),
StructField('Pressure(in)', DoubleType()),
StructField('Visibility(mi)', DoubleType()),
StructField('Wind_Direction', StringType()),
StructField('Wind_Speed(mph)', DoubleType()),
StructField('Precipitation(in)', DoubleType()),
StructField('Weather_Condition', StringType()),
StructField('Amenity', StringType()),
StructField('Bump', StringType()),
StructField('Crossing', StringType()),
StructField('Give_Way', StringType()),
StructField('Junction', StringType()),
StructField('No_Exit', StringType()),
StructField('Railway', StringType()),
StructField('Roundabout', StringType()),
StructField('Station', StringType()),
StructField('Stop', StringType()),
StructField('Traffic_Calming', StringType()),
StructField('Traffic_Signal', StringType()),
StructField('Turning_Loop', StringType()),
StructField('Sunrise_Sunset', StringType()),
StructField('Civil_Twilight', StringType()),
StructField('Nautical_Twilight', StringType()),
StructField('Astronomical_Twilight', StringType()),
])
# Change the current working directory to root
path = os.path.dirname(__file__)
path = path.rstrip("/ETL")
os.chdir(path)
# Load df
df = spark.read.csv("Accident_No_NA.csv", schema=accidents_schema,header=True)
df.select(df['Start_Time'])
df=df.withColumn('date',to_date(df['Start_Time'],"yyyy-MM-dd")) #convert timestamp to datetime
df=df.select(df['State'],df['start_lat'],df['date'],year(df['date']).alias('Year'), month(df['date']).alias('Month'),dayofmonth(df['date']),df['Timezone']).cache()
df=df.filter((df['Year']=='2017')|(df['Year']=='2018')|(df['Year']=='2019')).cache()
#1 month:
df1=df.groupBy(df['Month']).count().orderBy(df['Month'])
df1.show()
df1.write.format('mongo')\
.mode('overwrite')\
.option('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@cluster732.jfbfw.mongodb.net/CMPT732.monthCount').save()
#2. weekday:
df2 = df.select(dayofweek(df['date']).alias('day_of_Week'))
df2 = df2.groupBy(df2['day_of_Week']).count().orderBy(df2['day_of_Week'])
df2.show()
#df2=df1.filter(df1['year(date)']=='2020')
#df2=df1.filter(df1['state_name']=='FL')
#df2=df2.filter(df1['start_lat']>45)
df2.write.format('mongo')\
.mode('overwrite')\
.option('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@<EMAIL>32.jfbfw.mongodb.net/CMPT732.dayofWeek').save()
#3.north vs south:
df3=df.filter(df['start_lat']>37)
df4=df.filter(df['start_lat']<30)
df3=df3.filter(df3['Timezone']=='US/Eastern')
df4=df4.filter(df4['Timezone']=='US/Eastern')
df3=df3.groupBy(df3['Month']).count().orderBy(df3['Month'])
df4=df4.groupBy(df4['Month']).count().orderBy(df4['Month'])
df3.show()
df4.show()
df3.write.format('mongo')\
.mode('overwrite')\
.option('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@<EMAIL>732.jfbfw.mongodb.net/CMPT732.northCount').save()
df4.write.format('mongo')\
.mode('overwrite')\
.option('spark.mongodb.output.uri', 'mongodb+srv://dbAdmin:cmpt732@cluster732.jfbfw.mongodb.net/CMPT732.southCount').save()
| [
"pyspark.sql.SparkSession.builder.master",
"os.path.dirname",
"os.chdir"
] | [((2814, 2839), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (2829, 2839), False, 'import os\n'), ((2867, 2881), 'os.chdir', 'os.chdir', (['path'], {}), '(path)\n', (2875, 2881), False, 'import os\n'), ((200, 239), 'pyspark.sql.SparkSession.builder.master', 'SparkSession.builder.master', (['"""local[2]"""'], {}), "('local[2]')\n", (227, 239), False, 'from pyspark.sql import SparkSession\n')] |
from rest_framework import serializers
from .models import CodonUsageTable, CodonUsage
class CodonUsageTableSerializer(serializers.ModelSerializer):
species = serializers.SlugRelatedField(read_only=True, slug_field='name')
class Meta:
model = CodonUsageTable
fields = '__all__'
class CodonUsageSerializer(serializers.ModelSerializer):
class Meta:
model = CodonUsage
exclude = ('table',)
| [
"rest_framework.serializers.SlugRelatedField"
] | [((166, 229), 'rest_framework.serializers.SlugRelatedField', 'serializers.SlugRelatedField', ([], {'read_only': '(True)', 'slug_field': '"""name"""'}), "(read_only=True, slug_field='name')\n", (194, 229), False, 'from rest_framework import serializers\n')] |
"""add a boolean flag for proxy dataset in ObjectMetadata
Revision ID: f712122fe780
Revises: <PASSWORD>
Create Date: 2022-03-31 07:20:49.411961
"""
# third party
from alembic import op # type: ignore
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f712122fe780"
down_revision = "70fcad0b1795"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"obj_metadata", sa.Column("is_proxy_dataset", sa.Boolean(), default=False)
)
def downgrade() -> None:
op.drop_column("obj_metadata", "is_proxy_dataset")
| [
"alembic.op.drop_column",
"sqlalchemy.Boolean"
] | [((529, 579), 'alembic.op.drop_column', 'op.drop_column', (['"""obj_metadata"""', '"""is_proxy_dataset"""'], {}), "('obj_metadata', 'is_proxy_dataset')\n", (543, 579), False, 'from alembic import op\n'), ((463, 475), 'sqlalchemy.Boolean', 'sa.Boolean', ([], {}), '()\n', (473, 475), True, 'import sqlalchemy as sa\n')] |
from django.test import TestCase
from django.core.management import call_command
from mock import patch
from robber import expect
from data.factories import AttachmentFileFactory
from data.constants import AttachmentSourceType
class CrawlCopaPortalDataTestCase(TestCase):
@patch('data_importer.copa_crawler.importers.CopaPortalAttachmentImporter.crawl_and_update_attachments')
@patch('data_importer.copa_crawler.importers.CopaSummaryReportsAttachmentImporter.crawl_and_update_attachments')
@patch('data_importer.management.commands.crawl_copa_data.send_cr_attachment_available_email')
def test_handle(self, send_email_mock, summary_reports_importer_mock, portal_importer_mock):
attachment_file_1 = AttachmentFileFactory(source_type=AttachmentSourceType.PORTAL_COPA)
attachment_file_2 = AttachmentFileFactory(source_type=AttachmentSourceType.SUMMARY_REPORTS_COPA)
portal_importer_mock.return_value = [attachment_file_1]
summary_reports_importer_mock.return_value = [attachment_file_2]
call_command('crawl_copa_data')
expect(send_email_mock).to.be.called_with([attachment_file_1, attachment_file_2])
| [
"data.factories.AttachmentFileFactory",
"django.core.management.call_command",
"mock.patch",
"robber.expect"
] | [((281, 394), 'mock.patch', 'patch', (['"""data_importer.copa_crawler.importers.CopaPortalAttachmentImporter.crawl_and_update_attachments"""'], {}), "(\n 'data_importer.copa_crawler.importers.CopaPortalAttachmentImporter.crawl_and_update_attachments'\n )\n", (286, 394), False, 'from mock import patch\n'), ((390, 511), 'mock.patch', 'patch', (['"""data_importer.copa_crawler.importers.CopaSummaryReportsAttachmentImporter.crawl_and_update_attachments"""'], {}), "(\n 'data_importer.copa_crawler.importers.CopaSummaryReportsAttachmentImporter.crawl_and_update_attachments'\n )\n", (395, 511), False, 'from mock import patch\n'), ((507, 610), 'mock.patch', 'patch', (['"""data_importer.management.commands.crawl_copa_data.send_cr_attachment_available_email"""'], {}), "(\n 'data_importer.management.commands.crawl_copa_data.send_cr_attachment_available_email'\n )\n", (512, 610), False, 'from mock import patch\n'), ((726, 793), 'data.factories.AttachmentFileFactory', 'AttachmentFileFactory', ([], {'source_type': 'AttachmentSourceType.PORTAL_COPA'}), '(source_type=AttachmentSourceType.PORTAL_COPA)\n', (747, 793), False, 'from data.factories import AttachmentFileFactory\n'), ((822, 898), 'data.factories.AttachmentFileFactory', 'AttachmentFileFactory', ([], {'source_type': 'AttachmentSourceType.SUMMARY_REPORTS_COPA'}), '(source_type=AttachmentSourceType.SUMMARY_REPORTS_COPA)\n', (843, 898), False, 'from data.factories import AttachmentFileFactory\n'), ((1045, 1076), 'django.core.management.call_command', 'call_command', (['"""crawl_copa_data"""'], {}), "('crawl_copa_data')\n", (1057, 1076), False, 'from django.core.management import call_command\n'), ((1085, 1108), 'robber.expect', 'expect', (['send_email_mock'], {}), '(send_email_mock)\n', (1091, 1108), False, 'from robber import expect\n')] |
# Generated by Django 2.1 on 2018-09-21 16:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Cast',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('list_order', models.IntegerField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Directors',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('list_order', models.IntegerField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Movie',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.CharField(max_length=200)),
('title', models.CharField(max_length=200)),
('year', models.IntegerField()),
('description', models.TextField()),
],
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=200)),
('middle_name', models.CharField(blank=True, max_length=200)),
('last_name', models.CharField(max_length=200)),
('image', models.CharField(max_length=200)),
('bio', models.TextField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='Review',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('body', models.TextField()),
('rating', models.IntegerField()),
('creation_time', models.DateTimeField()),
],
),
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('image', models.CharField(max_length=200)),
],
),
migrations.AddField(
model_name='review',
name='author',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='_django.User'),
),
migrations.AddField(
model_name='review',
name='movie',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reviews', to='_django.Movie'),
),
migrations.AddField(
model_name='movie',
name='cast',
field=models.ManyToManyField(related_name='acted_in', through='_django.Cast', to='_django.Person'),
),
migrations.AddField(
model_name='movie',
name='directors',
field=models.ManyToManyField(related_name='directed', through='_django.Directors', to='_django.Person'),
),
migrations.AddField(
model_name='directors',
name='movie',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='directors_rel', to='_django.Movie'),
),
migrations.AddField(
model_name='directors',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='directed_rel', to='_django.Person'),
),
migrations.AddField(
model_name='cast',
name='movie',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='cast_rel', to='_django.Movie'),
),
migrations.AddField(
model_name='cast',
name='person',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='acted_in_rel', to='_django.Person'),
),
]
| [
"django.db.models.TextField",
"django.db.models.ManyToManyField",
"django.db.models.ForeignKey",
"django.db.models.CharField",
"django.db.models.AutoField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] | [((2651, 2761), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""reviews"""', 'to': '"""_django.User"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='reviews', to='_django.User')\n", (2668, 2761), False, 'from django.db import migrations, models\n'), ((2875, 2986), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""reviews"""', 'to': '"""_django.Movie"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='reviews', to='_django.Movie')\n", (2892, 2986), False, 'from django.db import migrations, models\n'), ((3098, 3195), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""acted_in"""', 'through': '"""_django.Cast"""', 'to': '"""_django.Person"""'}), "(related_name='acted_in', through='_django.Cast', to=\n '_django.Person')\n", (3120, 3195), False, 'from django.db import migrations, models\n'), ((3312, 3413), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'related_name': '"""directed"""', 'through': '"""_django.Directors"""', 'to': '"""_django.Person"""'}), "(related_name='directed', through='_django.Directors',\n to='_django.Person')\n", (3334, 3413), False, 'from django.db import migrations, models\n'), ((3531, 3648), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""directors_rel"""', 'to': '"""_django.Movie"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='directors_rel', to='_django.Movie')\n", (3548, 3648), False, 'from django.db import migrations, models\n'), ((3766, 3883), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""directed_rel"""', 'to': '"""_django.Person"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n 
='directed_rel', to='_django.Person')\n", (3783, 3883), False, 'from django.db import migrations, models\n'), ((3995, 4107), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""cast_rel"""', 'to': '"""_django.Movie"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='cast_rel', to='_django.Movie')\n", (4012, 4107), False, 'from django.db import migrations, models\n'), ((4220, 4337), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'related_name': '"""acted_in_rel"""', 'to': '"""_django.Person"""'}), "(on_delete=django.db.models.deletion.CASCADE, related_name\n ='acted_in_rel', to='_django.Person')\n", (4237, 4337), False, 'from django.db import migrations, models\n'), ((331, 424), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (347, 424), False, 'from django.db import migrations, models\n'), ((454, 496), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (473, 496), False, 'from django.db import migrations, models\n'), ((631, 724), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (647, 724), False, 'from django.db import migrations, models\n'), ((754, 796), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (773, 796), False, 'from django.db import migrations, models\n'), ((927, 1020), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 
'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (943, 1020), False, 'from django.db import migrations, models\n'), ((1045, 1077), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1061, 1077), False, 'from django.db import migrations, models\n'), ((1106, 1138), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1122, 1138), False, 'from django.db import migrations, models\n'), ((1166, 1187), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (1185, 1187), False, 'from django.db import migrations, models\n'), ((1222, 1240), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (1238, 1240), False, 'from django.db import migrations, models\n'), ((1372, 1465), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1388, 1465), False, 'from django.db import migrations, models\n'), ((1495, 1527), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1511, 1527), False, 'from django.db import migrations, models\n'), ((1562, 1606), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(200)'}), '(blank=True, max_length=200)\n', (1578, 1606), False, 'from django.db import migrations, models\n'), ((1639, 1671), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1655, 1671), False, 'from django.db import migrations, models\n'), ((1700, 1732), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (1716, 1732), False, 'from django.db import migrations, 
models\n'), ((1759, 1798), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (1775, 1798), False, 'from django.db import migrations, models\n'), ((1930, 2023), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1946, 2023), False, 'from django.db import migrations, models\n'), ((2047, 2065), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2063, 2065), False, 'from django.db import migrations, models\n'), ((2095, 2116), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (2114, 2116), False, 'from django.db import migrations, models\n'), ((2153, 2175), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (2173, 2175), False, 'from django.db import migrations, models\n'), ((2305, 2398), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (2321, 2398), False, 'from django.db import migrations, models\n'), ((2422, 2454), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2438, 2454), False, 'from django.db import migrations, models\n'), ((2483, 2515), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)'}), '(max_length=200)\n', (2499, 2515), False, 'from django.db import migrations, models\n')] |
import os
import jinja2
from webapp2_extras import i18n
jinja_environment = jinja2.Environment(loader=jinja2.FileSystemLoader('static/templates'),extensions=['jinja2.ext.i18n'])
jinja_environment.install_gettext_callables(
i18n.gettext,
i18n.ngettext,
newstyle=True)
jinja_environment.filters.update({
'format_date' : i18n.format_date,
'format_time' : i18n.format_time,
'format_datetime' : i18n.format_datetime,
'format_timedelta': i18n.format_timedelta
})
| [
"jinja2.FileSystemLoader"
] | [((103, 146), 'jinja2.FileSystemLoader', 'jinja2.FileSystemLoader', (['"""static/templates"""'], {}), "('static/templates')\n", (126, 146), False, 'import jinja2\n')] |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, division
import os
import pyfasta
import allel
import seaborn as sns
import petl as etl
import h5py
import pandas
title = 'Phase 1 AR3 release'
pop_ids = 'AOM', 'BFM', 'GWA', 'GNS', 'BFS', 'CMS', 'GAS', 'UGS', 'KES'
pop_labels = {
'AOM': 'AO $coluzzii$',
'BFM': 'BF $coluzzii$',
'GWA': 'GW',
'GNS': 'GN $gambiae$',
'BFS': 'BF $gambiae$',
'CMS': 'CM $gambiae$',
'UGS': 'UG $gambiae$',
'GAS': 'GA $gambiae$',
'KES': 'KE',
'colony': 'colony',
}
pop_colors = {
'AOM': sns.color_palette('YlOrBr', 5)[4],
'BFM': sns.color_palette('Reds', 3)[1],
'GWA': sns.color_palette('YlOrBr', 5)[1],
'GNS': sns.color_palette('Blues', 3)[0],
'BFS': sns.color_palette('Blues', 3)[1],
'CMS': sns.color_palette('Blues', 3)[2],
'UGS': sns.color_palette('Greens', 2)[0],
'GAS': sns.color_palette('Greens', 2)[1],
'KES': sns.color_palette('Greys', 5)[2],
'colony': sns.color_palette('Greys', 5)[-1]
}
# convert to hex notation for ease of use elsewhere
for p in pop_colors:
h = '#%02x%02x%02x' % tuple(int(255*c) for c in pop_colors[p])
# chromatin
_data_chromatin = b"""CHX chro X 20009764 24393108
CH2R chro 2R 58984778 61545105
CH2L chro 2L 1 2431617
PEU2L chro 2L 2487770 5042389
IH2L chro 2L 5078962 5788875
IH3R chro 3R 38988757 41860198
CH3R chro 3R 52161877 53200684
CH3L chro 3L 1 1815119
PEU3L chro 3L 1896830 4235209
IH3L chro 3L 4264713 5031692
"""
tbl_chromatin = (
etl
.fromtext(etl.MemorySource(_data_chromatin))
.split('lines', '\s+', ['name', 'type', 'chrom', 'start', 'stop'])
.convert(('start', 'stop'), int)
.cutout('type')
)
# genome regions
region_X_speciation = 'X-speciation', 'X', 15000000, 24000000
region_X_free = 'X-free', 'X', 1, 14000000
region_3L_free = '3L-free', '3L', 15000000, 41000000
region_3R_free = '3R-free', '3R', 1, 37000000
# noinspection PyGlobalUndefined
def init(release_dir, load_geneset=False):
"""Initialise data resources.
Parameters
----------
release_dir : string
Local filesystem path where data from the release are stored.
load_geneset : string
If True, load geneset into memory.
"""
# reference sequence
####################
global genome_fn, genome
genome_dir = os.path.join(release_dir, 'genome')
genome_fn = os.path.join(genome_dir, 'Anopheles-gambiae-PEST_CHROMOSOMES_AgamP3.fa')
if os.path.exists(genome_fn):
genome = pyfasta.Fasta(genome_fn)
# genome annotations
####################
global geneset_agamp42_fn, geneset_agamp42
geneset_dir = os.path.join(release_dir, 'geneset')
geneset_agamp42_fn = os.path.join(
geneset_dir,
'Anopheles-gambiae-PEST_BASEFEATURES_AgamP4.2.sorted.gff3.gz')
if os.path.exists(geneset_agamp42_fn) and load_geneset:
geneset_agamp42 = allel.FeatureTable.from_gff3(geneset_agamp42_fn)
# variant callsets
##################
global callset, callset_pass
variation_dir = os.path.join(release_dir, 'variation')
# main callset
callset_h5_fn = os.path.join(variation_dir, 'main', 'hdf5', 'ag1000g.phase1.ar3.h5')
if os.path.exists(callset_h5_fn):
callset = h5py.File(callset_h5_fn, mode='r')
# main callset, PASS variants only
callset_pass_h5_fn = os.path.join(variation_dir, 'main', 'hdf5', 'ag1000g.phase1.ar3.pass.h5')
if os.path.exists(callset_pass_h5_fn):
callset_pass = h5py.File(callset_pass_h5_fn, mode='r')
# accessibility
###############
global accessibility
accessibility_dir = os.path.join(release_dir, 'accessibility')
accessibility_fn = os.path.join(accessibility_dir, 'accessibility.h5')
if os.path.exists(accessibility_fn):
accessibility = h5py.File(accessibility_fn, mode='r')
# sample metadata
#################
global samples_fn, tbl_samples, lkp_samples, sample_ids, df_samples
samples_dir = os.path.join(release_dir, 'samples')
samples_fn = os.path.join(samples_dir, 'samples.all.txt')
if os.path.exists(samples_fn):
tbl_samples = (
etl
.fromtsv(samples_fn)
.convert(('index', 'year', 'n_sequences', 'kt_2la', 'kt_2rb'), int)
.convert(('mean_coverage', 'latitude', 'longitude') + tuple(range(20, 36)), float)
)
lkp_samples = tbl_samples.recordlookupone('ox_code')
sample_ids = tbl_samples.values('ox_code').list()
df_samples = pandas.read_csv(samples_fn, sep='\t', index_col='index')
# extras
########
global allele_counts, allele_counts_gq10, outgroup_alleles, outgroup_allele_counts, \
outgroup_species
extras_dir = os.path.join(release_dir, 'extras')
# allele counts
allele_counts_fn = os.path.join(extras_dir, 'allele_counts.h5')
if os.path.exists(allele_counts_fn):
allele_counts = h5py.File(allele_counts_fn, mode='r')
allele_counts_gq10_fn = os.path.join(extras_dir, 'allele_counts.gq10.h5')
if os.path.exists(allele_counts_gq10_fn):
allele_counts_gq10 = h5py.File(allele_counts_gq10_fn, mode='r')
# outgroup data
outgroup_species = 'arab', 'meru', 'mela', 'quad', 'epir', 'chri'
outgroup_alleles_fn = os.path.join(extras_dir, 'outgroup_alleles.h5')
if os.path.exists(outgroup_alleles_fn):
outgroup_alleles = h5py.File(outgroup_alleles_fn, mode='r')
outgroup_allele_counts_fn = os.path.join(extras_dir, 'outgroup_allele_counts.h5')
if os.path.exists(outgroup_allele_counts_fn):
outgroup_allele_counts = h5py.File(outgroup_allele_counts_fn, mode='r')
| [
"h5py.File",
"petl.MemorySource",
"pandas.read_csv",
"os.path.exists",
"pyfasta.Fasta",
"seaborn.color_palette",
"petl.fromtsv",
"os.path.join",
"allel.FeatureTable.from_gff3"
] | [((2498, 2533), 'os.path.join', 'os.path.join', (['release_dir', '"""genome"""'], {}), "(release_dir, 'genome')\n", (2510, 2533), False, 'import os\n'), ((2550, 2622), 'os.path.join', 'os.path.join', (['genome_dir', '"""Anopheles-gambiae-PEST_CHROMOSOMES_AgamP3.fa"""'], {}), "(genome_dir, 'Anopheles-gambiae-PEST_CHROMOSOMES_AgamP3.fa')\n", (2562, 2622), False, 'import os\n'), ((2630, 2655), 'os.path.exists', 'os.path.exists', (['genome_fn'], {}), '(genome_fn)\n', (2644, 2655), False, 'import os\n'), ((2816, 2852), 'os.path.join', 'os.path.join', (['release_dir', '"""geneset"""'], {}), "(release_dir, 'geneset')\n", (2828, 2852), False, 'import os\n'), ((2878, 2970), 'os.path.join', 'os.path.join', (['geneset_dir', '"""Anopheles-gambiae-PEST_BASEFEATURES_AgamP4.2.sorted.gff3.gz"""'], {}), "(geneset_dir,\n 'Anopheles-gambiae-PEST_BASEFEATURES_AgamP4.2.sorted.gff3.gz')\n", (2890, 2970), False, 'import os\n'), ((3220, 3258), 'os.path.join', 'os.path.join', (['release_dir', '"""variation"""'], {}), "(release_dir, 'variation')\n", (3232, 3258), False, 'import os\n'), ((3299, 3367), 'os.path.join', 'os.path.join', (['variation_dir', '"""main"""', '"""hdf5"""', '"""ag1000g.phase1.ar3.h5"""'], {}), "(variation_dir, 'main', 'hdf5', 'ag1000g.phase1.ar3.h5')\n", (3311, 3367), False, 'import os\n'), ((3375, 3404), 'os.path.exists', 'os.path.exists', (['callset_h5_fn'], {}), '(callset_h5_fn)\n', (3389, 3404), False, 'import os\n'), ((3524, 3597), 'os.path.join', 'os.path.join', (['variation_dir', '"""main"""', '"""hdf5"""', '"""ag1000g.phase1.ar3.pass.h5"""'], {}), "(variation_dir, 'main', 'hdf5', 'ag1000g.phase1.ar3.pass.h5')\n", (3536, 3597), False, 'import os\n'), ((3605, 3639), 'os.path.exists', 'os.path.exists', (['callset_pass_h5_fn'], {}), '(callset_pass_h5_fn)\n', (3619, 3639), False, 'import os\n'), ((3795, 3837), 'os.path.join', 'os.path.join', (['release_dir', '"""accessibility"""'], {}), "(release_dir, 'accessibility')\n", (3807, 3837), False, 'import os\n'), 
((3861, 3912), 'os.path.join', 'os.path.join', (['accessibility_dir', '"""accessibility.h5"""'], {}), "(accessibility_dir, 'accessibility.h5')\n", (3873, 3912), False, 'import os\n'), ((3920, 3952), 'os.path.exists', 'os.path.exists', (['accessibility_fn'], {}), '(accessibility_fn)\n', (3934, 3952), False, 'import os\n'), ((4152, 4188), 'os.path.join', 'os.path.join', (['release_dir', '"""samples"""'], {}), "(release_dir, 'samples')\n", (4164, 4188), False, 'import os\n'), ((4206, 4250), 'os.path.join', 'os.path.join', (['samples_dir', '"""samples.all.txt"""'], {}), "(samples_dir, 'samples.all.txt')\n", (4218, 4250), False, 'import os\n'), ((4258, 4284), 'os.path.exists', 'os.path.exists', (['samples_fn'], {}), '(samples_fn)\n', (4272, 4284), False, 'import os\n'), ((4901, 4936), 'os.path.join', 'os.path.join', (['release_dir', '"""extras"""'], {}), "(release_dir, 'extras')\n", (4913, 4936), False, 'import os\n'), ((4981, 5025), 'os.path.join', 'os.path.join', (['extras_dir', '"""allele_counts.h5"""'], {}), "(extras_dir, 'allele_counts.h5')\n", (4993, 5025), False, 'import os\n'), ((5033, 5065), 'os.path.exists', 'os.path.exists', (['allele_counts_fn'], {}), '(allele_counts_fn)\n', (5047, 5065), False, 'import os\n'), ((5157, 5206), 'os.path.join', 'os.path.join', (['extras_dir', '"""allele_counts.gq10.h5"""'], {}), "(extras_dir, 'allele_counts.gq10.h5')\n", (5169, 5206), False, 'import os\n'), ((5214, 5251), 'os.path.exists', 'os.path.exists', (['allele_counts_gq10_fn'], {}), '(allele_counts_gq10_fn)\n', (5228, 5251), False, 'import os\n'), ((5442, 5489), 'os.path.join', 'os.path.join', (['extras_dir', '"""outgroup_alleles.h5"""'], {}), "(extras_dir, 'outgroup_alleles.h5')\n", (5454, 5489), False, 'import os\n'), ((5497, 5532), 'os.path.exists', 'os.path.exists', (['outgroup_alleles_fn'], {}), '(outgroup_alleles_fn)\n', (5511, 5532), False, 'import os\n'), ((5634, 5687), 'os.path.join', 'os.path.join', (['extras_dir', '"""outgroup_allele_counts.h5"""'], {}), 
"(extras_dir, 'outgroup_allele_counts.h5')\n", (5646, 5687), False, 'import os\n'), ((5695, 5736), 'os.path.exists', 'os.path.exists', (['outgroup_allele_counts_fn'], {}), '(outgroup_allele_counts_fn)\n', (5709, 5736), False, 'import os\n'), ((595, 625), 'seaborn.color_palette', 'sns.color_palette', (['"""YlOrBr"""', '(5)'], {}), "('YlOrBr', 5)\n", (612, 625), True, 'import seaborn as sns\n'), ((641, 669), 'seaborn.color_palette', 'sns.color_palette', (['"""Reds"""', '(3)'], {}), "('Reds', 3)\n", (658, 669), True, 'import seaborn as sns\n'), ((685, 715), 'seaborn.color_palette', 'sns.color_palette', (['"""YlOrBr"""', '(5)'], {}), "('YlOrBr', 5)\n", (702, 715), True, 'import seaborn as sns\n'), ((731, 760), 'seaborn.color_palette', 'sns.color_palette', (['"""Blues"""', '(3)'], {}), "('Blues', 3)\n", (748, 760), True, 'import seaborn as sns\n'), ((776, 805), 'seaborn.color_palette', 'sns.color_palette', (['"""Blues"""', '(3)'], {}), "('Blues', 3)\n", (793, 805), True, 'import seaborn as sns\n'), ((821, 850), 'seaborn.color_palette', 'sns.color_palette', (['"""Blues"""', '(3)'], {}), "('Blues', 3)\n", (838, 850), True, 'import seaborn as sns\n'), ((866, 896), 'seaborn.color_palette', 'sns.color_palette', (['"""Greens"""', '(2)'], {}), "('Greens', 2)\n", (883, 896), True, 'import seaborn as sns\n'), ((912, 942), 'seaborn.color_palette', 'sns.color_palette', (['"""Greens"""', '(2)'], {}), "('Greens', 2)\n", (929, 942), True, 'import seaborn as sns\n'), ((958, 987), 'seaborn.color_palette', 'sns.color_palette', (['"""Greys"""', '(5)'], {}), "('Greys', 5)\n", (975, 987), True, 'import seaborn as sns\n'), ((1006, 1035), 'seaborn.color_palette', 'sns.color_palette', (['"""Greys"""', '(5)'], {}), "('Greys', 5)\n", (1023, 1035), True, 'import seaborn as sns\n'), ((2674, 2698), 'pyfasta.Fasta', 'pyfasta.Fasta', (['genome_fn'], {}), '(genome_fn)\n', (2687, 2698), False, 'import pyfasta\n'), ((2991, 3025), 'os.path.exists', 'os.path.exists', (['geneset_agamp42_fn'], {}), 
'(geneset_agamp42_fn)\n', (3005, 3025), False, 'import os\n'), ((3070, 3118), 'allel.FeatureTable.from_gff3', 'allel.FeatureTable.from_gff3', (['geneset_agamp42_fn'], {}), '(geneset_agamp42_fn)\n', (3098, 3118), False, 'import allel\n'), ((3424, 3458), 'h5py.File', 'h5py.File', (['callset_h5_fn'], {'mode': '"""r"""'}), "(callset_h5_fn, mode='r')\n", (3433, 3458), False, 'import h5py\n'), ((3664, 3703), 'h5py.File', 'h5py.File', (['callset_pass_h5_fn'], {'mode': '"""r"""'}), "(callset_pass_h5_fn, mode='r')\n", (3673, 3703), False, 'import h5py\n'), ((3978, 4015), 'h5py.File', 'h5py.File', (['accessibility_fn'], {'mode': '"""r"""'}), "(accessibility_fn, mode='r')\n", (3987, 4015), False, 'import h5py\n'), ((4684, 4740), 'pandas.read_csv', 'pandas.read_csv', (['samples_fn'], {'sep': '"""\t"""', 'index_col': '"""index"""'}), "(samples_fn, sep='\\t', index_col='index')\n", (4699, 4740), False, 'import pandas\n'), ((5091, 5128), 'h5py.File', 'h5py.File', (['allele_counts_fn'], {'mode': '"""r"""'}), "(allele_counts_fn, mode='r')\n", (5100, 5128), False, 'import h5py\n'), ((5282, 5324), 'h5py.File', 'h5py.File', (['allele_counts_gq10_fn'], {'mode': '"""r"""'}), "(allele_counts_gq10_fn, mode='r')\n", (5291, 5324), False, 'import h5py\n'), ((5561, 5601), 'h5py.File', 'h5py.File', (['outgroup_alleles_fn'], {'mode': '"""r"""'}), "(outgroup_alleles_fn, mode='r')\n", (5570, 5601), False, 'import h5py\n'), ((5771, 5817), 'h5py.File', 'h5py.File', (['outgroup_allele_counts_fn'], {'mode': '"""r"""'}), "(outgroup_allele_counts_fn, mode='r')\n", (5780, 5817), False, 'import h5py\n'), ((4322, 4345), 'petl.fromtsv', 'etl.fromtsv', (['samples_fn'], {}), '(samples_fn)\n', (4333, 4345), True, 'import petl as etl\n'), ((1697, 1730), 'petl.MemorySource', 'etl.MemorySource', (['_data_chromatin'], {}), '(_data_chromatin)\n', (1713, 1730), True, 'import petl as etl\n')] |
import ast
import base64
import json
import requests
import pytest
from pytest_bdd import parsers, scenario, then, when
def _negation(value):
"""Parse an optional negation after a verb (in a Gherkin feature spec)."""
if value == "":
return False
elif value in [" not", "not"]:
return True
else:
raise ValueError("Cannot parse '{}' as an optional negation".format(value))
# Scenario {{{
@scenario("../features/salt_api.feature", "Login to SaltAPI using Basic auth")
def test_login_basic_auth_to_salt_api(host):
    """Run the Basic-auth login scenario (steps provided by @when/@then below)."""
    pass
@scenario(
    "../features/salt_api.feature", "Login to SaltAPI using an admin ServiceAccount"
)
def test_login_salt_api_admin_sa(host):
    """Run the admin-ServiceAccount login scenario."""
    pass
@scenario(
    "../features/salt_api.feature",
    "Login to SaltAPI using the storage-operator ServiceAccount",
)
def test_login_salt_api_storage_operator(host):
    """Run the storage-operator ServiceAccount login scenario."""
    pass
@scenario("../features/salt_api.feature", "Login to SaltAPI using any ServiceAccount")
def test_login_salt_api_service_account(host):
    """Run the generic ServiceAccount login scenario."""
    pass
@scenario(
    "../features/salt_api.feature", "SaltAPI impersonation using a ServiceAccount"
)
def test_salt_api_impersonation_with_bearer_auth(host):
    """Run the ServiceAccount impersonation scenario."""
    pass
@pytest.fixture
def salt_api_address(control_plane_ip):
    """Return the SaltAPI endpoint address as 'host:4507'."""
    return "{}:{}".format(control_plane_ip, 4507)
@pytest.fixture(scope="function")
def context():
    """Fresh per-test dict shared between @when and @then steps."""
    return {}
# }}}
# When {{{
@when(parsers.parse("we login to SaltAPI as '{username}' using password '{password}'"))
def login_salt_api_basic(host, username, password, salt_api_address, context):
    """Open a Basic-auth SaltAPI session and store it in the shared context."""
    session = _salt_api_login(
        salt_api_address, username=username, password=password
    )
    context["salt-api"] = session
@when("we login to SaltAPI with an admin ServiceAccount")
def login_salt_api_admin_sa(host, k8s_client, admin_sa, salt_api_address, context):
    """Log in with the admin ServiceAccount and store the session in context."""
    name, namespace = admin_sa
    context["salt-api"] = _login_salt_api_sa(
        salt_api_address, k8s_client, name, namespace
    )
@when(
    parsers.parse(
        "we login to SaltAPI with the ServiceAccount '{namespace}/{account_name}'"
    )
)
def login_salt_api_system_sa(
    host, k8s_client, namespace, account_name, salt_api_address, context
):
    """Log in with an arbitrary ServiceAccount and store the session in context."""
    session = _login_salt_api_sa(
        salt_api_address, k8s_client, account_name, namespace
    )
    context["salt-api"] = session
@when(
    parsers.parse(
        "we impersonate user '{username}' against SaltAPI "
        "using the ServiceAccount '{namespace}/{account_name}'"
    )
)
def login_salt_api_token_override_username(
    host, k8s_client, namespace, account_name, username, salt_api_address, context
):
    """Log in with a ServiceAccount token while overriding the username field,
    and store the resulting session in the shared test context."""
    session = _login_salt_api_sa(
        salt_api_address, k8s_client, account_name, namespace, username=username
    )
    context["salt-api"] = session
# }}}
# Then {{{
@then(
    parsers.cfparse(
        "we can{negated:Negation?} ping all minions",
        extra_types={"Negation": _negation},
    )
)
def ping_all_minions(host, context, negated):
    """Check that 'test.ping' across all minions succeeds (or is denied)."""
    response = _salt_call(context, "test.ping", tgt="*")
    if not negated:
        assert response.json()["return"][0] != []
    else:
        assert response.status_code == 401
        assert "No permission" in response.text
@then(
    parsers.cfparse(
        "we can{negated:Negation?} run state '{module}' on '{targets}'",
        extra_types={"Negation": _negation},
    )
)
def run_state_on_targets(host, context, negated, module, targets):
    """Check that applying a state module on targets succeeds (or is denied)."""
    response = _salt_call(context, "state.sls", tgt=targets, kwarg={"mods": module})
    if not negated:
        assert response.status_code == 200
    else:
        assert response.status_code == 401
        assert "No permission" in response.text
@then("authentication fails")
def authentication_fails(host, context):
    """The last SaltAPI login attempt must have been rejected (HTTP 401)."""
    assert context["salt-api"]["login-status-code"] == 401
@then("authentication succeeds")
def authentication_succeeds(host, context):
    """The last SaltAPI login attempt must have been accepted (HTTP 200)."""
    assert context["salt-api"]["login-status-code"] == 200
@then(parsers.parse("we can invoke '{modules}' on '{targets}'"))
def invoke_module_on_target(host, context, modules, targets):
    """The granted perms must allow running `modules` on `targets`."""
    # `modules` is a Python-literal string from the feature file, parsed here.
    assert {targets: ast.literal_eval(modules)} in context["salt-api"]["perms"]
@then(parsers.parse("we have '{perms}' perms"))
def have_perms(host, context, perms):
    """The given permission entry must be present in the login response."""
    assert perms in context["salt-api"]["perms"]
@then(parsers.parse("we have no permissions"))
def have_no_perms(host, context):
    """The login response must carry an empty permission set."""
    assert context["salt-api"]["perms"] == {}
# }}}
# Helpers {{{
def _login_salt_api_sa(address, k8s_client, name, namespace, username=None):
    """Log in to SaltAPI with a ServiceAccount token.

    The token comes from the ServiceAccount's first secret.  Unless an
    explicit `username` is given, the conventional
    'system:serviceaccount:<namespace>:<name>' identity is used.
    """
    account = k8s_client.read_namespaced_service_account(
        name=name, namespace=namespace
    )
    secret = k8s_client.read_namespaced_secret(
        name=account.secrets[0].name, namespace=namespace
    )
    token = base64.decodebytes(secret.data["token"].encode("utf-8"))
    login_name = (
        "system:serviceaccount:{}:{}".format(namespace, name)
        if username is None
        else username
    )
    return _salt_api_login(address, username=login_name, token=token)
def _salt_api_login(address, username=None, password=None, token=None):
    """POST to SaltAPI /login using the 'kubernetes_rbac' eauth backend.

    Returns a dict with the API base URL, the session token and perms
    (populated only on HTTP 200), and the login status code.
    """
    credentials = {"eauth": "kubernetes_rbac"}
    for field, value in (
        ("username", username),
        ("password", password),
        ("token", token),
    ):
        if value:
            credentials[field] = value
    # NOTE: TLS verification is disabled -- test-environment endpoint.
    response = requests.post(
        "https://{}/login".format(address),
        data=credentials,
        verify=False,
    )
    result = {
        "url": "https://{}".format(address),
        "token": None,
        "perms": [],
        "login-status-code": response.status_code,
    }
    if response.status_code == 200:
        payload = response.json()["return"][0]
        result["token"] = payload["token"]
        result["perms"] = payload["perms"]
    return result
def _salt_call(context, fun, tgt="*", arg=None, kwarg=None):
    """Run a Salt 'local' client command through SaltAPI using the stored session."""
    payload = {"client": "local", "tgt": tgt, "fun": fun}
    if arg is not None:
        payload["arg"] = arg
    if kwarg is not None:
        payload["kwarg"] = kwarg
    headers = {"X-Auth-Token": context["salt-api"]["token"]}
    # NOTE: TLS verification is disabled -- test-environment endpoint.
    return requests.post(
        context["salt-api"]["url"],
        json=[payload],
        headers=headers,
        verify=False,
    )
# }}}
| [
"pytest_bdd.then",
"pytest_bdd.parsers.parse",
"pytest_bdd.parsers.cfparse",
"pytest.fixture",
"pytest_bdd.when",
"ast.literal_eval",
"requests.post",
"pytest_bdd.scenario"
] | [((435, 512), 'pytest_bdd.scenario', 'scenario', (['"""../features/salt_api.feature"""', '"""Login to SaltAPI using Basic auth"""'], {}), "('../features/salt_api.feature', 'Login to SaltAPI using Basic auth')\n", (443, 512), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((570, 664), 'pytest_bdd.scenario', 'scenario', (['"""../features/salt_api.feature"""', '"""Login to SaltAPI using an admin ServiceAccount"""'], {}), "('../features/salt_api.feature',\n 'Login to SaltAPI using an admin ServiceAccount')\n", (578, 664), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((719, 825), 'pytest_bdd.scenario', 'scenario', (['"""../features/salt_api.feature"""', '"""Login to SaltAPI using the storage-operator ServiceAccount"""'], {}), "('../features/salt_api.feature',\n 'Login to SaltAPI using the storage-operator ServiceAccount')\n", (727, 825), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((893, 982), 'pytest_bdd.scenario', 'scenario', (['"""../features/salt_api.feature"""', '"""Login to SaltAPI using any ServiceAccount"""'], {}), "('../features/salt_api.feature',\n 'Login to SaltAPI using any ServiceAccount')\n", (901, 982), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((1038, 1130), 'pytest_bdd.scenario', 'scenario', (['"""../features/salt_api.feature"""', '"""SaltAPI impersonation using a ServiceAccount"""'], {}), "('../features/salt_api.feature',\n 'SaltAPI impersonation using a ServiceAccount')\n", (1046, 1130), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((1309, 1341), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (1323, 1341), False, 'import pytest\n'), ((1674, 1730), 'pytest_bdd.when', 'when', (['"""we login to SaltAPI with an admin ServiceAccount"""'], {}), "('we login to SaltAPI with an admin ServiceAccount')\n", (1678, 1730), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((3702, 3730), 
'pytest_bdd.then', 'then', (['"""authentication fails"""'], {}), "('authentication fails')\n", (3706, 3730), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((3834, 3865), 'pytest_bdd.then', 'then', (['"""authentication succeeds"""'], {}), "('authentication succeeds')\n", (3838, 3865), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((1398, 1483), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we login to SaltAPI as \'{username}\' using password \'{password}\'"""'], {}), '("we login to SaltAPI as \'{username}\' using password \'{password}\'"\n )\n', (1411, 1483), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((1978, 2072), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we login to SaltAPI with the ServiceAccount \'{namespace}/{account_name}\'"""'], {}), '(\n "we login to SaltAPI with the ServiceAccount \'{namespace}/{account_name}\'")\n', (1991, 2072), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((2342, 2471), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we impersonate user \'{username}\' against SaltAPI using the ServiceAccount \'{namespace}/{account_name}\'"""'], {}), '(\n "we impersonate user \'{username}\' against SaltAPI using the ServiceAccount \'{namespace}/{account_name}\'"\n )\n', (2355, 2471), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((2817, 2920), 'pytest_bdd.parsers.cfparse', 'parsers.cfparse', (['"""we can{negated:Negation?} ping all minions"""'], {'extra_types': "{'Negation': _negation}"}), "('we can{negated:Negation?} ping all minions', extra_types={\n 'Negation': _negation})\n", (2832, 2920), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((3251, 3373), 'pytest_bdd.parsers.cfparse', 'parsers.cfparse', (['"""we can{negated:Negation?} run state \'{module}\' on \'{targets}\'"""'], {'extra_types': "{'Negation': _negation}"}), '("we can{negated:Negation?} run state \'{module}\' on \'{targets}\'"\n , 
extra_types={\'Negation\': _negation})\n', (3266, 3373), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((3977, 4034), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we can invoke \'{modules}\' on \'{targets}\'"""'], {}), '("we can invoke \'{modules}\' on \'{targets}\'")\n', (3990, 4034), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((4186, 4226), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we have \'{perms}\' perms"""'], {}), '("we have \'{perms}\' perms")\n', (4199, 4226), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((4323, 4362), 'pytest_bdd.parsers.parse', 'parsers.parse', (['"""we have no permissions"""'], {}), "('we have no permissions')\n", (4336, 4362), False, 'from pytest_bdd import parsers, scenario, then, when\n'), ((6040, 6171), 'requests.post', 'requests.post', (["context['salt-api']['url']"], {'json': '[action]', 'headers': "{'X-Auth-Token': context['salt-api']['token']}", 'verify': '(False)'}), "(context['salt-api']['url'], json=[action], headers={\n 'X-Auth-Token': context['salt-api']['token']}, verify=False)\n", (6053, 6171), False, 'import requests\n'), ((4119, 4144), 'ast.literal_eval', 'ast.literal_eval', (['modules'], {}), '(modules)\n', (4135, 4144), False, 'import ast\n')] |
# coding=utf-8
"""Comparison of various classifiers acting alone and inside an bagging ensemble."""
from sklearn import datasets, model_selection, metrics, tree, ensemble
def _report(clf, X_train, X_test, y_train, y_test):
    """Print classification report and confusion matrix on test and training splits."""
    for title, X_part, y_part in (("Test:", X_test, y_test), ("Training:", X_train, y_train)):
        print(title)
        y_pred = clf.predict(X_part)  # predict once, reuse for both metrics
        print(metrics.classification_report(y_part, y_pred))
        print(metrics.confusion_matrix(y_part, y_pred))


if __name__ == "__main__":
    print("Loading data...")
    X, y = datasets.load_iris(return_X_y=True)
    X_train, X_test, y_train, y_test = model_selection.train_test_split(X, y)
    print("Fitting classifiers...")
    t = tree.DecisionTreeClassifier()
    t.fit(X_train, y_train)
    e = ensemble.BaggingClassifier(
        tree.DecisionTreeClassifier(), n_estimators=35, max_features=0.5, max_samples=0.5
    )
    e.fit(X_train, y_train)
    print("Evaluating classifiers...")
    print("#" * 128)
    print("Decision tree:")
    _report(t, X_train, X_test, y_train, y_test)
    print("#" * 128)
    # Fixed: the original printed a stray duplicate "Decision tree:" header here.
    print("Decision tree ensemble:")
    _report(e, X_train, X_test, y_train, y_test)
| [
"sklearn.datasets.load_iris",
"sklearn.model_selection.train_test_split",
"sklearn.tree.DecisionTreeClassifier"
] | [((240, 275), 'sklearn.datasets.load_iris', 'datasets.load_iris', ([], {'return_X_y': '(True)'}), '(return_X_y=True)\n', (258, 275), False, 'from sklearn import datasets, model_selection, metrics, tree, ensemble\n'), ((315, 353), 'sklearn.model_selection.train_test_split', 'model_selection.train_test_split', (['X', 'y'], {}), '(X, y)\n', (347, 353), False, 'from sklearn import datasets, model_selection, metrics, tree, ensemble\n'), ((399, 428), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {}), '()\n', (426, 428), False, 'from sklearn import datasets, model_selection, metrics, tree, ensemble\n'), ((493, 522), 'sklearn.tree.DecisionTreeClassifier', 'tree.DecisionTreeClassifier', ([], {}), '()\n', (520, 522), False, 'from sklearn import datasets, model_selection, metrics, tree, ensemble\n')] |
import sys
sys.setrecursionlimit(60000)
def solution(n):
    """Return f(n) mod 1000000007 for the recurrence f(n) = f(n-1) + f(n-2).

    Base cases: f(1) = 1, f(2) = 2 (and f(0) = 1, as the original code
    returned).  Computed iteratively in O(n) time and O(1) space, which
    removes the deep recursion that required sys.setrecursionlimit(60000)
    in the memoized version.
    """
    MOD = 1000000007
    if n <= 0:
        return 1
    prev, curr = 1, 1  # f(0), f(1)
    for _ in range(n - 1):
        prev, curr = curr, (prev + curr) % MOD
    return curr
| [
"sys.setrecursionlimit"
] | [((12, 40), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(60000)'], {}), '(60000)\n', (33, 40), False, 'import sys\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
def write_event_file():
    """Dump the default graph to a TensorBoard event file in the current directory.

    View it with: tensorboard --logdir .
    """
    writer = tf.summary.FileWriter('.')
    try:
        writer.add_graph(tf.get_default_graph())
    finally:
        # The writer buffers events; close it so they are flushed to disk
        # (the original leaked the writer and relied on interpreter exit).
        writer.close()
    print("Event Written")
def execute():
    """Build a tiny constant graph illustrating deferred execution.

    Tensor programs are graphs made of operations and tensors: tensors are
    N-dimensional arrays, and operations transform tensors.  The constant
    ops below each output a single tensor; printing `a`, `b` or `total`
    would not show values, because nothing has been run in a session yet.

    To visualize the resulting graph with TensorBoard:
      1) write an event file via write_event_file() above
      2) run `tensorboard --logdir .` in a terminal
         (it serves a web app at localhost:6006)
    """
    a = tf.constant(3.0, dtype=tf.float32)
    b = tf.constant(4.0)  # also tf.float32 implicitly
    # Adding the tensors appends an op to the graph; `total` stays symbolic.
    total = a + b
| [
"tensorflow.get_default_graph",
"tensorflow.summary.FileWriter",
"tensorflow.constant"
] | [((177, 203), 'tensorflow.summary.FileWriter', 'tf.summary.FileWriter', (['"""."""'], {}), "('.')\n", (198, 203), True, 'import tensorflow as tf\n'), ((692, 726), 'tensorflow.constant', 'tf.constant', (['(3.0)'], {'dtype': 'tf.float32'}), '(3.0, dtype=tf.float32)\n', (703, 726), True, 'import tensorflow as tf\n'), ((735, 751), 'tensorflow.constant', 'tf.constant', (['(4.0)'], {}), '(4.0)\n', (746, 751), True, 'import tensorflow as tf\n'), ((225, 247), 'tensorflow.get_default_graph', 'tf.get_default_graph', ([], {}), '()\n', (245, 247), True, 'import tensorflow as tf\n')] |
"""Pytest plugin to setup and teardown a Decoy instance.
The plugin will be registered with pytest when you install Decoy. It adds a
fixture without modifying any other pytest behavior. Its usage is optional
but highly recommended.
"""
import pytest
from typing import Iterable
from decoy import Decoy
@pytest.fixture
def decoy() -> Iterable[Decoy]:
    """Yield a fresh [decoy.Decoy][] container and reset it after the test.

    This function-scoped [pytest fixture][] is inserted automatically by the
    plugin; request it by name in any test.

    [pytest fixture]: https://docs.pytest.org/en/latest/how-to/fixtures.html

    Example:
        ```python
        def test_my_thing(decoy: Decoy) -> None:
            my_fake_func = decoy.mock()
            # ...
        ```
    """
    container = Decoy()
    yield container
    container.reset()
| [
"decoy.Decoy"
] | [((786, 793), 'decoy.Decoy', 'Decoy', ([], {}), '()\n', (791, 793), False, 'from decoy import Decoy\n')] |
#!/usr/bin/env python
import matplotlib.pyplot as plt
import re, os, sys
from dwave_qbsolv import QBSolv
from dwave.system.samplers import DWaveSampler, DWaveCliqueSampler
from dwave.system.composites import EmbeddingComposite, FixedEmbeddingComposite
import dimod
import hybrid
import minorminer
import networkx as nx
from numpy import linalg as la
from networkx.generators.atlas import *
import numpy as np
import networkx as nx
import random, copy
import math
from scipy.sparse import csr_matrix
import argparse
import logging
import datetime as dt
from qpu_sampler_time import QPUTimeSubproblemAutoEmbeddingSampler
#
# The Quantum Graph Community Detection Algorithm has been described
# in the following publications. Please cite in your publication.
#
# <NAME>, <NAME>, <NAME>,
# 2017, Graph Partitioning using Quantum Annealing on the
# D-Wave System, Proceedings of the 2nd International
# Workshop on Post Moore’s Era Supercomputing (PMES), 22-29.
#
# <NAME>, <NAME>, <NAME> 2020, Detecting
# Multiple Communities using Quantum Annealing on the D-Wave System,
# PLOS ONE 15(2): e0227538. https://doi.org/10.1371/journal.pone.0227538
#
# <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
# <NAME>, 2021, Reduction of the Molecular Hamiltonian Matrix using
# Quantum Community Detection, Sci Rep 11, 4099 (2021).
# https://doi.org/10.1038/s41598-021-83561-x#
#
def build_mod(Adj, thresh, num_edges):
    #Builds the modularity matrix from the Adjacency matrix.
    #Given an adj matrix, it constructs the modularity matrix and its graph.
    # Returns (mtotal, Mod): half the total degree sum and the dense
    # modularity matrix  Mod = Adj - d*d^T / M  with d the degree vector.
    # NOTE(review): the `thresh` and `num_edges` parameters are unused here.
    # NOTE(review): `Deg * Deg.T` is an outer product only if Adj is a
    # numpy matrix (not a plain ndarray) -- presumably Adj is np.matrix; confirm.
    Dim = Adj.shape[1]
    print ("\n Dim = ", Dim)
    print ("\n Computing modularity matrix ...")
    Deg = np.zeros([Dim])
    M = 0.0
    # Calc Adj degrees
    Deg = Adj.sum(1)  # row sums: one degree per node
    M = Deg.sum()     # total degree (twice the edge count for simple graphs)
    mtotal = M/2.0
    Mod = np.zeros([Dim,Dim])
    # Calc modularity matrix
    Mod = Mod + Adj
    Mod = Mod - (Deg * Deg.T)/M
    np.set_printoptions(precision=3)  # only affects how arrays print later
    return mtotal, Mod
def get_block_number(big_indx, num_blocks, num_nodes):
    """Return the block (community) index that flat variable `big_indx` maps to.

    Variables are laid out block-by-block, `num_nodes` per block; raises
    ValueError when the computed block exceeds `num_blocks - 1`.
    """
    block = big_indx // num_nodes  # node indx starts from 0
    if block > num_blocks - 1:
        raise ValueError("block indx cannot be larger than num_blocks-1")
    return int(block)
def get_indx_within_block(big_indx, num_nodes):
    """Return the node index of flat variable `big_indx` inside its block."""
    _, within = divmod(big_indx, num_nodes)
    return within
def get_entry_beta_B(i_indx, j_indx, beta, graph, modularity, num_nodes, num_blocks):
    """Return beta * modularity[i, j] when both flat indices lie in the same
    block, else 0 (cross-block pairs carry no modularity term)."""
    same_block = get_block_number(i_indx, num_blocks, num_nodes) == get_block_number(
        j_indx, num_blocks, num_nodes
    )
    if not same_block:
        return 0
    row = get_indx_within_block(i_indx, num_nodes)
    col = get_indx_within_block(j_indx, num_nodes)
    return beta * modularity[row, col]
def get_entry_B_Gamma(i_indx, j_indx, modularity, beta, gamma, GAMMA, num_nodes, num_parts, num_blocks):
    """Return the penalty coupling gamma[node] when both flat indices refer to
    the same underlying node (across blocks), else 0."""
    row = get_indx_within_block(i_indx, num_nodes)
    col = get_indx_within_block(j_indx, num_nodes)
    return gamma[row] if row == col else 0
def get_entry_add_diag(i_indx, gamma, GAMMA, num_nodes, num_parts, num_blocks):
    """Return the diagonal penalty contribution -2 * GAMMA[i_indx]."""
    return -2 * GAMMA[i_indx]
def get_i_j_entry(i_indx, j_indx, modularity, beta, gamma, GAMMA, graph, num_nodes, num_parts, num_blocks):
    """Return the full QUBO coefficient for the flat variable pair (i_indx, j_indx).

    Every entry combines the modularity term and the same-node coupling
    penalty; diagonal entries additionally pick up -2 * GAMMA[i].
    """
    entry = get_entry_beta_B(i_indx, j_indx, beta, graph, modularity, num_nodes, num_blocks)
    entry += get_entry_B_Gamma(i_indx, j_indx, modularity, beta, gamma, GAMMA, num_nodes, num_parts, num_blocks)
    if i_indx == j_indx:
        entry += get_entry_add_diag(i_indx, gamma, GAMMA, num_nodes, num_parts, num_blocks)
    return entry
def threshold_mmatrix(graph, mmatrix, threshold):
    """Shift the diagonal by each node's degree and zero out small couplings.

    Off-diagonal entries with magnitude below `threshold` are set to 0.
    The matrix is modified in place and also returned.
    """
    size = mmatrix.shape[0]
    for node in range(size):
        mmatrix[node, node] += graph.degree(node)
    for row in range(size):
        for col in range(size):
            if row != col and abs(mmatrix[row, col]) < threshold:
                mmatrix[row, col] = 0.0
    return mmatrix
def makeQubo(graph, modularity, beta, gamma, GAMMA, num_nodes, num_parts, num_blocks, threshold):
    """Assemble the dense QUBO matrix for modularity maximization.

    Entries are negated because the solver minimizes while we maximize.
    Off-diagonal couplings with magnitude <= `threshold` are dropped.
    """
    qsize = num_blocks * num_nodes
    Q = np.zeros([qsize, qsize])
    for row in range(qsize):
        # Diagonal: node weight (always kept).
        Q[row, row] = -get_i_j_entry(
            row, row, modularity, beta, gamma, GAMMA, graph, num_nodes, num_parts, num_blocks
        )
        # Upper triangle: couplings, mirrored for symmetry.
        for col in range(row + 1, qsize):
            entry = get_i_j_entry(
                row, col, modularity, beta, gamma, GAMMA, graph, num_nodes, num_parts, num_blocks
            )
            if abs(entry) > threshold:
                Q[row, col] = -entry
                Q[col, row] = -entry
    return Q
def write_qubo_file(graph, modularity, beta, gamma, GAMMA, num_nodes, num_parts, num_blocks, threshold):
    """Write the problem to 'graph.qubo' in qbsolv's .qubo file format.

    Header line: "p qubo target maxDiagonals nDiagonals nElements"
    (target = 0 means an unconstrained problem).  Node (diagonal) entries
    come first, then couplers; coupler values are doubled because of what
    qbsolv minimizes.

    Fix over the original: the body is assembled in memory and the file is
    written in one pass with a context manager, instead of writing a
    temporary 'body.qubo' and shelling out to `cat`/`rm` via os.system
    (non-portable, and errors were silently ignored).
    """
    qsize = num_blocks * num_nodes
    maxDiagonals = qsize  # number of diagonals in topology
    nDiagonals = qsize    # number of diagonals in the problem
    nElements = 0         # couplers actually emitted (counted below)
    lines = ["\nc nodes first \n"]
    # Node (diagonal) entries.
    for i in range(qsize):
        entry = get_i_j_entry(i, i, modularity, beta, gamma, GAMMA, graph, num_nodes, num_parts, num_blocks)
        lines.append(str(i) + " " + str(i) + " " + str(entry) + "\n")
    # Coupler entries (upper triangle, above threshold only).
    lines.append("\nc couplers \n")
    for i in range(qsize):
        for j in range(i, qsize):
            if i != j:
                entry = get_i_j_entry(i, j, modularity, beta, gamma, GAMMA, graph, num_nodes, num_parts, num_blocks)
                if abs(entry) > threshold:
                    # x2 because of what qbsolv minimizes
                    lines.append(str(i) + " " + str(j) + " " + str(2 * entry) + "\n")
                    nElements += 1
    header = "p qubo 0 " + str(maxDiagonals) + " " + str(nDiagonals) + " " + str(nElements) + "\n"
    with open("graph.qubo", "w") as qubo_file:
        qubo_file.write(header)
        qubo_file.writelines(lines)
def get_qubo_solution():
    """Return the solution bit string (second line) from 'dwave_output.out'.

    Fix over the original: the file handle is closed via a context manager,
    and a file with fewer than two lines returns '' instead of raising
    NameError from an unbound variable.
    """
    with open("dwave_output.out", 'r') as result_file:
        result_file.readline()  # skip the header line
        bit_string = result_file.readline()
    return bit_string.strip()
def violating_contraints(graph, x_indx, num_blocks, num_nodes, num_parts, result):
#each node in exactly one part
for node in range(num_nodes):
value = 0
for j in range(num_blocks):
value += x_indx[(node, j)]
if value >1:
print ("constraint violated: node %d in %d parts. Degree: %d" %(node, value, graph.degree(node)))
value = 0
#balancing contraints
sum_v_i = 0
for node in range(num_nodes):
sum_x_ik = 0
for j in range(num_blocks):
sum_x_ik += x_indx[(node, j)]
node_i = (1 - sum_x_ik)
sum_v_i += node_i
print ("\nlast part size",sum_v_i , - num_nodes/float(num_parts))
num_clusters_found = 0
for j in range(num_blocks):
value = 0
for node in range(num_nodes):
value += x_indx[(node, j)]
print ("part %d has %d nodes" %(j, value))
if value > 0:
num_clusters_found += 1
result['num_clusters_found'] = num_clusters_found
#######################################################
######## penalty weight function #####################
####### ###################
def set_penalty_constant(num_nodes, num_blocks, beta0, gamma0):
    """Return (beta, gamma, GAMMA): the modularity weight, the per-node
    penalty list, and that list repeated once per block (flat layout)."""
    per_node = [gamma0] * num_nodes
    return beta0, per_node, per_node * num_blocks
#########
def calcModularityMetric(mtotal, modularity, part_number):
    """Return the modularity metric: the sum of modularity entries over all
    same-community node pairs, normalized by 2 * mtotal."""
    dim = modularity.shape[1]
    print ("\n Dim = ", dim)
    msum = sum(
        modularity[ii, jj]
        for ii in range(dim)
        for jj in range(dim)
        if part_number[ii] == part_number[jj]
    )
    return msum / (2.0 * mtotal)
def run_qbsolv():
    """Shell out to the qbsolv solver on 'graph.qubo' with a random seed,
    writing the result to 'dwave_output.out'."""
    seed = random.randint(1, 1000)
    command = "qbsolv -r " + str(seed) + " -i graph.qubo -m -o dwave_output.out"
    print('\n', command)
    os.system(command)
def process_solution_qbsolv(graph, num_blocks, num_nodes, num_parts, result):
    """Translate the qbsolv output bit string into a node -> part mapping.

    Also checks the assignment against the partition constraints and
    records cluster statistics in `result`.
    """
    bit_string = get_qubo_solution()
    print (bit_string)
    print ("num non-zeros: ", sum([int(i) for i in bit_string]))
    qubo_soln = [int(bit) for bit in bit_string]
    x_indx = {}
    for flat in range(num_blocks * num_nodes):
        block = get_block_number(flat, num_blocks, num_nodes)
        node = get_indx_within_block(flat, num_nodes)
        x_indx[(node, block)] = qubo_soln[flat]
    violating_contraints(graph, x_indx, num_blocks, num_nodes, num_parts, result)
    return {node: part for (node, part), chosen in x_indx.items() if chosen == 1}
def process_solution(ss, graph, num_blocks, num_nodes, num_parts, result):
    """Translate the first row of a D-Wave sample matrix `ss` into a
    node -> part mapping.

    Also checks the assignment against the partition constraints and
    records cluster statistics in `result`.
    """
    qsize = num_blocks * num_nodes
    qubo_soln = {flat: int(ss[0, flat]) for flat in range(qsize)}
    print('\nnum non-zeros = ', sum(qubo_soln.values()))
    x_indx = {}
    for flat in range(qsize):
        block = get_block_number(flat, num_blocks, num_nodes)
        node = get_indx_within_block(flat, num_nodes)
        x_indx[(node, block)] = qubo_soln[flat]
    violating_contraints(graph, x_indx, num_blocks, num_nodes, num_parts, result)
    part_number = {}
    for (node, part), chosen in x_indx.items():
        if chosen == 1:
            part_number[node] = part
    return part_number
def getEmbedding(qsize):
    # Find a fixed minor-embedding of a fully connected problem on `qsize`
    # variables onto the physical QPU topology, for reuse across runs.
    #dsystem = DWaveCliqueSampler()
    #embedding = dsystem.largest_clique()
    #print('embedding found, len = ', len(embedding))
    #print('embedding = ', embedding)
    #exit(0)
    ksize = qsize
    qsystem = DWaveSampler()
    # Source graph to embed: the edges of the complete graph on ksize nodes.
    ksub = nx.complete_graph(ksize).edges()
    embedding = minorminer.find_embedding(ksub, qsystem.edgelist)
    print('\nembedding done')
    return embedding
def runDwave(Q, num_nodes, k, embedding, qsize, run_label, result):
    """Solve QUBO `Q` with qbsolv over a fixed-embedding D-Wave sampler.

    Wall-clock time, the first (lowest-energy) sample's energy and
    occurrence count, and solution counts are stored in `result`.
    Returns the sampler's samples object.
    """
    # FixedEmbeddingComposite is needed when the problem exceeds what
    # fits natively on the D-Wave hardware graph.
    sampler = FixedEmbeddingComposite(DWaveSampler(), embedding)
    seed = random.randint(1, 10000)
    start = dt.datetime.now()
    solution = QBSolv().sample_qubo(Q, solver=sampler, seed=seed,
                                    label=run_label)
    result['wall_clock_time'] = dt.datetime.now() - start
    # Gather stats: first energy/occurrences, distinct and total solutions.
    ndiff = 0
    total_solns = 0
    for sample, energy, num_occurrences in solution.data():
        if ndiff == 0:
            result['energy'] = energy
            result['num_occ'] = num_occurrences
        ndiff += 1
        total_solns += num_occurrences
    result['num_diff_solns'] = ndiff
    result['total_solns'] = total_solns
    print('\n qbsolv response:')
    print(solution)
    ss = solution.samples()
    print(flush=True)
    return ss
def runDwaveHybrid(Q, num_nodes, k, sub_qsize, run_label, result):
    """Solve QUBO `Q` with a dwave-hybrid workflow (tabu + QPU subproblems).

    Timing, QPU usage, energy, and solution-count statistics are stored
    in-place in the `result` dict.  Returns a 1 x num_nodes array holding
    the first sample of the final state.

    NOTE(review): `k` is accepted for interface parity with runDwave but
    is unused here.
    """
    bqm = dimod.BQM.from_qubo(Q)
    rparams = {}
    rparams['label'] = run_label
    # QPU sampler with timing
    QPUSubSamTime = QPUTimeSubproblemAutoEmbeddingSampler(num_reads=100, sampling_params=rparams)
    # define the workflow
    # NOTE(review): races a tabu branch on the full problem against a
    # decompose-then-QPU-sample branch, then merges samples; presumably the
    # better branch wins each iteration (hybrid.Race semantics — confirm).
    iteration = hybrid.Race(
        hybrid.InterruptableTabuSampler(),
        hybrid.EnergyImpactDecomposer(size=sub_qsize, rolling=True, rolling_history=0.15)
        #| hybrid.QPUSubproblemAutoEmbeddingSampler(num_reads=100, sampling_params=rparams)
        #| QTS.QPUTimeSubproblemAutoEmbeddingSampler(num_reads=100, sampling_params=rparams)
        | QPUSubSamTime
        | hybrid.SplatComposer()
    ) | hybrid.MergeSamples(aggregate=True)
    # Stop after 3 consecutive iterations without improvement.
    workflow = hybrid.LoopUntilNoImprovement(iteration, convergence=3)
    # Run the workflow
    init_state = hybrid.State.from_problem(bqm)
    t0 = dt.datetime.now()
    solution = workflow.run(init_state).result()
    wtime = dt.datetime.now() - t0
    #hybrid.profiling.print_counters(workflow)
    #print('\nQ timers = ', QPUSubSamTime.timers)
    #print('\nQ counters = ', QPUSubSamTime.counters)
    result['wall_clock_time'] = wtime
    # Collect number of QPU accesses and QPU time used
    result['num_qpu_accesses'] = QPUSubSamTime.num_accesses
    result['total_qpu_time'] = QPUSubSamTime.total_qpu_time
    # Collect from lowest energy result
    result['energy'] = solution.samples.first.energy
    result['num_occ'] = solution.samples.first.num_occurrences
    # Collect number of different solutions w different energies
    result['num_diff_solns'] = len(solution.samples)
    total_solns = 0
    for energy, num_occ in solution.samples.data(['energy', 'num_occurrences']):
        total_solns += num_occ
    result['total_solns'] = total_solns
    # Show list of results in energy order
    print(solution.samples)
    # Collect the first solution
    ss = np.zeros([1,num_nodes])
    for i in range(num_nodes):
        ss[0,i] = solution.samples.first.sample[i]
    return ss
def cluster(Q, k, embedding, qsize, run_label, result):
    """Partition with plain D-Wave/qbsolv.

    NOTE: the `qsize` argument is ignored — the effective size is always
    recomputed from Q's second dimension (matching the original behavior).
    """
    n = Q.shape[1]
    print('\n Q size = ', n)
    return runDwave(Q, n, k, embedding, n, run_label, result)
def clusterHybrid(Q, k, sub_qsize, run_label, result):
    """Partition with the hybrid D-Wave/ocean workflow.

    The problem size is taken from Q's second dimension; `sub_qsize`
    bounds the QPU subproblem size.
    """
    n = Q.shape[1]
    print('\n Q size = ', n)
    return runDwaveHybrid(Q, n, k, sub_qsize, run_label, result)
| [
"numpy.set_printoptions",
"hybrid.MergeSamples",
"random.randint",
"hybrid.InterruptableTabuSampler",
"numpy.zeros",
"os.system",
"math.floor",
"qpu_sampler_time.QPUTimeSubproblemAutoEmbeddingSampler",
"hybrid.SplatComposer",
"dimod.BQM.from_qubo",
"minorminer.find_embedding",
"hybrid.LoopUnti... | [((1645, 1660), 'numpy.zeros', 'np.zeros', (['[Dim]'], {}), '([Dim])\n', (1653, 1660), True, 'import numpy as np\n'), ((1755, 1775), 'numpy.zeros', 'np.zeros', (['[Dim, Dim]'], {}), '([Dim, Dim])\n', (1763, 1775), True, 'import numpy as np\n'), ((1855, 1887), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(3)'}), '(precision=3)\n', (1874, 1887), True, 'import numpy as np\n'), ((2046, 2078), 'math.floor', 'math.floor', (['(big_indx / num_nodes)'], {}), '(big_indx / num_nodes)\n', (2056, 2078), False, 'import math\n'), ((4557, 4581), 'numpy.zeros', 'np.zeros', (['[qsize, qsize]'], {}), '([qsize, qsize])\n', (4565, 4581), True, 'import numpy as np\n'), ((6934, 6974), 'os.system', 'os.system', (['"""cat body.qubo >> graph.qubo"""'], {}), "('cat body.qubo >> graph.qubo')\n", (6943, 6974), False, 'import re, os, sys\n'), ((6977, 7002), 'os.system', 'os.system', (['"""rm body.qubo"""'], {}), "('rm body.qubo')\n", (6986, 7002), False, 'import re, os, sys\n'), ((8944, 8967), 'random.randint', 'random.randint', (['(1)', '(1000)'], {}), '(1, 1000)\n', (8958, 8967), False, 'import random, copy\n'), ((9071, 9089), 'os.system', 'os.system', (['estring'], {}), '(estring)\n', (9080, 9089), False, 'import re, os, sys\n'), ((10781, 10795), 'dwave.system.samplers.DWaveSampler', 'DWaveSampler', ([], {}), '()\n', (10793, 10795), False, 'from dwave.system.samplers import DWaveSampler, DWaveCliqueSampler\n'), ((10852, 10901), 'minorminer.find_embedding', 'minorminer.find_embedding', (['ksub', 'qsystem.edgelist'], {}), '(ksub, qsystem.edgelist)\n', (10877, 10901), False, 'import minorminer\n'), ((11236, 11260), 'random.randint', 'random.randint', (['(1)', '(10000)'], {}), '(1, 10000)\n', (11250, 11260), False, 'import random, copy\n'), ((11267, 11284), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (11282, 11284), True, 'import datetime as dt\n'), ((12230, 12252), 'dimod.BQM.from_qubo', 'dimod.BQM.from_qubo', (['Q'], {}), '(Q)\n', 
(12249, 12252), False, 'import dimod\n'), ((12347, 12424), 'qpu_sampler_time.QPUTimeSubproblemAutoEmbeddingSampler', 'QPUTimeSubproblemAutoEmbeddingSampler', ([], {'num_reads': '(100)', 'sampling_params': 'rparams'}), '(num_reads=100, sampling_params=rparams)\n', (12384, 12424), False, 'from qpu_sampler_time import QPUTimeSubproblemAutoEmbeddingSampler\n'), ((12883, 12938), 'hybrid.LoopUntilNoImprovement', 'hybrid.LoopUntilNoImprovement', (['iteration'], {'convergence': '(3)'}), '(iteration, convergence=3)\n', (12912, 12938), False, 'import hybrid\n'), ((12976, 13006), 'hybrid.State.from_problem', 'hybrid.State.from_problem', (['bqm'], {}), '(bqm)\n', (13001, 13006), False, 'import hybrid\n'), ((13014, 13031), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (13029, 13031), True, 'import datetime as dt\n'), ((14000, 14024), 'numpy.zeros', 'np.zeros', (['[1, num_nodes]'], {}), '([1, num_nodes])\n', (14008, 14024), True, 'import numpy as np\n'), ((11164, 11178), 'dwave.system.samplers.DWaveSampler', 'DWaveSampler', ([], {}), '()\n', (11176, 11178), False, 'from dwave.system.samplers import DWaveSampler, DWaveCliqueSampler\n'), ((11403, 11420), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (11418, 11420), True, 'import datetime as dt\n'), ((12834, 12869), 'hybrid.MergeSamples', 'hybrid.MergeSamples', ([], {'aggregate': '(True)'}), '(aggregate=True)\n', (12853, 12869), False, 'import hybrid\n'), ((13089, 13106), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (13104, 13106), True, 'import datetime as dt\n'), ((10805, 10829), 'networkx.complete_graph', 'nx.complete_graph', (['ksize'], {}), '(ksize)\n', (10822, 10829), True, 'import networkx as nx\n'), ((11298, 11306), 'dwave_qbsolv.QBSolv', 'QBSolv', ([], {}), '()\n', (11304, 11306), False, 'from dwave_qbsolv import QBSolv\n'), ((12481, 12514), 'hybrid.InterruptableTabuSampler', 'hybrid.InterruptableTabuSampler', ([], {}), '()\n', (12512, 12514), False, 'import hybrid\n'), 
((12805, 12827), 'hybrid.SplatComposer', 'hybrid.SplatComposer', ([], {}), '()\n', (12825, 12827), False, 'import hybrid\n'), ((12520, 12606), 'hybrid.EnergyImpactDecomposer', 'hybrid.EnergyImpactDecomposer', ([], {'size': 'sub_qsize', 'rolling': '(True)', 'rolling_history': '(0.15)'}), '(size=sub_qsize, rolling=True, rolling_history\n =0.15)\n', (12549, 12606), False, 'import hybrid\n')] |
import os

LABELS_FILE_NAME = 'labels.msl'
README_FILE_NAME = 'README.md'

# Rebuild README.md from scratch so stale rows never survive a re-run.
if os.path.exists(README_FILE_NAME):
    os.remove(README_FILE_NAME)

# Context managers guarantee both files are closed even on an exception
# (the original left the handles open if anything failed mid-loop).
with open(LABELS_FILE_NAME, 'r') as labels_file:
    with open(README_FILE_NAME, 'a+') as readme_file:
        readme_file.write('# WORK\n')
        readme_file.write('\n')
        readme_file.write('|Address|Label|Comment|\n')
        readme_file.write('|-------|-----|-------|\n')
        for raw_line in labels_file:
            # Expected line shape: AREA:ADDRESS:LABEL[:COMMENT]
            parts = raw_line.replace('\n', '').split(':')
            if parts[0] == 'WORK':
                address = parts[1].rjust(6, '0')
                comment = parts[3] if len(parts) > 3 else ''
                readme_file.write('|0x' + address + '|' + parts[2] + '|' + comment + '|\n')
"os.remove",
"os.path.exists"
] | [((78, 110), 'os.path.exists', 'os.path.exists', (['README_FILE_NAME'], {}), '(README_FILE_NAME)\n', (92, 110), False, 'import os\n'), ((114, 141), 'os.remove', 'os.remove', (['README_FILE_NAME'], {}), '(README_FILE_NAME)\n', (123, 141), False, 'import os\n')] |